parent 01c101d1ad
commit 7d2f7d5a6c

lex3.py | 36
--- a/lex3.py
+++ b/lex3.py
@@ -91,7 +91,7 @@ class PatternRule(Rule):
         return self.re.match(self.get_line(lexer), lexer.x)
     def lex(self, lexer, parent, m):
         if m:
-            yield self.make_token(lexer, m.group(0), self.name, parent)
+            yield self.make_token(lexer, m.group(0), self.name, parent, m.groupdict())
         raise StopIteration

 class NocasePatternRule(PatternRule):
@@ -126,7 +126,7 @@ class RegionRule(Rule):
             stopre = re.compile(self.end % t1.matchd, self.reflags)
         else:
             stopre = None
-        for t2 in self._lex(lexer, [t1], 'end', stopre):
+        for t2 in self._lex(lexer, [t1], 'end', stopre, self.grammar):
             yield t2
         raise StopIteration
     def resume(self, lexer, toresume):
@@ -137,11 +137,11 @@ class RegionRule(Rule):
             stopre = re.compile(self.end % t1.matchd, self.reflags)
         else:
             stopre = None
-        for t2 in self._lex(lexer, [t1], 'end', stopre):
+        for t2 in self._lex(lexer, [t1], 'end', stopre, self.grammar):
             yield t2
         raise StopIteration

-    def _lex(self, lexer, toresume, stopname, stopre):
+    def _lex(self, lexer, toresume, stopname, stopre, grammar):
         assert toresume
         parent = toresume[0]
         reenter = len(toresume) > 1
@@ -169,12 +169,12 @@ class RegionRule(Rule):
                         if null_t:
                             yield null_t
                             null_t = None
-                        yield self.make_token(lexer, m.group(0), stopname, parent)
+                        yield self.make_token(lexer, m.group(0), stopname, parent, m.groupdict())
                         done = True
                         break

                 m = None
-                for rule in self.grammar.rules:
+                for rule in grammar.rules:
                     m = rule.match(lexer, parent)
                     if m:
                         if null_t:
@@ -185,9 +185,9 @@ class RegionRule(Rule):
                         break

-                if not m:
-                    if not null_t:
-                        null_t = Token('null', None, lexer.y, lexer.x, '', parent)
+                if lexer.x < len(line):
+                    if not null_t:
+                        null_t = Token('null', None, lexer.y, lexer.x, '', parent)
                     null_t.add_to_string(line[lexer.x])
                     lexer.x += 1
                 if null_t:
@@ -217,18 +217,21 @@ class DualRegionRule(RegionRule):

         t2 = None
         if self.middle:
-            stopre = re.compile(self.middle % t1.groupdict(), self.reflags)
+            stopre = re.compile(self.middle % t1.matchd, self.reflags)
         else:
             stopre = None
-        for t2 in self._lex(lexer, [t1], 'middle', stopre):
+        for t2 in self._lex(lexer, [t1], 'middle', stopre, self.grammar1):
             yield t2

         if t2 is not None and t2.name == 'middle':
             if self.end:
-                stopre = re.compile(self.end % t2.groupdict(), self.reflags)
+                d = dict(t2.matchd)
+                if t1:
+                    d.update(t1.matchd)
+                stopre = re.compile(self.end % d, self.reflags)
             else:
                 stopre = None
-            for t3 in self._lex(lexer, [t2], 'end', stopre):
+            for t3 in self._lex(lexer, [t2], 'end', stopre, self.grammar2):
                 yield t3

         raise StopIteration
@@ -286,6 +289,7 @@ class Lexer:
         self.y = 0
         self.x = 0
         self.lines = None
+        assert self.grammar.rules
     def get_line(self):
         return self.lines[self.y] + '\n'
     def lex(self, lines, y=0, x=0):
@@ -319,9 +323,9 @@ class Lexer:
         raise StopIteration

     def _lex(self):
-        null_t = None
         parent = None
         while self.y < len(self.lines):
+            null_t = None
             line = self.get_line()
             while self.x < len(line):
                 m = None
@@ -335,15 +339,15 @@ class Lexer:
                         yield t
                     break

                 line = self.get_line()
-                if not m:
-                    if not null_t:
-                        null_t = Token('null', None, self.y, self.x, '', parent)
+                if self.x < len(line):
+                    if null_t is None:
+                        null_t = Token('null', None, self.y, self.x, '', parent)
                     null_t.add_to_string(line[self.x])
                     self.x += 1
             if null_t:
                 yield null_t
                 null_t = None
             self.y += 1
             self.x = 0
         raise StopIteration
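A minimal sketch (not part of the commit) of the mechanism the lex3.py changes above rely on: the named groups captured by a region's start regex are kept as a dict (m.groupdict(), stored on the token as matchd) and %-interpolated into the end-pattern template, as in stopre = re.compile(self.end % t1.matchd, self.reflags). Using the heredoc pattern from mode_perl.py below:

    import re

    # start pattern with a named group, as in PerlGrammar's heredoc rule
    start = re.compile(r"<<(?P<heredoc>[a-zA-Z0-9_]+) *;")
    m = start.match("<<EOT;")
    matchd = m.groupdict()                    # {'heredoc': 'EOT'}

    # end-pattern template interpolated with the captured groups,
    # mirroring re.compile(self.end % t1.matchd, self.reflags)
    stopre = re.compile(r'^%(heredoc)s$' % matchd)
    print(stopre.match("EOT") is not None)    # True: only this terminator closes the region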
mode_perl.py | 64
--- a/mode_perl.py
+++ b/mode_perl.py
@@ -30,7 +30,7 @@ class PerlGrammar(Grammar):
         RegionRule(r'heredoc', r"<<(?P<heredoc>[a-zA-Z0-9_]+) *;", StringGrammar, r'^%(heredoc)s$'),
         RegionRule(r'heredoc', r'<< *"(?P<heredoc>[a-zA-Z0-9_]+)" *;', StringGrammar, r'^%(heredoc)s$'),
         RegionRule(r'heredoc', r"<< *'(?P<heredoc>[a-zA-Z0-9_]+)' *;", Grammar, r'^%(heredoc)s$'),
-        RegionRule(r'eval_heredoc', r"<< *`(?P<heredoc>[a-zA-Z0-9_]+)` *;", StringGrammar, r'^%(heredoc)s$'),
+        RegionRule(r'evaldoc', r"<< *`(?P<heredoc>[a-zA-Z0-9_]+)` *;", StringGrammar, r'^%(heredoc)s$'),

         RegionRule(r'endblock', r"^__END__|__DATA__ *$", Grammar, r''),
         RegionRule(r'pod', r'^=[a-zA-Z0-9_]+', PodGrammar, r'^=cut'),
@@ -124,37 +124,31 @@ class PerlTabber(tab2.StackTabber):
         fqname = token.fqname()
         if fqname == 'delimiter' and token.string == ';':
             self._opt_pop('cont')
-        elif fqname == 'heredoc1.start':
-            self._opt_append('heredoc1', None)
-        elif fqname == 'heredoc1.end':
-            self._opt_pop('heredoc1')
+        elif fqname == 'heredoc.start':
+            self._opt_append('heredoc', None)
+        elif fqname == 'heredoc.end':
+            self._opt_pop('heredoc')
             self._opt_pop('cont')
-        elif fqname == 'heredoc2.start':
-            self._opt_append('heredoc2', None)
-        elif fqname == 'heredoc2.end':
-            self._opt_pop('heredoc2')
-            self._opt_pop('cont')
-        elif fqname == 'eval_heredoc.start':
-            self._opt_append('eval_heredoc', None)
-        elif fqname == 'eval_heredoc.end':
-            self._opt_pop('eval_heredoc')
+        elif fqname == 'evaldoc.start':
+            self._opt_append('evaldoc', None)
+        elif fqname == 'evaldoc.end':
+            self._opt_pop('evaldoc')
             self._opt_pop('cont')
         elif fqname == 'pod.start':
             self._opt_append('pod', None)
         elif fqname == 'pod.end':
             self._opt_pop('pod')
             currlvl = 0
-        elif fqname == 'string1.start' or fqname == 'string2.start':
+        elif fqname == 'string.start':
             self._opt_append('string', None)
-        elif fqname == 'string1.end' or fqname == 'string2.end':
+        elif fqname == 'string.end':
             self._opt_pop('string')
             if self.is_rightmost_token(y, i):
                 self._opt_append('cont', currlvl + 4)
         if self.is_rightmost_token(y, i):
             if(not fqname.startswith('pod') and
                not fqname.startswith('heredoc') and
-               not fqname.startswith('string1') and
-               not fqname.startswith('string2') and
+               not fqname.startswith('string') and
                not fqname.startswith('endblock') and
                not fqname == 'comment' and
                not fqname == 'null' and
@@ -217,16 +211,13 @@ class Perl(mode2.Fundamental):
             'require': color.build('cyan', 'default'),
             'method': color.build('cyan', 'default'),

-            # heredoc
-            'heredoc1.start': color.build('green', 'default'),
-            'heredoc1.null': color.build('green', 'default'),
-            'heredoc1.end': color.build('green', 'default'),
-            'heredoc2.start': color.build('green', 'default'),
-            'heredoc2.null': color.build('green', 'default'),
-            'heredoc2.end': color.build('green', 'default'),
-            'eval_heredoc.start': color.build('cyan', 'default'),
-            'eval_heredoc.null': color.build('cyan', 'default'),
-            'eval_heredoc.end': color.build('cyan', 'default'),
+            # heredoc/evaldoc
+            'heredoc.start': color.build('green', 'default'),
+            'heredoc.null': color.build('green', 'default'),
+            'heredoc.end': color.build('green', 'default'),
+            'evaldoc.start': color.build('cyan', 'default'),
+            'evaldoc.null': color.build('cyan', 'default'),
+            'evaldoc.end': color.build('cyan', 'default'),

             # pod
             'pod.start': color.build('red', 'default'),
@@ -236,17 +227,12 @@ class Perl(mode2.Fundamental):
             'pod.entry.end': color.build('magenta', 'default'),
             'pod.end': color.build('red', 'default'),

-            # "" strings
-            'string1.start': color.build('green', 'default'),
-            'string1.null': color.build('green', 'default'),
-            'string1.escaped': color.build('magenta', 'default'),
-            'string1.deref': color.build('yellow', 'default'),
-            'string1.end': color.build('green', 'default'),
-
-            # '' strings
-            'string2.start': color.build('green', 'default'),
-            'string2.null': color.build('green', 'default'),
-            'string2.end': color.build('green', 'default'),
+            # strings
+            'string.start': color.build('green', 'default'),
+            'string.null': color.build('green', 'default'),
+            'string.escaped': color.build('magenta', 'default'),
+            'string.deref': color.build('yellow', 'default'),
+            'string.end': color.build('green', 'default'),

             # `` strings
             'evalstring.start': color.build('cyan', 'default'),