--HG--
branch : pmacs2
moculus 2007-04-01 01:37:42 +00:00
parent 3f895ac112
commit af6306f67e
3 changed files with 72 additions and 67 deletions

lex2.py

@@ -78,7 +78,6 @@ class RegionRule(Rule):
m = self.start_re.match(lexer.lines[lexer.y], lexer.x)
if m:
self._add_from_regex(context, 'start', lexer, m)
null_t_name = '.'.join(context + [self.name, 'null'])
null_t = None
@@ -86,14 +85,18 @@ class RegionRule(Rule):
end_re = re.compile(self.end % m.groupdict())
done = False
# NOTE: need to better handle matches that might consume more than
# one line of input. #### also, seems like some "region" matching isn't
# working, and finally, like the end token(s) might not be handled correctly
while not done and lexer.y < len(lexer.lines):
line = lexer.lines[lexer.y]
if len(line) == 0:
old_y = lexer.y
if len(lexer.lines[lexer.y]) == 0:
null_t = Token(null_t_name, lexer.y, lexer.x, '')
lexer.add_token(null_t)
while not done and lexer.x < len(line):
null_t = None
while not done and lexer.y == old_y and lexer.x < len(lexer.lines[lexer.y]):
if self.end:
m = end_re.match(line, lexer.x)
m = end_re.match(lexer.lines[lexer.y], lexer.x)
if m:
self._add_from_regex(context, 'end', lexer, m)
done = True
@@ -109,17 +112,19 @@ class RegionRule(Rule):
if null_t is None:
null_t = Token(null_t_name, lexer.y, lexer.x, '')
lexer.add_token(null_t)
null_t.add_to_string(line[lexer.x])
null_t.add_to_string(lexer.lines[lexer.y][lexer.x])
lexer.x += 1
null_t = None
if not done:
if not done and old_y == lexer.y:
lexer.y += 1
lexer.x = 0
return True
else:
return False
# NOTE: this needs to get synced up with RegionRule's changes...
# right now, it has at least 2-3 different bugs. suck!
class DualRegionRule(Rule):
def __init__(self, name, start, grammar1, middle, grammar2, end):
assert valid_name_re.match(name), 'invalid name %r' % name
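
The hunks above drop the cached line variable in favour of re-reading lexer.lines[lexer.y] on every pass and guard the inner loop with old_y, so the per-line scan stops as soon as something advances lexer.y. A minimal, self-contained sketch of that pattern; the Lexer and Token classes here are toy stand-ins for illustration, not the real pmacs classes:

    import re

    class Token:
        def __init__(self, name, y, x, s=''):
            self.name, self.y, self.x, self.string = name, y, x, s
        def add_to_string(self, s):
            self.string += s

    class Lexer:
        def __init__(self, lines):
            self.lines, self.y, self.x, self.tokens = lines, 0, 0, []
        def add_token(self, t):
            self.tokens.append(t)

    def scan_region(lexer, end_re, null_t_name='null'):
        """Consume text until end_re matches, possibly across several lines."""
        done = False
        while not done and lexer.y < len(lexer.lines):
            old_y = lexer.y
            null_t = None
            # re-read lexer.lines[lexer.y] each iteration; stop if lexer.y moved
            while not done and lexer.y == old_y and lexer.x < len(lexer.lines[lexer.y]):
                m = end_re.match(lexer.lines[lexer.y], lexer.x)
                if m:
                    lexer.add_token(Token('end', lexer.y, lexer.x, m.group(0)))
                    lexer.x = m.end()
                    done = True
                else:
                    if null_t is None:
                        null_t = Token(null_t_name, lexer.y, lexer.x)
                        lexer.add_token(null_t)
                    null_t.add_to_string(lexer.lines[lexer.y][lexer.x])
                    lexer.x += 1
            if not done and old_y == lexer.y:
                lexer.y += 1
                lexer.x = 0
        return done

    lexer = Lexer(['still inside', 'the region ends" here'])
    print(scan_region(lexer, re.compile(r'"')))        # True
    print([(t.name, t.string) for t in lexer.tokens])  # one null token per line, then the end token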


@@ -116,7 +116,7 @@ class PerlGrammar(Grammar):
),
PatternRule(
name=r'bareword_hash_index',
pattern=r'(?<={) *[A-Za-z0-9_]+(?=})',
pattern=r'(?<={)[A-Za-z0-9_]+(?=})',
),
PatternRule(
name=r'bareword_hash_key',
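
The tightened bareword_hash_index pattern above no longer allows spaces between the opening brace and the bareword. A quick standalone check of the difference (the variable names below are just for illustration):

    import re

    old = re.compile(r'(?<={) *[A-Za-z0-9_]+(?=})')
    new = re.compile(r'(?<={)[A-Za-z0-9_]+(?=})')

    for text in ('$h{foo}', '$h{ foo}'):
        print(text, bool(old.search(text)), bool(new.search(text)))
    # $h{foo}  True True
    # $h{ foo} True False   <- the new pattern requires the key to touch the brace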


@@ -30,16 +30,16 @@ token_colors = {
'endblock': 'lred',
'pod': 'lred',
'comment': 'lred',
'string1': 'lgreen',
#'string1': 'lgreen',
'string1.start': 'lgreen',
'string1.null': 'lgreen',
'string1.escaped': 'lpurple',
'string1.scalar': 'yellow',
'string1.system_scalar': 'yellow',
#'string1.scalar': 'yellow',
#'string1.system_scalar': 'yellow',
'string1.hash_deref': 'yellow',
'string1.hash_bareword_index': 'lgreen',
#'string1.hash_bareword_index': 'lgreen',
'string1.end': 'lgreen',
'string2': 'lgreen',
#'string2': 'lgreen',
'string2.start': 'lgreen',
'string2.null': 'lgreen',
'string2.end': 'lgreen',
@@ -53,11 +53,11 @@ token_colors = {
'dereference': 'yellow',
'array': 'yellow',
'hash': 'yellow',
'hash_bareword_index': 'lgreen',
'bareword_hash_index': 'lgreen',
'quoted_region': 'lcyan',
'match_regex': 'lcyan',
'replace_regex': 'lcyan',
'literal_hash_bareword_index': 'lgreen',
'bareword_hash_key': 'lgreen',
'interpolated_scalar': 'yellow',
'interpolated_system_scalar': 'yellow',
'interpolated_array': 'yellow',
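
With the flat 'string1' and 'string2' entries commented out, the colour table now keys on the dotted sub-token names emitted by the region rules ('string1.start', 'string1.null', and so on). A small sketch of one way such names could be resolved, falling back from the full dotted name to its prefixes; the fallback behaviour is an assumption for illustration, not necessarily how pmacs looks up colours:

    token_colors = {
        'string1.start':   'lgreen',
        'string1.null':    'lgreen',
        'string1.escaped': 'lpurple',
        'string1.end':     'lgreen',
    }

    def color_for(token_name, colors, default='white'):
        """Try the full dotted name, then each shorter prefix, then the default."""
        parts = token_name.split('.')
        while parts:
            name = '.'.join(parts)
            if name in colors:
                return colors[name]
            parts.pop()
        return default

    print(color_for('string1.escaped', token_colors))  # lpurple
    print(color_for('string1.scalar', token_colors))   # white (no entry and no 'string1' fallback here)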