parent 9878252505
commit 759ceeb805
@@ -31,6 +31,7 @@ class Highlighter:
                        break
                if color_name is not None:
                    sys.stdout.write(color_dict[color_name])
                    pass
                elif debug:
                    raise Exception, "no highlighting for %r" % token.name
                else:
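The display path above writes an ANSI escape for whatever color name the token's name mapped to. As a rough illustration only (the real color_dict is presumably defined elsewhere in highlight2.py and is not part of this diff), such a table could look like:

    import sys

    # hypothetical color table: color names -> ANSI escape sequences
    color_dict = {
        'red':     '\033[31m',
        'lred':    '\033[1;31m',
        'lgreen':  '\033[1;32m',
        'yellow':  '\033[33m',
        'default': '\033[0m',
    }

    def write_colored(color_name, text):
        # fall back to the default attribute when a name is unknown
        sys.stdout.write(color_dict.get(color_name, color_dict['default']))
        sys.stdout.write(text)
        sys.stdout.write(color_dict['default'])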
@@ -45,5 +46,50 @@ class Highlighter:
        for token in self.lexer:
            self.tokens[token.y].append(token)

    def update(self, lines):
        pass
    def update(self, lines, y1=0, x1=0, y2=-1, x2=-1):
        # basically, we are syncing up our cached internal state with the lexer
        # so... we need to keep track of where we are in our internal structure.
        insertion_index = None
        line_index = None
        x = x1
        y = y1

        # so figure out where exactly the document has changed and how this
        # necessarily affects our internal structure
        for i in range(0, len(self.tokens[y1])):
            t = self.tokens[y1][i]
            if t.x < x1 and t.x + len(t.string) > x1:
                # this token spans our region, so invalidate it and start our
                # update from its start instead
                x1 = t.x
                insertion_index = i
                line_index = i
                del self.tokens[i]
                break
            elif t.x == x1:
                # ok, so it looks like the change starts on a token
                line_index = i

        assert line_index is not None

        self.lexer.lex(lines, y1, x1)
        for lt in self.lexer.lex:
            if y != lt.y:
                y = lt.y
            if insertion_index:
                # ok, so we have a "gap" that we have to fill, so just insert
                # the token in our structure, and then see if it overlaps
                # something else that has to go
                self.tokens[y].insert(insertion_index, lt)
                line_index = insertion_index + 1
                insertion_index = None
            for i in range(line_index, len(self.tokens[y])):
                if self.tokens[y][i].start < None: #GJIE
                    pass #GGGJGEI
                insertion_index = None

        if y2 > 0:
            for i in range(y1, y2):
                self.tokens
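The comments in update() describe the re-sync idea: find where the edit landed in the cached per-row token lists, widen the region back to the start of any token the edit cuts through, re-lex from there, and splice the fresh tokens in. A standalone sketch of just that first "adjust the region" step, using a plain (x, string) pair as a stand-in for the module's Token class:

    def adjust_region(row_tokens, x1):
        # row_tokens: list of (x, string) pairs for one row, sorted by x.
        # Returns (index, new_x1): where in the row to start replacing
        # tokens, and the possibly widened start column.
        for i, (x, s) in enumerate(row_tokens):
            if x < x1 and x + len(s) > x1:
                # the edit begins inside this token, so restart from its x
                return i, x
            elif x == x1:
                # the edit begins exactly on a token boundary
                return i, x1
        return None, x1

    row = [(0, 'def'), (3, ' '), (4, 'foo'), (7, '('), (8, ')'), (9, ':')]
    i, x = adjust_region(row, 5)   # -> (2, 4): the edit at column 5 falls
                                   #    inside 'foo', which starts at column 4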
lex2.py
@@ -12,6 +12,11 @@ class Token(object):
        self.vargs = vargs
    def add_to_string(self, s):
        self.string += s
    def __eq__(self, other):
        return (self.y == other.y and
                self.x == other.x and
                self.string == other.string and
                self.vargs == other.vargs)
    def __repr__(self):
        if len(self.string) < 10:
            s = self.string
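The new __eq__ gives Token value semantics: two tokens compare equal when their row, column, text, and vargs all match, regardless of object identity. A small standalone illustration of the same comparison (the stand-in class below carries only the fields __eq__ looks at, not the real Token constructor):

    class TokenLike(object):
        def __init__(self, y, x, string, vargs):
            self.y, self.x, self.string, self.vargs = y, x, string, vargs
        def __eq__(self, other):
            return (self.y == other.y and
                    self.x == other.x and
                    self.string == other.string and
                    self.vargs == other.vargs)

    a = TokenLike(3, 0, 'while', {})
    b = TokenLike(3, 0, 'while', {})
    c = TokenLike(3, 6, 'True', {})
    assert a == b          # same position, text and vargs
    assert not (a == c)    # different position and text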
@@ -373,6 +378,7 @@ class Lexer:
                    self.add_token(null_t)
                null_t.add_to_string(line[self.x])
                self.x += 1
            null_t = None
            self.y += 1
            self.x = 0
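This Lexer hunk touches the fallback path where characters that no grammar rule matches get appended to a running "null" token; clearing null_t before advancing to the next row keeps such a token from spanning lines. A rough, self-contained sketch of that accumulate-and-flush pattern with made-up names (this is not the module's actual Lexer loop):

    def lex_unmatched(lines, match_len_at):
        # match_len_at(y, x) -> length of a real rule match at (y, x), or 0
        tokens = []
        for y, line in enumerate(lines):
            null_s, null_x = '', 0
            x = 0
            while x < len(line):
                n = match_len_at(y, x)
                if n:
                    if null_s:
                        tokens.append(('null', y, null_x, null_s))
                        null_s = ''
                    x += n
                else:
                    if not null_s:
                        null_x = x
                    null_s += line[x]
                    x += 1
            if null_s:
                # flush at end of line so a null token never crosses rows
                tokens.append(('null', y, null_x, null_s))
        return tokens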
test2.py
@@ -1,3 +1,4 @@
#!/usr/bin/python
import sys
import lex2, lex2_perl
@@ -15,4 +16,6 @@ for path in paths:
    lexer.lex(lines)
    print path
    for token in lexer:
        print '%-28s| %r' % (token.name, token.string)
        print '%-30s| %-6s | %r' % (token.name,
                                    '(%d,%d)' % (token.x, token.y),
                                    token.string)
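The per-token output in the test scripts switches from two columns to three, adding each token's (x, y) position between the name and the text. Purely to show the layout, with made-up values:

    name, x, y, string = 'string.start', 4, 12, '"'
    line = '%-30s| %-6s | %r' % (name, '(%d,%d)' % (x, y), string)
    # line now reads: string.start<padded to 30>| (4,12) | '"'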
test3.py
@@ -1,3 +1,4 @@
#!/usr/bin/python
import sys
import lex2, lex2_perl, lex2_python, highlight2
@@ -112,6 +113,10 @@ token_colors = {
    'string.format': 'yellow',
    'string.end': 'lgreen',

    'integer': 'lred',
    'float': 'lred',
    'imaginary': 'lred',

    'tq_string.start': 'lgreen',
    'tq_string.null': 'lgreen',
    'tq_string.end': 'lgreen',
@@ -152,11 +157,12 @@ for path in paths:
        lexer = lex2.Lexer('lexer', grammars[t]())

        if m:
            highlighter = highlight2.Highlighter(lexer)

            highlighter.highlight(lines)
            highlighter.display(token_colors[t])
            h = highlight2.Highlighter(lexer)
            h.highlight(lines)
            h.display(token_colors[t])
        else:
            lexer.lex(lines)
            for token in lexer:
                print '%-28s| %r' % (token.name, token.string)
                print '%-30s| %-6s | %r' % (token.name,
                                            '(%d,%d)' % (token.x, token.y),
                                            token.string)