improvements in relexing speed

--HG--
branch : pmacs2
moculus 2008-04-05 18:06:49 +00:00
parent b9786298e9
commit 0defded454
3 changed files with 17 additions and 18 deletions


@@ -698,12 +698,11 @@ class Application(object):
                 s = tstring[s_offset:]
                 token_done = x_offset + len(s) <= slot.width
                 token_wrap = x_offset + len(s) > slot.width
-                attr = color.build(*token.color)
-                #xyz
-                #if hasattr(token, 'resume') and token.resume:
-                #    attr = color.build('blue', 'green')
-                #else:
-                #    attr = color.build(*token.color)
+                # for debugging things like lexing/relexing/etc.
+                if token._debug:
+                    attr = color.build('blue', 'green')
+                else:
+                    attr = color.build(*token.color)
                 self.win.addstr(slot.offset + count, x_offset, s[:slot.width - x_offset], attr)
                 if token_wrap:
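
The deleted block shows the old workflow: to see which tokens came from a relex, you uncommented debug code in the draw loop. The new code keys the decision off a _debug flag carried by each token, so debugging becomes a data change rather than a code change. A minimal sketch of the pattern (Token and pick_attr here are illustrative stand-ins, not pmacs2's real API):

    class Token(object):
        def __init__(self, color):
            self.color = color      # the token's normal syntax color pair
            self._debug = False     # flip to True to render the token loudly

    def pick_attr(token, build):
        # build stands in for color.build: it maps color names to a
        # curses attribute
        if token._debug:
            return build('blue', 'green')   # debug tokens stand out
        return build(*token.color)          # normal tokens keep their color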


@@ -85,8 +85,6 @@ class Highlighter(object):
     def highlight(self, lines):
         self.tokens = [[] for l in lines]
-        #self.lexer.lex(lines, y=0, x=0)
-        #for token in self.lexer:
         for token in self.lexer.lex(lines, y=0, x=0):
             self.tokens[token.y].append(token)
@@ -100,10 +98,10 @@ class Highlighter(object):
         # these keep track of the current y coordinate, the current token index
         # on line[y], and the current "new token", respectively.
         y = y1
         i = 0
         getnext = True
         new_token = None
         while True:
             # if we have overstepped our bounds, then exit!
@@ -113,7 +111,6 @@
             # if we need another new_token, then try to get it.
             if getnext:
                 try:
-                    #new_token = self.lexer.next()
                     new_token = gen.next()
                     getnext = False
                 except StopIteration:
@@ -126,6 +123,7 @@
             # if our next token is one a future line, we need to just get rid of
             # all our old tokens until we get there
+            #onfuture = False
             while new_token.y > y:
                 del self.tokens[y][i:]
                 i = 0
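
Taken together, these hunks tighten the incremental relexing loop: a generator (gen) yields freshly lexed tokens starting at line y1, and the loop reconciles them against the cached per-line lists in self.tokens, discarding stale cached tokens whenever the fresh stream jumps ahead to a later line. A simplified sketch of that reconciliation shape, assuming only a bare token object with a y attribute (the real loop also compares fresh tokens to cached ones so it can stop early once they agree, which is where the speed comes from):

    def relex_sketch(tokens, fresh, y1):
        # tokens: cached per-line token lists; fresh: iterator of newly
        # lexed tokens starting on line y1
        y, i = y1, 0
        for nt in fresh:
            # the fresh stream moved past line y: everything cached from
            # position (y, i) to the end of that line is stale
            while nt.y > y:
                del tokens[y][i:]
                y, i = y + 1, 0
            tokens[y].insert(i, nt)   # simplified; the real code matches
            i += 1                    # or replaces rather than just inserting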

lex.py (12 changed lines)

@@ -13,7 +13,7 @@ class Token(object):
         self.parent = parent
         self.matchd = matchd
         self.link = link
-        #self.resume = False #xyz
+        self._debug = False
         assert parent is None or hasattr(parent, 'name'), 'oh no %r' % parent
     def parents(self):
         if self.parent is not None:
@@ -50,8 +50,10 @@ class Token(object):
     def end_x(self):
         return self.x + len(self.string)
     def __eq__(self, other):
-        return (self.y == other.y and self.x == other.x
-                and self.name == other.name and self.parent is other.parent and
+        return (other is not None and
+                self.y == other.y and self.x == other.x and
+                self.name == other.name and
+                self.parent == other.parent and
                 self.string == other.string)
     def __repr__(self):
         if len(self.string) < 10:
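
Two behavioral changes hide in this __eq__ rewrite: comparisons against None no longer blow up, and parents are now compared by value (==) instead of identity (is). Both plausibly serve relexing speed, since a relex can rebuild parent tokens as new objects that are equal but not identical, and an identity test would keep otherwise-matching tokens from ever comparing equal. An illustrative stand-in (not pmacs2's real classes):

    class Parent(object):
        def __init__(self, name):
            self.name = name
        def __eq__(self, other):
            return other is not None and self.name == other.name

    # a relex pass may rebuild the parent token as a fresh object...
    p_old, p_new = Parent('string'), Parent('string')
    assert p_old is not p_new   # identity differs between passes
    assert p_old == p_new       # but value equality still matches, so
                                # token comparison can succeed
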
@@ -407,10 +409,10 @@ class Lexer(object):
         if toresume:
             for t in toresume[0].rule.resume(self, toresume):
-                #t.resume = True #xyz
+                #t._debug = True
                 yield t
         for t in self._lex():
-            #t.resume = True #xyz
+            #t._debug = True
             yield t
         del self.action
         raise StopIteration
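
The final hunk shows lex() as a resume-then-continue generator: when relexing starts inside a multi-line construct, it first replays tokens from the interrupted token's rule.resume(...), then falls through to ordinary lexing; the commented #t._debug = True lines are the hook that would light those tokens up in the draw loop from the first file. A generic sketch of the shape, with toy stand-ins for rule.resume and _lex (not pmacs2's actual signatures):

    class SketchLexer(object):
        def __init__(self, lines):
            self.lines = lines
        def _resume(self, toresume):
            # stand-in for toresume[0].rule.resume(self, toresume):
            # finish the multi-line construct the relex landed inside
            for t in toresume:
                yield t
        def _lex(self):
            # stand-in for the normal lexing loop
            for line in self.lines:
                yield line
        def lex(self, toresume=None):
            # first replay the interrupted construct, then lex normally;
            # t._debug = True could be set on replayed tokens to mark them
            if toresume:
                for t in self._resume(toresume):
                    yield t
            for t in self._lex():
                yield t

    # usage: list(SketchLexer(['a', 'b']).lex(toresume=['tail']))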