From 0defded454839708a89d435fddd0adb38c8b4ef4 Mon Sep 17 00:00:00 2001
From: moculus
Date: Sat, 5 Apr 2008 18:06:49 +0000
Subject: [PATCH] improvements in relexing speed

--HG--
branch : pmacs2
---
 application.py | 11 +++++------
 highlight.py   | 12 +++++-------
 lex.py         | 12 +++++++-----
 3 files changed, 17 insertions(+), 18 deletions(-)

diff --git a/application.py b/application.py
index c90c5fe..250cf74 100755
--- a/application.py
+++ b/application.py
@@ -698,12 +698,11 @@ class Application(object):
             s = tstring[s_offset:]
             token_done = x_offset + len(s) <= slot.width
             token_wrap = x_offset + len(s) > slot.width
-            attr = color.build(*token.color)
-            #xyz
-            #if hasattr(token, 'resume') and token.resume:
-            #    attr = color.build('blue', 'green')
-            #else:
-            #    attr = color.build(*token.color)
+            # for debugging lexing/relexing: draw debug-flagged tokens loudly
+            if token._debug:
+                attr = color.build('blue', 'green')
+            else:
+                attr = color.build(*token.color)
             self.win.addstr(slot.offset + count, x_offset, s[:slot.width - x_offset], attr)
             if token_wrap:
diff --git a/highlight.py b/highlight.py
index 82f5937..5c0b5a7 100644
--- a/highlight.py
+++ b/highlight.py
@@ -85,8 +85,6 @@ class Highlighter(object):
     def highlight(self, lines):
         self.tokens = [[] for l in lines]
-        #self.lexer.lex(lines, y=0, x=0)
-        #for token in self.lexer:
         for token in self.lexer.lex(lines, y=0, x=0):
             self.tokens[token.y].append(token)
@@ -100,10 +98,10 @@
         # these keep track of the current y coordinate, the current token index
         # on line[y], and the current "new token", respectively.
-        y = y1
-        i = 0
-        getnext = True
-        new_token = None
+        y         = y1
+        i         = 0
+        getnext   = True
+        new_token = None

         while True:
             # if we have overstepped our bounds, then exit!
@@ -113,7 +111,6 @@
             # if we need another new_token, then try to get it.
             if getnext:
                 try:
-                    #new_token = self.lexer.next()
                     new_token = gen.next()
                     getnext = False
                 except StopIteration:
@@ -126,6 +123,7 @@
             # if our next token is on a future line, we just need to get rid
             # of all our old tokens until we get there
+            #onfuture = False
             while new_token.y > y:
                 del self.tokens[y][i:]
                 i = 0
diff --git a/lex.py b/lex.py
index 97c7ecc..0254947 100755
--- a/lex.py
+++ b/lex.py
@@ -13,7 +13,7 @@ class Token(object):
         self.parent = parent
         self.matchd = matchd
         self.link = link
-        #self.resume = False #xyz
+        self._debug = False
         assert parent is None or hasattr(parent, 'name'), 'oh no %r' % parent
     def parents(self):
         if self.parent is not None:
@@ -50,8 +50,10 @@
     def end_x(self):
         return self.x + len(self.string)
     def __eq__(self, other):
-        return (self.y == other.y and self.x == other.x
-                and self.name == other.name and self.parent is other.parent and
+        return (other is not None and
+                self.y == other.y and self.x == other.x and
+                self.name == other.name and
+                self.parent == other.parent and
                 self.string == other.string)
     def __repr__(self):
         if len(self.string) < 10:
@@ -407,10 +409,10 @@ class Lexer(object):
         if toresume:
             for t in toresume[0].rule.resume(self, toresume):
-                #t.resume = True #xyz
+                #t._debug = True
                 yield t
         for t in self._lex():
-            #t.resume = True #xyz
+            #t._debug = True
             yield t
         del self.action
         raise StopIteration
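
The _debug flag on Token replaces the old commented-out `resume` hack: when the
draw loop in application.py sees a flagged token, it paints it blue-on-green
instead of the token's normal colors, so you can see exactly which tokens a lex
or relex produced. A hypothetical helper showing how one might flip the flag,
assuming the lexer interface used in highlight.py (the names debug_lex, lexer,
lines, and tokens are placeholders, not part of the patch):

    def debug_lex(lexer, lines):
        # lex the whole buffer and mark every resulting token so the
        # draw loop renders it in the loud debug colors
        tokens = [[] for l in lines]
        for t in lexer.lex(lines, y=0, x=0):
            t._debug = True
            tokens[t.y].append(t)
        return tokens

Uncommenting the two #t._debug = True lines in lex.py does the same thing for
resumed and freshly lexed tokens respectively.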
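
The getnext flag in highlight.py's update loop is worth calling out: gen.next()
(the Python 2 generator protocol) consumes a token irrevocably, so the loop
only advances the generator after the current new_token has been dealt with,
and otherwise carries it across iterations. Stripped of the real
token-splicing, the pattern looks like this (place is a hypothetical stand-in
for the bookkeeping that stores a token):

    def merge_tokens(gen, place):
        # place(tok) tries to store tok and returns True once it is stored
        getnext   = True
        new_token = None
        while True:
            if getnext:
                try:
                    new_token = gen.next()  # consume exactly one token
                    getnext = False
                except StopIteration:
                    break                   # lexer exhausted; done merging
            if place(new_token):
                getnext = True              # safe to pull the next token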
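
A note on the Token.__eq__ change: the new `other is not None` guard means
equality can be tested against an empty slot without raising AttributeError,
and comparing parents with == instead of `is` presumably lets tokens whose
parents were rebuilt by a relex still compare equal. A minimal standalone
sketch of the behavior (the fields mirror lex.py; the usage at the bottom is
hypothetical):

    class Token(object):
        def __init__(self, y, x, name, string, parent=None):
            self.y      = y
            self.x      = x
            self.name   = name
            self.string = string
            self.parent = parent
        def __eq__(self, other):
            # tolerate comparison against None ("no old token here") and
            # compare parents by value, since a relex may rebuild them
            return (other is not None and
                    self.y == other.y and self.x == other.x and
                    self.name == other.name and
                    self.parent == other.parent and
                    self.string == other.string)

    old = None                           # nothing stored at this slot yet
    new = Token(0, 0, 'keyword', 'def')
    print(new == old)                    # False, instead of AttributeError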