From 759ceeb80571c4b1048074e3d0cca7dc2472cbc2 Mon Sep 17 00:00:00 2001
From: moculus
Date: Mon, 9 Apr 2007 23:21:43 +0000
Subject: [PATCH]

--HG--
branch : pmacs2
---
 highlight2.py | 50 ++++++++++++++++++++++++++++++++++++++++++++++++--
 lex2.py       |  6 ++++++
 test2.py      |  5 ++++-
 test3.py      | 16 +++++++++++-----
 4 files changed, 69 insertions(+), 8 deletions(-)

diff --git a/highlight2.py b/highlight2.py
index aa70d18..42f2300 100644
--- a/highlight2.py
+++ b/highlight2.py
@@ -31,6 +31,7 @@ class Highlighter:
                         break
                 if color_name is not None:
                     sys.stdout.write(color_dict[color_name])
+                    pass
                 elif debug:
                     raise Exception, "no highlighting for %r" % token.name
                 else:
@@ -45,5 +46,50 @@ class Highlighter:
         for token in self.lexer:
             self.tokens[token.y].append(token)
 
-    def update(self, lines):
-        pass
\ No newline at end of file
+    def update(self, lines, y1=0, x1=0, y2=-1, x2=-1):
+        # basically, we are syncing up our cached internal state with the
+        # lexer, so we need to keep track of where we are in our structure
+        insertion_index = None
+        line_index = None
+        x = x1
+        y = y1
+
+        # figure out where exactly the document has changed and how this
+        # affects our internal structure
+        for i in range(0, len(self.tokens[y1])):
+            t = self.tokens[y1][i]
+            if t.x < x1 and t.x + len(t.string) > x1:
+                # this token spans our region, so invalidate it and start our
+                # update from its start instead
+                x1 = t.x
+                insertion_index = i
+                line_index = i
+                del self.tokens[y1][i]
+                break
+            elif t.x == x1:
+                # ok, so it looks like the change starts on a token
+                line_index = i
+
+        assert line_index is not None
+
+        self.lexer.lex(lines, y1, x1)
+        for lt in self.lexer:
+            if y != lt.y:
+                y = lt.y
+            if insertion_index is not None:
+                # ok, so we have a "gap" that we have to fill, so just insert
+                # the token in our structure, and then see if it overlaps
+                # something else that has to go
+                self.tokens[y].insert(insertion_index, lt)
+                line_index = insertion_index + 1
+                insertion_index = None
+            for i in range(line_index, len(self.tokens[y])):
+                # XXX: unfinished -- invalidate cached tokens overlapped by lt
+                pass
+            insertion_index = None
+
+
+
+        if y2 > 0:
+            for i in range(y1, y2):
+                pass # XXX: unfinished -- resync cached tokens for y1..y2
diff --git a/lex2.py b/lex2.py
index f08a597..8641e63 100755
--- a/lex2.py
+++ b/lex2.py
@@ -12,6 +12,11 @@ class Token(object):
         self.vargs = vargs
     def add_to_string(self, s):
         self.string += s
+    def __eq__(self, other):
+        return (self.y == other.y and
+                self.x == other.x and
+                self.string == other.string and
+                self.vargs == other.vargs)
     def __repr__(self):
         if len(self.string) < 10:
             s = self.string
@@ -373,6 +378,7 @@
                     self.add_token(null_t)
                 null_t.add_to_string(line[self.x])
                 self.x += 1
+            null_t = None
             self.y += 1
             self.x = 0
 
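The Token.__eq__ above looks meant to pair with the new Highlighter.update():
once a freshly lexed token compares equal to the cached token at the same
position, the lexer has resynchronized with the cache and the old tail can be
reused. A minimal sketch of that idea, assuming made-up names (resync, cached,
fresh) rather than anything in highlight2.py:

    def resync(cached, fresh, start):
        # merge freshly lexed tokens into the cached list, stopping as
        # soon as the two streams agree again; Token.__eq__ makes this
        # a structural comparison rather than an identity check
        out = cached[:start]
        for i, t in enumerate(fresh):
            j = start + i
            if j < len(cached) and cached[j] == t:
                out.extend(cached[j:])  # resynced; reuse the cached tail
                return out
            out.append(t)
        return out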
diff --git a/test2.py b/test2.py
index 394cbee..154bbb0 100644
--- a/test2.py
+++ b/test2.py
@@ -1,3 +1,4 @@
+#!/usr/bin/python
 import sys
 import lex2, lex2_perl
 
@@ -15,4 +16,6 @@ for path in paths:
     lexer.lex(lines)
     print path
     for token in lexer:
-        print '%-28s| %r' % (token.name, token.string)
+        print '%-30s| %-6s | %r' % (token.name,
+                                    '(%d,%d)' % (token.x, token.y),
+                                    token.string)
diff --git a/test3.py b/test3.py
index 6e1417b..fc290f8 100644
--- a/test3.py
+++ b/test3.py
@@ -1,3 +1,4 @@
+#!/usr/bin/python
 import sys
 import lex2, lex2_perl, lex2_python, highlight2
 
@@ -112,6 +113,10 @@ token_colors = {
         'string.format':   'yellow',
         'string.end':      'lgreen',
 
+        'integer':         'lred',
+        'float':           'lred',
+        'imaginary':       'lred',
+
         'tq_string.start': 'lgreen',
         'tq_string.null':  'lgreen',
         'tq_string.end':   'lgreen',
@@ -152,11 +157,12 @@ for path in paths:
     lexer = lex2.Lexer('lexer', grammars[t]())
 
     if m:
-        highlighter = highlight2.Highlighter(lexer)
-
-        highlighter.highlight(lines)
-        highlighter.display(token_colors[t])
+        h = highlight2.Highlighter(lexer)
+        h.highlight(lines)
+        h.display(token_colors[t])
    else:
         lexer.lex(lines)
         for token in lexer:
-            print '%-28s| %r' % (token.name, token.string)
+            print '%-30s| %-6s | %r' % (token.name,
+                                        '(%d,%d)' % (token.x, token.y),
+                                        token.string)
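For reference, the Highlighter flow that test3.py now exercises is three
calls: construct it with a lexer, highlight() to lex the buffer and cache
tokens by line, display() to write the colored text to stdout. A usage sketch
with stand-in names (SomeGrammar, lines, colors are hypothetical):

    import lex2, highlight2

    lexer = lex2.Lexer('lexer', SomeGrammar())
    h = highlight2.Highlighter(lexer)
    h.highlight(lines)   # lex the buffer and cache tokens by line number
    h.display(colors)    # write color escapes and token text to stdout

With the widened format string, a hypothetical 'import' keyword at the start
of a file would print as:

    keyword                       | (0,0)  | 'import'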