96 lines
3.4 KiB
Python
96 lines
3.4 KiB
Python
import sys
|
|
|
|
# ANSI SGR escape codes for the 16 standard terminal foreground colors
# plus a reset code.  Layout mirrors color_names index-for-index:
#   0-7  : normal ("dark") colors  -- ESC[3Nm
#   8-15 : bold/bright variants    -- ESC[3N;1m
#   16   : attribute reset         -- ESC[0m
color_list = []
color_list.extend(['\033[3%dm' % x for x in range(0, 8)])
color_list.extend(['\033[3%d;1m' % x for x in range(0, 8)])
color_list.append('\033[0m')

# Human-readable names, aligned index-for-index with color_list.
color_names = [
    'black', 'dred', 'dgreen', 'brown', 'dblue', 'dpurple', 'dcyan', 'lgrey',
    'dgrey', 'lred', 'lgreen', 'yellow', 'lblue', 'lpurple', 'lcyan', 'white',
    'unset',
]

# Map color name -> escape code.  zip pairs the two parallel lists directly,
# replacing the old index-loop over range(len(color_list)).
color_dict = dict(zip(color_names, color_list))
|
|
|
|
class Highlighter:
    """Colorizes lexer tokens and writes them to stdout with ANSI escapes."""

    def __init__(self, lexer):
        # Token source; lex()/iteration protocol is used by highlight().
        self.lexer = lexer
        # Per-line token cache: tokens[y] is the list of tokens on line y.
        self.tokens = []
|
|
|
|
def display(self, token_colors=None, debug=False):
    """Write every cached token to stdout, colorized via ANSI escapes.

    token_colors maps dotted token names (e.g. 'string.quote') to color
    names from color_dict.  Lookup tries the full dotted name, then each
    progressively shorter suffix ('a.b.c', 'b.c', 'c') until one matches.
    Unmatched tokens fall back to 'white' -- unless debug is true, in
    which case an Exception is raised naming the unmapped token.
    """
    # Avoid the mutable-default-argument trap; {} keeps old behavior.
    if token_colors is None:
        token_colors = {}
    for group in self.tokens:
        for token in group:
            color_name = None
            name_parts = token.name.split('.')
            for i in range(len(name_parts)):
                # Compute the suffix once (was joined twice per iteration).
                suffix = '.'.join(name_parts[i:])
                if suffix in token_colors:
                    color_name = token_colors[suffix]
                    break
            if color_name is None:
                if debug:
                    # Call form works on both Python 2 and 3 (the old
                    # `raise Exception, msg` statement is 2.x-only).
                    raise Exception("no highlighting for %r" % token.name)
                color_name = 'white'
            sys.stdout.write(color_dict[color_name])
            sys.stdout.write(token.string)
        # One newline per token group (i.e. per source line).
        sys.stdout.write('\n')
|
|
|
|
def highlight(self, lines):
    """Lex *lines* from scratch and rebuild the per-line token cache.

    After this call, self.tokens[y] holds the tokens whose y attribute
    places them on line y.
    """
    # One empty bucket per input line; tokens are binned by their y.
    self.tokens = [[] for _ in lines]
    self.lexer.lex(lines, y=0, x=0)
    for tok in self.lexer:
        self.tokens[tok.y].append(tok)
|
|
|
|
def update(self, lines, y1=0, x1=0, y2=-1, x2=-1):
    # Incrementally re-sync the cached token structure with the lexer after
    # an edit spanning (y1, x1)..(y2, x2), instead of re-lexing everything.
    # NOTE(review): this method is unfinished and currently does not parse
    # (stray ':' on the assert below); the NOTE(review) comments flag the
    # concrete defects for whoever picks this work back up.
    # basically, we are syncing up our cached internal state with the lexer
    # so... we need to keep track of where we are in our internal structure.
    insertion_index = None
    line_index = None
    x = x1
    y = y1

    # so figure out where exactly the document has changed and how this
    # necessarily affects our internal structure
    for i in range(0, len(self.tokens[y1])):
        t = self.tokens[y1][i]
        if t.x < x1 and t.x + len(t.string) > x1:
            # this token spans our region, so invalidate it and start our
            # update from its start instead
            x1 = t.x
            insertion_index = i
            line_index = i
            # NOTE(review): this deletes the i-th *line* of tokens, not the
            # i-th token on line y1 -- presumably meant
            # `del self.tokens[y1][i]`; confirm against the intent above.
            del self.tokens[i]
            break
        elif t.x == x1:
            # ok, so it looks like the change starts on a token
            line_index = i

    # NOTE(review): syntax error -- the trailing ':' makes this statement
    # unparsable; should read `assert line_index is not None`.
    assert line_index is not None:

    self.lexer.lex(lines, y1, x1)
    # NOTE(review): this iterates the bound method `lex` itself rather than
    # the lexer; highlight() iterates `self.lexer` directly, which looks
    # like the intent here too.
    for lt in self.lexer.lex:
        if y != lt.y:
            y = lt.y
        # NOTE(review): truthiness check skips a legitimate insertion index
        # of 0; probably wants `if insertion_index is not None:`.
        if insertion_index:
            # ok, so we have a "gap" that we have to fill, so just insert
            # the token in our structure, and then see if it overlaps
            # something else that has to go
            self.tokens[y].insert(insertion_index, lt)
            line_index = insertion_index + 1
            insertion_index = None
        for i in range(line_index, len(self.tokens[y])):
            # NOTE(review): `< None` is never a meaningful comparison and the
            # 'GJIE'/'GGGJGEI' markers look like leftover scratch -- this
            # loop body is placeholder code awaiting the real overlap check.
            if self.tokens[y][i].start < None: #GJIE
                pass #GGGJGEI
            insertion_index = None

    if y2 > 0:
        for i in range(y1, y2):
            # NOTE(review): statement truncated -- a bare attribute access
            # has no effect; the method appears to end unfinished here.
            self.tokens
|