parent 30dcf827aa
commit a5b35c1c0e

@@ -479,7 +479,8 @@ class Application:
         group = w.buffer.highlights[modename].tokens[y]
         j = 0
         for token in group:
-            assert token.y == y
+            #assert token.y == y, '%d == %d' % (token.y, y)
+            assert token.y == y, w.buffer.highlights[modename].dump()
             if token.x < x:
                 continue
             elif token.x >= x + slot.width:
@@ -67,6 +67,7 @@ class Highlighter:
                              deleted)
+            i = 0
             y += 1
 
     def highlight(self, lines):
         self.tokens = [[] for l in lines]
         self.lexer.lex(lines, y=0, x=0)
@@ -77,7 +78,10 @@ class Highlighter:
     # ======================
     def relex(self, lines, y1, x1, y2, x2):
         # start the relexing process
-        token = self.tokens[y1][0]
+        if self.tokens[y1]:
+            token = self.tokens[y1][0]
+        else:
+            token = None
         self.lexer.resume(lines, y1, 0, token)
 
         # these keep track of the current y coordinate, the current token index
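Note on the relex() change above: the old code assumed line y1 always had at least one token, which raises IndexError for a line the lexer has produced no tokens for (such as a blank or freshly inserted line). A minimal sketch of the safer lookup, with hypothetical names (tokens_by_line stands in for Highlighter.tokens):

    def first_token_or_none(tokens_by_line, y1):
        # tokens_by_line is a list of per-line token lists; only return a
        # resume point if line y1 actually has tokens, otherwise hand back
        # None so lexing starts that line from scratch.
        row = tokens_by_line[y1]
        return row[0] if row else None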
@@ -156,6 +160,63 @@ class Highlighter:
         assert y1 <= y2
         assert y2 < len(lines)
 
+        # first let's delete any token who falls in the range of the change (or,
+        # in the case of child tokens, whose parent is being deleted).
+        y = y1
+        i = 0
+        done = False
+        while not done:
+            if i < len(self.tokens[y]):
+                # figure out if this token is in our range. notice that
+                # delete_token() will take care of the need to recursively
+                # delete children for us
+                token = self.tokens[y][i]
+                if token.y >= y2 and token.x >= x2:
+                    done = True
+                elif token.y <= y1 and token.x < x1:
+                    pass
+                else:
+                    self.delete_token(y, i)
+
+            # ok, so now figure out what we should do next, either advancing a
+            # token, or being finished with this part
+            if i < len(self.tokens[y]) - 1:
+                i += 1
+            elif y < len(self.tokens) - 1:
+                y += 1
+                i = 0
+            else:
+                done = True
+
+        # ok, so now we need to "adjust" the (x,y) coordinates of all the tokens
+        # after the change. first we will copy over the pre-deletion tokens.
+        newtokens = [[] for x in range(0, len(self.tokens) - y2 + y1)]
+
+        for y in range(0, y1 + 1):
+            for token in self.tokens[y]:
+                newtokens[y].append(token)
+
+        # then the tokens which occured on the same line as the end of the
+        # deletion.
+        for token in self.tokens[y2]:
+            token.x = token.x - x2 + x1
+            token.y = y1
+            newtokens[y1].append(token)
+
+        # finally, we will copy over the tokens from subsequent lines
+        for y in range(y2 + 1, len(self.tokens)):
+            for token in self.tokens[y]:
+                token.y = token.y - y2 + y1
+                newtokens[y - y2 + y1].append(token)
+
+        # now save our new tokens
+        self.tokens = newtokens
+
+    def update_del2(self, lines, y1, x1, y2, x2):
+        assert y1 >= 0
+        assert y1 <= y2
+        assert y2 < len(lines)
+
         xdelta = x2 - x1
         ydelta = y2 - y1
 
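Note on the update_del() hunk above: the second half is pure coordinate bookkeeping. The sketch below is a stand-alone illustration, not the project's code (Token here is a hypothetical minimal class), assuming the in-range tokens were already removed by the first loop and that the deletion spans more than one line: deleting (y1, x1) through (y2, x2) splices line y2 onto line y1, so survivors from line y2 shift left by x2 - x1 and every later line moves up by y2 - y1.

    class Token:
        # hypothetical minimal token: just a position and a string
        def __init__(self, y, x, string):
            self.y, self.x, self.string = y, x, string

    def adjust_after_delete(tokens, y1, x1, y2, x2):
        # tokens is a list of per-line token lists with the in-range tokens
        # already deleted, as in the first loop of update_del above
        assert y2 > y1, "sketch covers multi-line deletions only"
        newtokens = [[] for _ in range(len(tokens) - y2 + y1)]
        # lines up to and including y1 keep their coordinates
        for y in range(0, y1 + 1):
            newtokens[y].extend(tokens[y])
        # survivors from the last deleted line land on line y1, shifted left
        for token in tokens[y2]:
            token.x = token.x - x2 + x1
            token.y = y1
            newtokens[y1].append(token)
        # later lines simply move up by the number of deleted lines
        for y in range(y2 + 1, len(tokens)):
            for token in tokens[y]:
                token.y = token.y - y2 + y1
                newtokens[token.y].append(token)
        return newtokens

    # e.g. with (y1, x1) = (1, 2) and (y2, x2) = (3, 4): a surviving token at
    # (3, 10) ends up at (1, 8), and a token at (5, 0) ends up at (3, 0).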
lex2.py
@@ -323,7 +323,7 @@ class DualRegionRule(Rule):
         while not done and lexer.y == old_y and lexer.x < len(lexer.lines[lexer.y]):
             # if we are reentering mid-parse, then that takes precedence
             if reenter:
-                raise Exception, "aw damn"
+                raise Exception, "aw damn1"
                 #reenter = False
                 #xrule = rulecontext[0].rule
                 #xd = rulecontext[0].matchd
@@ -393,7 +393,7 @@ class DualRegionRule(Rule):
 
             # if we are reentering mid-parse, then that takes precedence
             if reenter:
-                raise Exception, "aw damn"
+                raise Exception, "aw damn2"
                 #reenter = False
                 #xrule = rulecontext[0].rule
                 #xd = rulecontext[0].matchd
@@ -491,7 +491,8 @@ class Lexer:
         if token:
             toresume = token.parents()
             if toresume:
-                raise Exception, "aw damn"
+                toresume[0].rule.resume(self, toresume)
+                #raise Exception, "aw damn3"
 
     def __iter__(self):
         if self.lines is None:
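Note on the Lexer change above: rather than raising, resume() now asks the outermost enclosing region rule of the saved token to re-enter its region. The following is only a rough sketch with made-up names (FakeToken, resume_from), assuming parents() returns the chain of enclosing tokens outermost-first and may be empty for a top-level token; the real lex2.py classes may differ in detail.

    class FakeToken:
        def __init__(self, rule, parent=None):
            self.rule, self.parent = rule, parent
        def parents(self):
            # walk up the nesting chain and return it outermost-first
            chain, t = [], self.parent
            while t is not None:
                chain.append(t)
                t = t.parent
            chain.reverse()
            return chain

    def resume_from(lexer, token):
        toresume = token.parents()
        if toresume:
            # let the outermost region rule re-enter, handing it the whole
            # chain so nested regions can be re-entered in turn
            toresume[0].rule.resume(lexer, toresume)
        # otherwise there is no region to re-enter and lexing starts fresh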