some changes to grammar storage, etc. some bug fixes too

--HG--
branch : pmacs2
This commit is contained in:
moculus 2007-07-08 23:16:53 +00:00
parent 89d9b8ed33
commit 26d884f931
7 changed files with 271 additions and 220 deletions

View File

@ -447,10 +447,10 @@ class Application(object):
if self.mini_active: if self.mini_active:
b = self.mini_buffer b = self.mini_buffer
w = b.windows[0] w = b.windows[0]
(cx, cy) = w.logical_cursor().xy() p = w.logical_cursor()
if cy >= len(b.lines): if p.y >= len(b.lines):
return return
(vy, vx) = (self.y - 1, cx + len(self.mini_prompt)) (vy, vx) = (self.y - 1, min(p.x + len(self.mini_prompt), self.x - 2))
#self.win.move(self.y-1, cx + len(self.mini_prompt)) #self.win.move(self.y-1, cx + len(self.mini_prompt))
else: else:
slot = self.bufferlist.slots[self.active_slot] slot = self.bufferlist.slots[self.active_slot]
@ -475,7 +475,11 @@ class Application(object):
#self.win.move(slot.offset + count, p.x - x) #self.win.move(slot.offset + count, p.x - x)
if vy is None or vx is None: if vy is None or vx is None:
return return
self.win.move(vy, vx) try:
self.win.move(vy, vx)
except:
raise Exception, "(%d,%d==%r) was illegal (%d,%d)" % \
(vx, vy, p, self.x, self.y)
# sub-drawing methods # sub-drawing methods
def draw_slots(self): def draw_slots(self):
@ -490,8 +494,14 @@ class Application(object):
char = junk & 255 char = junk & 255
#attr = color.build(fg, bg, curses.A_REVERSE) #attr = color.build(fg, bg, curses.A_REVERSE)
attr = color.build(fg, bg) attr = color.build(fg, bg)
self.win.addch(sy, sx, char, attr) try:
self.win.addch(sy, sx, char, attr)
except:
raise Exception, "(%d, %d, %r, %r) v. (%d, %d)" % \
(sy, sx, fg, bg, self.y, self.x)
def highlight_chars(self, sy, sx1, sx2, fg='default', bg='default'): def highlight_chars(self, sy, sx1, sx2, fg='default', bg='default'):
assert sx2 < self.x, "%d < %d" % (sx2, self.x)
for x in range(sx1, sx2): for x in range(sx1, sx2):
self.highlight_char(sy, x, fg, bg) self.highlight_char(sy, x, fg, bg)
@ -516,13 +526,17 @@ class Application(object):
(x, y) = w.first.xy() (x, y) = w.first.xy()
px = p1.x px = p1.x
while count < slot.height: while count < slot.height:
if p1.y == y and px >= x and px < x + slot.width: if p1.y == y and px >= x and px - x < slot.width:
if px + slot.width > p2.x: if slot.width > p2.x - x:
self.highlight_chars(slot.offset + count, px - x, p2.x -x, fg, bg) #assert p2.x-x < self.x, \
# "%d-%d < %d" % (p2.x, x, self.x)
self.highlight_chars(slot.offset + count, px-x, p2.x-x, fg, bg)
break break
else: else:
self.highlight_chars(slot.offset + count, px - x, px + slot.width -x-1, fg, bg) #assert px - x < self.x, \
px += slot.width # "%d+%d-%d-1 < %d" % (px, slot.width, x, self.x)
self.highlight_chars(slot.offset + count, px-x, slot.width, fg, bg)
px += slot.width - px + x
if x + slot.width >= len(w.buffer.lines[y]): if x + slot.width >= len(w.buffer.lines[y]):
x = 0 x = 0
y += 1 y += 1

47
lex2.py
View File

@ -63,14 +63,20 @@ class Rule:
raise Exception, "%s rule cannot match!" % self.name raise Exception, "%s rule cannot match!" % self.name
def make_token(self, lexer, s, name, parent=None, matchd={}): def make_token(self, lexer, s, name, parent=None, matchd={}):
return Token(name, self, lexer.y, lexer.x, s, parent, matchd) return Token(name, self, lexer.y, lexer.x, s, parent, matchd)
def _set_group(self, group):
if group is None:
self.group = self.name
else:
self.group = group
class ConstantRule(Rule): class ConstantRule(Rule):
def __init__(self, name, constant): def __init__(self, name, constant, group=None):
assert valid_name_re.match(name), 'invalid name %r' % name assert valid_name_re.match(name), 'invalid name %r' % name
assert name not in reserved_names, "reserved rule name: %r" % name assert name not in reserved_names, "reserved rule name: %r" % name
self.name = name self.name = name
self.constant = constant self.constant = constant
self.lenth = len(self.constant) self.length = len(self.constant)
self._set_group(group)
def match(self, lexer, parent): def match(self, lexer, parent):
if lexer.lines[lexer.y][lexer.x:].startswith(self.constant): if lexer.lines[lexer.y][lexer.x:].startswith(self.constant):
token = self.make_token(lexer, self.constant, self.name, parent) token = self.make_token(lexer, self.constant, self.name, parent)
@ -81,12 +87,13 @@ class ConstantRule(Rule):
return False return False
class PatternRule(Rule): class PatternRule(Rule):
def __init__(self, name, pattern): def __init__(self, name, pattern, group=None):
assert valid_name_re.match(name), 'invalid name %r' % name assert valid_name_re.match(name), 'invalid name %r' % name
assert name not in reserved_names, "reserved rule name: %r" % name assert name not in reserved_names, "reserved rule name: %r" % name
self.name = name self.name = name
self.pattern = pattern self.pattern = pattern
self._compile() self._compile()
self._set_group(group)
def _compile(self): def _compile(self):
self.re = re.compile(self.pattern) self.re = re.compile(self.pattern)
def _match(self, lexer, parent, m): def _match(self, lexer, parent, m):
@ -107,13 +114,14 @@ class NocasePatternRule(PatternRule):
self.re = re.compile(self.pattern, re.IGNORECASE) self.re = re.compile(self.pattern, re.IGNORECASE)
class ContextPatternRule(PatternRule): class ContextPatternRule(PatternRule):
def __init__(self, name, pattern, fallback): def __init__(self, name, pattern, fallback, group=None):
assert valid_name_re.match(name), 'invalid name %r' % name assert valid_name_re.match(name), 'invalid name %r' % name
assert name not in reserved_names, "reserved rule name: %r" % name assert name not in reserved_names, "reserved rule name: %r" % name
self.name = name self.name = name
self.pattern = pattern self.pattern = pattern
self.fallback = fallback self.fallback = fallback
self.fallback_re = re.compile(fallback) self.fallback_re = re.compile(fallback)
self._set_group(group)
def match(self, lexer, parent): def match(self, lexer, parent):
try: try:
r = re.compile(self.pattern % parent.matchd) r = re.compile(self.pattern % parent.matchd)
@ -127,7 +135,7 @@ class ContextPatternRule(PatternRule):
return False return False
class RegionRule(Rule): class RegionRule(Rule):
def __init__(self, name, start, grammar, end): def __init__(self, name, start, grammar, end, group=None):
assert valid_name_re.match(name), 'invalid name %r' % name assert valid_name_re.match(name), 'invalid name %r' % name
assert name not in reserved_names, "reserved rule name: %r" % name assert name not in reserved_names, "reserved rule name: %r" % name
self.name = name self.name = name
@ -135,6 +143,7 @@ class RegionRule(Rule):
self.grammar = grammar self.grammar = grammar
self.end = end self.end = end
self.start_re = self._compile_start() self.start_re = self._compile_start()
self._set_group(group)
def _compile_start(self): def _compile_start(self):
return re.compile(self.start) return re.compile(self.start)
@ -270,7 +279,7 @@ class NocaseRegionRule(RegionRule):
return re.compile(self.end % d, re.IGNORECASE) return re.compile(self.end % d, re.IGNORECASE)
class DualRegionRule(Rule): class DualRegionRule(Rule):
def __init__(self, name, start, grammar1, middle, grammar2, end): def __init__(self, name, start, grammar1, middle, grammar2, end, group=None):
assert valid_name_re.match(name), 'invalid name %r' % name assert valid_name_re.match(name), 'invalid name %r' % name
assert name not in reserved_names, "reserved rule name: %r" % name assert name not in reserved_names, "reserved rule name: %r" % name
self.name = name self.name = name
@ -280,6 +289,7 @@ class DualRegionRule(Rule):
self.grammar2 = grammar2 self.grammar2 = grammar2
self.end = end self.end = end
self.start_re = re.compile(start) self.start_re = re.compile(start)
self._set_group(group)
def _add_from_regex(self, name, lexer, parent, m, matchd={}): def _add_from_regex(self, name, lexer, parent, m, matchd={}):
s = m.group(0) s = m.group(0)
token = self.make_token(lexer, s, name, parent, matchd) token = self.make_token(lexer, s, name, parent, matchd)
@ -480,6 +490,30 @@ class Grammar:
if hasattr(rule, 'grammar') and rule.grammar is None: if hasattr(rule, 'grammar') and rule.grammar is None:
rule.grammar = self rule.grammar = self
grammars = {}
grammars['null'] = Grammar()
crash = False
def add(name, grammar):
global crash, grammars
if crash and name in grammars:
raise Exception, "oh no! already have a grammar for %r" %name
else:
grammars[name] = grammar
def get(name):
global crash, grammars
try:
return grammars[name]
except KeyError:
if crash:
raise
elif name == 'null':
return Grammar()
else:
return get('null')
class Lexer: class Lexer:
def __init__(self, name, grammar): def __init__(self, name, grammar):
self.name = name self.name = name
@ -542,3 +576,4 @@ class Lexer:
return self.tokens.pop(0) return self.tokens.pop(0)
else: else:
raise StopIteration raise StopIteration

View File

@ -1,39 +1,33 @@
import color, mode2 import color, lex2, mode2
from lex2 import Grammar, PatternRule, RegionRule from lex2 import Grammar, PatternRule, RegionRule
class OpenTagGrammar(Grammar):
rules = [
RegionRule(name=r'string', start=r'(?P<tag>["\'])', grammar=Grammar(), end=r'%(tag)s'),
PatternRule(name=r'namespace', pattern=r'[a-zA-Z_]+:'),
PatternRule(name=r'attrname', pattern=r'[^ =>\n]+(?==)'),
PatternRule(name=r'name', pattern=r'[^ =>\n]+'),
]
class StringGrammar(Grammar): class StringGrammar(Grammar):
rules = [ rules = [
PatternRule(name=r'octal', pattern=r'\\[0-7]{3}'), PatternRule(r'octal', r'\\[0-7]{3}'),
PatternRule(name=r'escaped', pattern=r'\\.'), PatternRule(r'escaped', r'\\.'),
] ]
lex2.add('xml-string', StringGrammar)
class BDSGrammar(Grammar): class BDSGrammar(Grammar):
rules = [ rules = [
RegionRule(name=r'comment', start=r'<!--', grammar=Grammar(), end=r'-->'), RegionRule(r'comment', r'<!--', lex2.get('null'), r'-->'),
RegionRule(name=r'opentag', start=r'<', grammar=OpenTagGrammar(), end=r'/?>'), RegionRule(r'opentag', r'<', lex2.get('xml-opentag'), r'/?>'),
PatternRule(name=r'closetag', pattern=r'< */ *[ =>\n]+ *>'), PatternRule(r'closetag', r'< */ *[ =>\n]+ *>'),
PatternRule(name=r'delimiter', pattern=r'[\[\]\{\}\(\),\?:]'), PatternRule(r'delimiter', r'[\[\]\{\}\(\),\?:]'),
PatternRule(name=r'derived', pattern=r'(?:FM|CD|FS|FM|TA)[0-9]{3}-[0-9]{3}-[0-9]{3}'), PatternRule(r'derived', r'(?:FM|CD|FS|FM|TA)[0-9]{3}-[0-9]{3}-[0-9]{3}'),
PatternRule(name=r'question', pattern=r'GQ[0-9]{3}-[0-9]{3}-[0-9]{3}:MQ[0-9]{3}-[0-9]{3}-[0-9]{3}'), PatternRule(r'question', r'GQ[0-9]{3}-[0-9]{3}-[0-9]{3}:MQ[0-9]{3}-[0-9]{3}-[0-9]{3}'),
PatternRule(name=r'bdsfunc', pattern=r'[A-Z_][A-Z0-9_]+(?= *\()'), PatternRule(r'bdsfunc', r'[A-Z_][A-Z0-9_]+(?= *\()'),
PatternRule(name=r'perlfunc', pattern=r'[a-zA-Z_][a-zA-Z0-9_]+(?= *\()'), PatternRule(r'perlfunc', r'[a-zA-Z_][a-zA-Z0-9_]+(?= *\()'),
PatternRule(name=r'misquoted', pattern=r"'[A-Z]{2}[0-9]{3}-[0-9]{3}-[0-9]{3}(?::[A-Z]{2}[0-9]{3}-[0-9]{3}-[0-9]{3})?'"), PatternRule(r'misquoted', r"'[A-Z]{2}[0-9]{3}-[0-9]{3}-[0-9]{3}(?::[A-Z]{2}[0-9]{3}-[0-9]{3}-[0-9]{3})?'"),
PatternRule(name=r'misquoted', pattern=r'"[A-Z]{2}[0-9]{3}-[0-9]{3}-[0-9]{3}(?::[A-Z]{2}[0-9]{3}-[0-9]{3}-[0-9]{3})?"'), PatternRule(r'misquoted', r'"[A-Z]{2}[0-9]{3}-[0-9]{3}-[0-9]{3}(?::[A-Z]{2}[0-9]{3}-[0-9]{3}-[0-9]{3})?"'),
RegionRule(name=r'string', start='"', grammar=StringGrammar(), end='"'), RegionRule(r'string', '"', lex2.get('perl-string'), '"'),
RegionRule(name=r'string', start="'", grammar=Grammar(), end="'"), RegionRule(r'string', "'", lex2.get('null'), "'"),
PatternRule(name=r'operator', pattern=r'(?:&gt;=|&lt;=|&gt;|&lt;|==|&amp;&amp;|\|\||eq|ne)'), PatternRule(r'operator', r'(?:&gt;=|&lt;=|&gt;|&lt;|==|&amp;&amp;|\|\||eq|ne)'),
] ]
lex2.add('bds', BDSGrammar)
class BDS(mode2.Fundamental): class BDS(mode2.Fundamental):
grammar = BDSGrammar grammar = lex2.get('bds')
opentoken = 'delimiter' opentoken = 'delimiter'
opentags = {'(': ')', '[': ']', '{': '}'} opentags = {'(': ')', '[': ']', '{': '}'}
closetoken = 'delimiter' closetoken = 'delimiter'

View File

@ -6,98 +6,97 @@ from method import Argument, Method
class PodGrammar(Grammar): class PodGrammar(Grammar):
rules = [ rules = [
PatternRule(name=r'entry', pattern=r'(?<=^=head[1-4]) +.*$'), PatternRule(r'entry', r'(?<=^=head[1-4]) +.*$'),
PatternRule(name=r'entry', pattern=r'(?<=^=over) +.*$'), PatternRule(r'entry', r'(?<=^=over) +.*$'),
PatternRule(name=r'entry', pattern=r'(?<=^=item) +.*$'), PatternRule(r'entry', r'(?<=^=item) +.*$'),
PatternRule(name=r'entry', pattern=r'(?:(?<=^=begin)|(?<=^=end)) +.*$'), PatternRule(r'entry', r'(?:(?<=^=begin)|(?<=^=end)) +.*$'),
PatternRule(name=r'entry', pattern=r'(?<=^=encoding) +.*$'), PatternRule(r'entry', r'(?<=^=encoding) +.*$'),
] ]
lex2.add('perl-pod', PodGrammar)
class StringGrammar(Grammar): class StringGrammar(Grammar):
rules = [ rules = [
PatternRule(name=r'octal', pattern=r'\\[0-7]{3}'), PatternRule(r'octal', r'\\[0-7]{3}'),
PatternRule(name=r'escaped', pattern=r'\\.'), PatternRule(r'escaped', r'\\.'),
PatternRule(name=r'deref', pattern=r"\$+[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*(?:->{\$?(?:[a-zA-Z_][a-zA-Z_0-9]*|'(?:\\.|[^'\\])*'|\"(\\.|[^\\\"])*\")}|->\[\$?[0-9a-zA-Z_]+\])+"), PatternRule(r'deref', r"\$+[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*(?:->{\$?(?:[a-zA-Z_][a-zA-Z_0-9]*|'(?:\\.|[^'\\])*'|\"(\\.|[^\\\"])*\")}|->\[\$?[0-9a-zA-Z_]+\])+"),
PatternRule(name=r'length', pattern=r"\$#[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"), PatternRule(r'length', r"\$#[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"),
ContextPatternRule(name=r'scalar', pattern=r"\$[^A-Za-z0-9 %(delim)s](?![A-Za-z0-9_])", fallback=r"\$[^A-Za-z0-9 ](?![A-Za-z0-9_])"), ContextPatternRule(r'scalar', r"\$[^A-Za-z0-9 %(delim)s](?![A-Za-z0-9_])", r"\$[^A-Za-z0-9 ](?![A-Za-z0-9_])"),
PatternRule(name=r'scalar', pattern=r"\$\$*[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"), PatternRule(r'scalar', r"\$\$*[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"),
PatternRule(name=r'cast', pattern=r"[\$\@\%\&]{.*?}"), PatternRule(r'cast', r"[\$\@\%\&]{.*?}"),
PatternRule(name=r'array', pattern=r"@\$*[A-Za-z_](?:[A-Za-z0-9_]|::)*"), PatternRule(r'array', r"@\$*[A-Za-z_](?:[A-Za-z0-9_]|::)*"),
] ]
lex2.add('perl-string', StringGrammar)
g = Grammar()
pg = PodGrammar()
sg = StringGrammar()
class PerlGrammar(Grammar): class PerlGrammar(Grammar):
rules = [ rules = [
RegionRule(name=r'heredoc1', start=r"<<(?P<heredoc>[a-zA-Z0-9_]+) *;", grammar=sg, end=r'^%(heredoc)s$'), RegionRule(r'heredoc1', r"<<(?P<heredoc>[a-zA-Z0-9_]+) *;", lex2.get('perl-string'), r'^%(heredoc)s$'),
RegionRule(name=r'heredoc1', start=r'<< *"(?P<heredoc>[a-zA-Z0-9_]+)" *;', grammar=sg, end=r'^%(heredoc)s$'), RegionRule(r'heredoc1', r'<< *"(?P<heredoc>[a-zA-Z0-9_]+)" *;', lex2.get('perl-string'), r'^%(heredoc)s$'),
RegionRule(name=r'heredoc2', start=r"<< *'(?P<heredoc>[a-zA-Z0-9_]+)' *;", grammar=g, end=r'^%(heredoc)s$'), RegionRule(r'heredoc2', r"<< *'(?P<heredoc>[a-zA-Z0-9_]+)' *;", lex2.get('null'), r'^%(heredoc)s$'),
RegionRule(name=r'eval_heredoc', start=r"<< *`(?P<heredoc>[a-zA-Z0-9_]+)` *;", grammar=sg, end=r'^%(heredoc)s$'), RegionRule(r'eval_heredoc', r"<< *`(?P<heredoc>[a-zA-Z0-9_]+)` *;", lex2.get('perl-string'), r'^%(heredoc)s$'),
RegionRule(name=r'endblock', start=r"^__END__|__DATA__ *$", grammar=g, end=r''), RegionRule(r'endblock', r"^__END__|__DATA__ *$", lex2.get('null'), r''),
RegionRule(name=r'pod', start=r'^=[a-zA-Z0-9_]+', grammar=pg, end=r'^=cut'), RegionRule(r'pod', r'^=[a-zA-Z0-9_]+', lex2.get('perl-pod'), r'^=cut'),
PatternRule(name=r'comment', pattern=r'#.*$'), PatternRule(r'comment', r'#.*$'),
RegionRule(name=r'string1', start=r'"', grammar=sg, end=r'"'), RegionRule(r'string1', r'"', lex2.get('perl-string'), r'"'),
RegionRule(name=r'string2', start=r"'", grammar=g, end=r"'"), RegionRule(r'string2', r"'", lex2.get('null'), r"'"),
RegionRule(name=r'evalstring', start=r"`", grammar=sg, end=r"`"), RegionRule(r'evalstring', r"`", lex2.get('perl-string'), r"`"),
PatternRule(name=r'number', pattern=r'0?\.[0-9]+|[0-9]+(?:\.[0-9]+)?'), PatternRule(r'number', r'0?\.[0-9]+|[0-9]+(?:\.[0-9]+)?'),
PatternRule(name=r'keyword', pattern=r"(?<!->)(?:STDIN|STDERR|STDOUT|continue|do|else|elsif|eval|foreach|for|if|last|my|next|our|package|require|return|sub|undef|unless|until|use|while)(?![a-zA-Z0-9_])"), PatternRule(r'keyword', r"(?<!->)(?:STDIN|STDERR|STDOUT|continue|do|else|elsif|eval|foreach|for|if|last|my|next|our|package|require|return|sub|undef|unless|until|use|while)(?![a-zA-Z0-9_])"),
PatternRule(name=r'hash_key', pattern=r'(?<={)[A-Za-z0-9_]+(?=})'), PatternRule(r'hash_key', r'(?<={)[A-Za-z0-9_]+(?=})'),
PatternRule(name=r'hash_key', pattern=r'[A-Za-z0-9_]+(?= *=>)'), PatternRule(r'hash_key', r'[A-Za-z0-9_]+(?= *=>)'),
PatternRule(name=r'length', pattern=r"\$#[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"), PatternRule(r'length', r"\$#[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"),
PatternRule(name=r'cast', pattern=r'[\$\@\%\^\&](?= *{)'), PatternRule(r'cast', r'[\$\@\%\^\&](?= *{)'),
PatternRule(name=r'scalar', pattern=r"\$[][><ab/'\"_@\?#\$!%^|&*()](?![A-Za-z0-9_])"), PatternRule(r'scalar', r"\$[][><ab/'\"_@\?#\$!%^|&*()](?![A-Za-z0-9_])"),
PatternRule(name=r'array', pattern=r"@_"), PatternRule(r'array', r"@_"),
PatternRule(name=r'function', pattern=r"\$\$*[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*(?=-> *\()"), PatternRule(r'function', r"\$\$*[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*(?=-> *\()"),
PatternRule(name=r'scalar', pattern=r"\$\$*[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"), PatternRule(r'scalar', r"\$\$*[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"),
PatternRule(name=r'array', pattern=r"@\$*[A-Za-z_](?:[A-Za-z0-9_]|::)*"), PatternRule(r'array', r"@\$*[A-Za-z_](?:[A-Za-z0-9_]|::)*"),
PatternRule(name=r'hash', pattern=r"%\$*[A-Za-z_](?:[A-Za-z0-9_]|::)*"), PatternRule(r'hash', r"%\$*[A-Za-z_](?:[A-Za-z0-9_]|::)*"),
PatternRule(name=r'deref', pattern=r"[@%\$&\*](?={)"), PatternRule(r'deref', r"[@%\$&\*](?={)"),
# match regexes # match regexes
RegionRule(name=r'match', start=r'(?:(?<==~)|(?<=!~)|(?<=\()|(?<=split)) *(?P<delim>/)', grammar=sg, end=r'/[a-z]*'), RegionRule(r'match', r'(?:(?<==~)|(?<=!~)|(?<=\()|(?<=split)) *(?P<delim>/)', lex2.get('perl-string'), r'/[a-z]*'),
RegionRule(name=r'match', start=r'm *(?P<delim>[^ #a-zA-Z0-9_])', grammar=sg, end=r'%(delim)s[a-z]*'), RegionRule(r'match', r'm *(?P<delim>[^ #a-zA-Z0-9_])', lex2.get('perl-string'), r'%(delim)s[a-z]*'),
RegionRule(name=r'match', start=r'm(?P<delim>#)', grammar=sg, end=r'#[a-z]*'), RegionRule(r'match', r'm(?P<delim>#)', lex2.get('perl-string'), r'#[a-z]*'),
# replace regexes # replace regexes
DualRegionRule(name=r'replace', start=r's *(?P<delim>[^ a-zA-Z0-9_])', grammar1=sg, middle=r'%(delim)s', grammar2=sg, end=r'%(delim)s[a-z]*'), DualRegionRule(r'replace', r's *(?P<delim>[^ a-zA-Z0-9_])', lex2.get('perl-string'), r'%(delim)s', lex2.get('perl-string'), r'%(delim)s[a-z]*'),
DualRegionRule(name=r'replace', start=r's(?P<delim>#)', grammar1=sg, middle=r'#', grammar2=sg, end=r'#[a-z]*'), DualRegionRule(r'replace', r's(?P<delim>#)', lex2.get('perl-string'), r'#', lex2.get('perl-string'), r'#[a-z]*'),
# translate operator # translate operator
DualRegionRule(name=r'translate', start=r'(?:y|tr) *(?P<delim>[^ a-zA-Z0-9_])', grammar1=g, middle=r'%(delim)s', grammar2=g, end=r'%(delim)s[a-z]*'), DualRegionRule(r'translate', r'(?:y|tr) *(?P<delim>[^ a-zA-Z0-9_])', lex2.get('null'), r'%(delim)s', lex2.get('null'), r'%(delim)s[a-z]*'),
DualRegionRule(name=r'translate', start=r'(?:y|tr)#', grammar1=g, middle=r'#', grammar2=g, end=r'#[a-z]*'), DualRegionRule(r'translate', r'(?:y|tr)#', lex2.get('null'), r'#', lex2.get('null'), r'#[a-z]*'),
# some more basic stuff # some more basic stuff
PatternRule(name=r'package', pattern=r"(?<=package )(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"), PatternRule(r'package', r"(?<=package )(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"),
PatternRule(name=r'sub', pattern=r"(?<=sub )[a-zA-Z_][a-zA-Z_0-9]*"), PatternRule(r'sub', r"(?<=sub )[a-zA-Z_][a-zA-Z_0-9]*"),
PatternRule(name=r'use', pattern=r"(?<=use )(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"), PatternRule(r'use', r"(?<=use )(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"),
PatternRule(name=r'require', pattern=r"(?<=require )(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"), PatternRule(r'require', r"(?<=require )(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"),
PatternRule(name=r'label', pattern=r'[a-zA-Z_][a-zA-Z0-9_]*:(?!:)'), PatternRule(r'label', r'[a-zA-Z_][a-zA-Z0-9_]*:(?!:)'),
PatternRule(name=r'method', pattern=r"(?<=->)[a-zA-Z_][a-zA-Z_0-9]*"), PatternRule(r'method', r"(?<=->)[a-zA-Z_][a-zA-Z_0-9]*"),
PatternRule(name=r'function', pattern=r"&\$*(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"), PatternRule(r'function', r"&\$*(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"),
PatternRule(name=r'builtin', pattern=r"(?<!->)&?(?:write|warn|wantarray|waitpid|wait|vec|values|utime|use|untie|unshift|unpack|unlink|undef|umask|ucfirst|uc|truncate|times|time|tied|tie|telldir|tell|syswrite|system|sysseek|sysread|sysopen|syscall|symlink|substr|sub|study|stat|srand|sqrt|sprintf|split|splice|sort|socketpair|socket|sleep|sin|shutdown|shmwrite|shmread|shmget|shmctl|shift|setsockopt|setservent|setpwent|setprotoent|setpriority|setpgrp|setnetent|sethostent|setgrent|send|semop|semget|semctl|select|seekdir|seek|scalar|rmdir|rindex|rewinddir|reverse|return|reset|require|rename|ref|redo|recv|readpipe|readlink|readline|readdir|read|rand|quotemeta|push|prototype|printf|print|pos|pop|pipe|package|pack|our|ord|opendir|open|oct|no|next|my|msgsnd|msgrcv|msgget|msgctl|mkdir|map|lstat|log|lock|localtime|local|listen|link|length|lcfirst|lc|last|kill|keys|join|ioctl|int|index|import|hex|grep|goto|gmtime|glob|getsockopt|getsockname|getservent|getservbyport|getservbyname|getpwuid|getpwnam|getpwent|getprotoent|getprotobynumber|getprotobyname|getpriority|getppid|getpgrp|getpeername|getnetent|getnetbyname|getnetbyaddr|getlogin|gethostent|gethostbyname|gethostbyaddr|getgrnam|getgrgid|getgrent|getc|formline|format|fork|flock|fileno|fcntl|exp|exit|exists|exec|eval|eof|endservent|endpwent|endprotoent|endnetent|endhostent|endgrent|each|dump|do|die|delete|defined|dbmopen|dbmclose|crypt|cos|continue|connect|closedir|close|chroot|chr|chown|chop|chomp|chmod|chdir|caller|bless|binmode|bind|atan2|alarm|accept|abs)(?![a-zA-Z0-9_])"), PatternRule(r'builtin', 
r"(?<!->)&?(?:write|warn|wantarray|waitpid|wait|vec|values|utime|use|untie|unshift|unpack|unlink|undef|umask|ucfirst|uc|truncate|times|time|tied|tie|telldir|tell|syswrite|system|sysseek|sysread|sysopen|syscall|symlink|substr|sub|study|stat|srand|sqrt|sprintf|split|splice|sort|socketpair|socket|sleep|sin|shutdown|shmwrite|shmread|shmget|shmctl|shift|setsockopt|setservent|setpwent|setprotoent|setpriority|setpgrp|setnetent|sethostent|setgrent|send|semop|semget|semctl|select|seekdir|seek|scalar|rmdir|rindex|rewinddir|reverse|return|reset|require|rename|ref|redo|recv|readpipe|readlink|readline|readdir|read|rand|quotemeta|push|prototype|printf|print|pos|pop|pipe|package|pack|our|ord|opendir|open|oct|no|next|my|msgsnd|msgrcv|msgget|msgctl|mkdir|map|lstat|log|lock|localtime|local|listen|link|length|lcfirst|lc|last|kill|keys|join|ioctl|int|index|import|hex|grep|goto|gmtime|glob|getsockopt|getsockname|getservent|getservbyport|getservbyname|getpwuid|getpwnam|getpwent|getprotoent|getprotobynumber|getprotobyname|getpriority|getppid|getpgrp|getpeername|getnetent|getnetbyname|getnetbyaddr|getlogin|gethostent|gethostbyname|gethostbyaddr|getgrnam|getgrgid|getgrent|getc|formline|format|fork|flock|fileno|fcntl|exp|exit|exists|exec|eval|eof|endservent|endpwent|endprotoent|endnetent|endhostent|endgrent|each|dump|do|die|delete|defined|dbmopen|dbmclose|crypt|cos|continue|connect|closedir|close|chroot|chr|chown|chop|chomp|chmod|chdir|caller|bless|binmode|bind|atan2|alarm|accept|abs)(?![a-zA-Z0-9_])"),
# quote operator # quote operator
RegionRule(name=r'quoted', start=r'q[rqwx]? *\(', grammar=g, end=r'\)'), RegionRule(r'quoted', r'q[rqwx]? *\(', lex2.get('null'), r'\)'),
RegionRule(name=r'quoted', start=r'q[rqwx]? *{', grammar=g, end=r'}'), RegionRule(r'quoted', r'q[rqwx]? *{', lex2.get('null'), r'}'),
RegionRule(name=r'quoted', start=r'q[rqwx]? *<', grammar=g, end=r'>'), RegionRule(r'quoted', r'q[rqwx]? *<', lex2.get('null'), r'>'),
RegionRule(name=r'quoted', start=r'q[rqwx]? *\[', grammar=g, end=r'\]'), RegionRule(r'quoted', r'q[rqwx]? *\[', lex2.get('null'), r'\]'),
RegionRule(name=r'quoted', start=r'q[rqwx]? *(?P<delim>[^ #])', grammar=g, end=r'%(delim)s'), RegionRule(r'quoted', r'q[rqwx]? *(?P<delim>[^ #])', lex2.get('null'), r'%(delim)s'),
RegionRule(name=r'quoted', start=r'q[rqwx]?#', grammar=g, end=r'#'), RegionRule(r'quoted', r'q[rqwx]?#', lex2.get('null'), r'#'),
PatternRule(name=r'function', pattern=r"(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*(?= *\()"), PatternRule(r'function', r"(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*(?= *\()"),
PatternRule(name=r'class', pattern=r"(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*(?=->)"), PatternRule(r'class', r"(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*(?=->)"),
# some basic stuff # some basic stuff
PatternRule(name=r'delimiter', pattern=r"[,;=\?(){}\[\]]|->|=>|(?<!:):(?!=:)"), PatternRule(r'delimiter', r"[,;=\?(){}\[\]]|->|=>|(?<!:):(?!=:)"),
PatternRule(name=r'operator', pattern=r"\+=|-=|\*=|/=|//=|%=|&=\|\^=|>>=|<<=|\*\*="), PatternRule(r'operator', r"\+=|-=|\*=|/=|//=|%=|&=\|\^=|>>=|<<=|\*\*="),
PatternRule(name=r'operator', pattern=r"\+\+|\+|<=>|<>|<<|<=|<|-|>>|>=|>|\*\*|\*|&&|&|\|\||\||/|\^|==|//|~|=~|!~|!=|%|!|\.|x(?![a-zA-Z_])"), PatternRule(r'operator', r"\+\+|\+|<=>|<>|<<|<=|<|-|>>|>=|>|\*\*|\*|&&|&|\|\||\||/|\^|==|//|~|=~|!~|!=|%|!|\.|x(?![a-zA-Z_])"),
PatternRule(name=r'operator2', pattern=r"(?:xor|or|not|ne|lt|le|gt|ge|eq|cmp|and)(?![a-zA-Z_])"), PatternRule(r'operator2', r"(?:xor|or|not|ne|lt|le|gt|ge|eq|cmp|and)(?![a-zA-Z_])"),
PatternRule(name=r'bareword', pattern=r'(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*') PatternRule(r'bareword', r'(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*')
] ]
lex2.add('perl', PerlGrammar)
class PerlTabber(tab2.StackTabber): class PerlTabber(tab2.StackTabber):
def is_base(self, y): def is_base(self, y):
@ -166,7 +165,7 @@ class PerlTabber(tab2.StackTabber):
class Perl(mode2.Fundamental): class Perl(mode2.Fundamental):
tabbercls = PerlTabber tabbercls = PerlTabber
grammar = PerlGrammar() grammar = lex2.get('perl')
opentoken = 'delimiter' opentoken = 'delimiter'
opentags = {'(': ')', '[': ']', '{': '}'} opentags = {'(': ')', '[': ']', '{': '}'}
closetoken = 'delimiter' closetoken = 'delimiter'

View File

@ -6,35 +6,37 @@ from lex2 import Grammar, PatternRule, RegionRule
class StringGrammar(Grammar): class StringGrammar(Grammar):
rules = [ rules = [
PatternRule(name=r'octal', pattern=r'\\[0-7]{3}'), PatternRule(r'octal', r'\\[0-7]{3}'),
PatternRule(name=r'escaped', pattern=r'\\.'), PatternRule(r'escaped', r'\\.'),
] ]
lex2.grammars['string-py'] = StringGrammar
class PythonGrammar(Grammar): class PythonGrammar(Grammar):
rules = [ rules = [
PatternRule(name=r'functionname', pattern=r'(?<=def )[a-zA-Z_][a-zA-Z0-9_]*'), PatternRule(r'functionname', r'(?<=def )[a-zA-Z_][a-zA-Z0-9_]*'),
PatternRule(name=r'classname', pattern=r'(?<=class )[a-zA-Z_][a-zA-Z0-9_]*'), PatternRule(r'classname', r'(?<=class )[a-zA-Z_][a-zA-Z0-9_]*'),
PatternRule(name=r'reserved', pattern=r'(?:True|None|False|Exception|self)(?![a-zA-Z0-9_])'), PatternRule(r'reserved', r'(?:True|None|False|Exception|self)(?![a-zA-Z0-9_])'),
PatternRule(name=r'keyword', pattern=r'(?:yield|while|try|return|raise|print|pass|or|not|lambda|is|in|import|if|global|from|for|finally|exec|except|else|elif|del|def|continue|class|break|assert|as|and)(?![a-zA-Z0-9_])'), PatternRule(r'keyword', r'(?:yield|while|try|return|raise|print|pass|or|not|lambda|is|in|import|if|global|from|for|finally|exec|except|else|elif|del|def|continue|class|break|assert|as|and)(?![a-zA-Z0-9_])'),
PatternRule(name=r"builtin", pattern=r'(?<!\.)(?:zip|xrange|vars|unicode|unichr|type|tuple|super|sum|str|staticmethod|sorted|slice|setattr|set|round|repr|reduce|raw_input|range|property|pow|ord|open|oct|object|max|min|map|long|locals|list|len|iter|issubclass|isinstance|int|input|id|hex|hash|hasattr|globals|getattr|frozenset|float|filter|file|execfile|eval|enumerate|divmod|dir|dict|delattr|complex|compile|coerce|cmp|classmethod|chr|callable|bool)(?![a-zA-Z0-9_])'), PatternRule(r"builtin", r'(?<!\.)(?:zip|xrange|vars|unicode|unichr|type|tuple|super|sum|str|staticmethod|sorted|slice|setattr|set|round|repr|reduce|raw_input|range|property|pow|ord|open|oct|object|max|min|map|long|locals|list|len|iter|issubclass|isinstance|int|input|id|hex|hash|hasattr|globals|getattr|frozenset|float|filter|file|execfile|eval|enumerate|divmod|dir|dict|delattr|complex|compile|coerce|cmp|classmethod|chr|callable|bool)(?![a-zA-Z0-9_])'),
PatternRule(name=r'methodcall', pattern=r'(?<=\. )[a-zA-Z_][a-zA-Z0-9_]*(?= *\()'), PatternRule(r'methodcall', r'(?<=\. )[a-zA-Z_][a-zA-Z0-9_]*(?= *\()'),
PatternRule(name=r'functioncall', pattern=r'[a-zA-Z_][a-zA-Z0-9_]*(?= *\()'), PatternRule(r'functioncall', r'[a-zA-Z_][a-zA-Z0-9_]*(?= *\()'),
PatternRule(name=r'system_identifier', pattern=r'__[a-zA-Z0-9_]+__'), PatternRule(r'system_identifier', r'__[a-zA-Z0-9_]+__'),
PatternRule(name=r'private_identifier', pattern=r'__[a-zA-Z0-9_]*'), PatternRule(r'private_identifier', r'__[a-zA-Z0-9_]*'),
PatternRule(name=r'hidden_identifier', pattern=r'_[a-zA-Z0-9_]*'), PatternRule(r'hidden_identifier', r'_[a-zA-Z0-9_]*'),
PatternRule(name=r'identifier', pattern=r'[a-zA-Z_][a-zA-Z0-9_]*'), PatternRule(r'identifier', r'[a-zA-Z_][a-zA-Z0-9_]*'),
PatternRule(name=r'delimiter', pattern=r'\(|\)|\[|\]|{|}|@|,|:|\.|`|=|;|\+=|-=|\*=|/=|//=|%=|&=|\|=|\^=|>>=|<<=|\*\*='), PatternRule(r'delimiter', r'\(|\)|\[|\]|{|}|@|,|:|\.|`|=|;|\+=|-=|\*=|/=|//=|%=|&=|\|=|\^=|>>=|<<=|\*\*='),
PatternRule(name=r"operator", pattern=r"\+|<>|<<|<=|<|-|>>|>=|>|\*\*|&|\*|\||/|\^|==|//|~|!=|%"), PatternRule(r"operator", r"\+|<>|<<|<=|<|-|>>|>=|>|\*\*|&|\*|\||/|\^|==|//|~|!=|%"),
PatternRule(name=r"integer", pattern=r"(?<![\.0-9a-zA-Z_])(?:0|[1-9][0-9]*|0[0-7]+|0[xX][0-9a-fA-F]+)[lL]?(?![\.0-9a-zA-Z_])"), PatternRule(r"integer", r"(?<![\.0-9a-zA-Z_])(?:0|[1-9][0-9]*|0[0-7]+|0[xX][0-9a-fA-F]+)[lL]?(?![\.0-9a-zA-Z_])"),
PatternRule(name=r"float", pattern=r"(?<![\.0-9a-zA-Z_])(?:[0-9]+\.[0-9]*|\.[0-9]+|(?:[0-9]|[0-9]+\.[0-9]*|\.[0-9]+)[eE][\+-]?[0-9]+)(?![\.0-9a-zA-Z_])"), PatternRule(r"float", r"(?<![\.0-9a-zA-Z_])(?:[0-9]+\.[0-9]*|\.[0-9]+|(?:[0-9]|[0-9]+\.[0-9]*|\.[0-9]+)[eE][\+-]?[0-9]+)(?![\.0-9a-zA-Z_])"),
PatternRule(name=r"imaginary", pattern=r"(?<![\.0-9a-zA-Z_])(?:[0-9]+|(?:[0-9]+\.[0-9]*|\.[0-9]+|(?:[0-9]|[0-9]+\.[0-9]*|\.[0-9]+)[eE][\+-]?[0-9]+)[jJ])(?![\.0-9a-zA-Z_])"), PatternRule(r"imaginary", r"(?<![\.0-9a-zA-Z_])(?:[0-9]+|(?:[0-9]+\.[0-9]*|\.[0-9]+|(?:[0-9]|[0-9]+\.[0-9]*|\.[0-9]+)[eE][\+-]?[0-9]+)[jJ])(?![\.0-9a-zA-Z_])"),
RegionRule(name=r'string', start=r'"""', grammar=StringGrammar(), end=r'"""'), RegionRule(r'string', r'"""', lex2.grammars['string-py'], r'"""'),
RegionRule(name=r'string', start=r"'''", grammar=StringGrammar(), end=r"'''"), RegionRule(r'string', r"'''", lex2.grammars['string-py'], r"'''"),
RegionRule(name=r'string', start=r'"', grammar=StringGrammar(), end=r'"'), RegionRule(r'string', r'"', lex2.grammars['string-py'], r'"'),
RegionRule(name=r'string', start=r"'", grammar=StringGrammar(), end=r"'"), RegionRule(r'string', r"'", lex2.grammars['string-py'], r"'"),
PatternRule(name=r'comment', pattern=r'#.*$'), PatternRule(r'comment', r'#.*$'),
PatternRule(name=r'continuation', pattern=r'\\$'), PatternRule(r'continuation', r'\\$'),
] ]
lex2.grammars['python'] = PythonGrammar
class PythonTabber(tab2.StackTabber): class PythonTabber(tab2.StackTabber):
endlevel_names = ('pass', 'return', 'yield', 'raise', 'break', 'continue') endlevel_names = ('pass', 'return', 'yield', 'raise', 'break', 'continue')
@ -164,7 +166,7 @@ class PythonTabber(tab2.StackTabber):
class Python(mode2.Fundamental): class Python(mode2.Fundamental):
tabbercls = PythonTabber tabbercls = PythonTabber
grammar = PythonGrammar() grammar = lex2.grammars['python']
opentoken = 'delimiter' opentoken = 'delimiter'
opentags = {'(': ')', '[': ']', '{': '}'} opentags = {'(': ')', '[': ']', '{': '}'}
closetoken = 'delimiter' closetoken = 'delimiter'
@ -178,8 +180,8 @@ class Python(mode2.Fundamental):
# add python-specific methods # add python-specific methods
self.add_action_and_bindings(PythonCheckSyntax(), ('C-c s',)) self.add_action_and_bindings(PythonCheckSyntax(), ('C-c s',))
self.add_action_and_bindings(PythonDictCleanup(), ('C-c h',)) self.add_action_and_bindings(PythonDictCleanup(), ('C-c h',))
self.add_action_and_bindings(PythonUpdateTags(), ('C-c t',)) #self.add_action_and_bindings(PythonUpdateTags(), ('C-c t',))
self.add_action_and_bindings(PythonTagComplete(), ('C-c k',)) #self.add_action_and_bindings(PythonTagComplete(), ('C-c k',))
# highlighting # highlighting
self.colors = { self.colors = {
'keyword': color.build('cyan', 'default'), 'keyword': color.build('cyan', 'default'),
@ -225,77 +227,77 @@ class PythonCheckSyntax(method.Method):
output = output + "\ncommand exit status: %d" % (status) output = output + "\ncommand exit status: %d" % (status)
w.application.data_buffer("python-syntax", output, switch_to=True) w.application.data_buffer("python-syntax", output, switch_to=True)
class PythonUpdateTags(method.Method): #class PythonUpdateTags(method.Method):
'''Update the CTag data associated with a python buffer''' # '''Update the CTag data associated with a python buffer'''
args = [method.Argument("lib", prompt="Module Base: ", datatype='path', # args = [method.Argument("lib", prompt="Module Base: ", datatype='path',
default=default.build_constant("."))] # default=default.build_constant("."))]
def _execute(self, w, **vargs): # def _execute(self, w, **vargs):
w.mode.ctagger = ctag_python.PythonCTagger() # w.mode.ctagger = ctag_python.PythonCTagger()
w.mode.ctagger.process_paths([vargs['lib']]) # w.mode.ctagger.process_paths([vargs['lib']])
w.application.set_error('Tag data updated') # w.application.set_error('Tag data updated')
#
class PythonTagComplete(method.Method): #class PythonTagComplete(method.Method):
'''Complete a symbol using tag data''' # '''Complete a symbol using tag data'''
def _execute(self, w, **vargs): # def _execute(self, w, **vargs):
if not w.mode.ctagger.packages: # if not w.mode.ctagger.packages:
w.application.methods['python-update-tags'].execute(w) # w.application.methods['python-update-tags'].execute(w)
return # return
#
cursor = w.logical_cursor() # cursor = w.logical_cursor()
b = w.buffer # b = w.buffer
line = b.lines[cursor.y] # line = b.lines[cursor.y]
end = cursor.x # end = cursor.x
start = cursor.x # start = cursor.x
#
word_chars = string.letters + string.digits + '_' # word_chars = string.letters + string.digits + '_'
if start == 0: # if start == 0:
w.application.set_error('walrus 1') # w.application.set_error('walrus 1')
return # return
#
c = line[start - 1] # c = line[start - 1]
if c == '(': # if c == '(':
w.application.set_error('goldfinch 1') # w.application.set_error('goldfinch 1')
return # return
elif c not in word_chars: # elif c not in word_chars:
w.application.set_error('walrus 2') # w.application.set_error('walrus 2')
return # return
#
while start > 0 and line[start - 1] in word_chars: # while start > 0 and line[start - 1] in word_chars:
start -= 1 # start -= 1
if start == end: # if start == end:
w.application.set_error('walrus 3') # w.application.set_error('walrus 3')
return # return
word = line[start:end] # word = line[start:end]
#
candidates = [] # candidates = []
seen = sets.Set() # seen = sets.Set()
for p in w.mode.ctagger.packages.iterkeys(): # for p in w.mode.ctagger.packages.iterkeys():
if p.startswith(word): # if p.startswith(word):
if p in seen: # if p in seen:
continue # continue
candidates.append(p) # candidates.append(p)
seen.add(p) # seen.add(p)
for e in w.mode.ctagger.entries.itervalues(): # for e in w.mode.ctagger.entries.itervalues():
if e.symbol.startswith(word): # if e.symbol.startswith(word):
if e.symbol in seen: # if e.symbol in seen:
continue # continue
candidates.append(e.symbol) # candidates.append(e.symbol)
seen.add(e.symbol) # seen.add(e.symbol)
if len(candidates) == 0: # if len(candidates) == 0:
w.application.set_error('No match: %r' % word) # w.application.set_error('No match: %r' % word)
return # return
elif len(candidates) == 1: # elif len(candidates) == 1:
newword = candidates[0] # newword = candidates[0]
if word == newword: # if word == newword:
w.application.set_error('Already completed!') # w.application.set_error('Already completed!')
return # return
else: # else:
w.application.set_error('Unique match!') # w.application.set_error('Unique match!')
else: # else:
newword = completer.find_common_string(candidates) # newword = completer.find_common_string(candidates)
w.application.set_error('Ambiguous match: %r' % (candidates)) # w.application.set_error('Ambiguous match: %r' % (candidates))
b.delete_string(Point(start, cursor.y), Point(end, cursor.y)) # b.delete_string(Point(start, cursor.y), Point(end, cursor.y))
b.insert_string(Point(start, cursor.y), newword) # b.insert_string(Point(start, cursor.y), newword)
class PythonDictCleanup(method.Method): class PythonDictCleanup(method.Method):
'''Align assignment blocks and literal dictionaries''' '''Align assignment blocks and literal dictionaries'''

View File

@ -1,28 +1,29 @@
import color, mode2 import color, lex2, mode2
from lex2 import Grammar, PatternRule, RegionRule from lex2 import Grammar, PatternRule, RegionRule
class OpenTagGrammar(Grammar):
    """Grammar for the interior of an XML open tag (attributes/values).

    NOTE(review): reconstructed from a garbled side-by-side diff where each
    line held both the old and new revision; this is the post-change
    (positional-argument) form of the rules.
    """
    # Rules are tried in order, so the more specific 'attrname' (which
    # requires a trailing '=' via lookahead) must precede the generic 'name'.
    rules = [
        # Quoted attribute values; lex2.grammars['null'] means no sub-lexing
        # inside the string region.
        RegionRule(r'string', r'"', lex2.grammars['null'], r'"'),
        RegionRule(r'string', r"'", lex2.grammars['null'], r"'"),
        # Namespace prefix such as "xsl:".
        PatternRule(r'namespace', r'[a-zA-Z_]+:'),
        # Attribute name: lookahead for '=' distinguishes it from a bare name.
        PatternRule(r'attrname', r'[^ =>\n]+(?==)'),
        PatternRule(r'name', r'[^ =>\n]+'),
    ]
# Register under a stable key so other grammars (e.g. XMLGrammar) can refer
# to this grammar without a circular import.
lex2.grammars['xml-opentag'] = OpenTagGrammar
class XMLGrammar(Grammar):
    """Top-level XML grammar: comments, open tags, and close tags.

    NOTE(review): reconstructed from a garbled side-by-side diff; this is the
    post-change (positional-argument) form of the rules.
    """
    rules = [
        # <!-- ... --> comment regions, with no sub-lexing inside.
        RegionRule(r'comment', r'<!--', lex2.grammars['null'], r'-->'),
        # Open tag: delegates attribute lexing to the registered
        # 'xml-opentag' grammar; end pattern '/?>' also matches
        # self-closing tags.
        RegionRule(r'opentag', r'<', lex2.grammars['xml-opentag'], r'/?>'),
        # NOTE(review): '[ =>\n]+' matches only spaces/'='/'>'/newlines,
        # which looks wrong for a tag name — probably intended to be
        # '[^ =>\n]+'.  Preserved as-is from both diff sides; confirm
        # against the lexer's behavior before changing.
        PatternRule(r'closetag', r'< */ *[ =>\n]+ *>'),
    ]
# Register so modes can look the grammar up by name.
lex2.grammars['xml'] = XMLGrammar
class XML(mode2.Fundamental): class XML(mode2.Fundamental):
grammar = XMLGrammar grammar = lex2.grammars['xml']
def __init__(self, w): def __init__(self, w):
mode2.Fundamental.__init__(self, w) mode2.Fundamental.__init__(self, w)
#self.add_bindings('close-paren', (')',))
#self.add_bindings('close-brace', ('}',))
#self.add_bindings('close-bracket', (']',))
self.colors = { self.colors = {
'comment.start': color.build('red', 'default'), 'comment.start': color.build('red', 'default'),
'comment.null': color.build('red', 'default'), 'comment.null': color.build('red', 'default'),

View File

@ -16,8 +16,14 @@ def find_ranges(s, w, start=None, end=None):
(x2, y2) = (len(w.buffer.lines[-1]) - 1, len(w.buffer.lines) - 1) (x2, y2) = (len(w.buffer.lines[-1]) - 1, len(w.buffer.lines) - 1)
else: else:
(x2, y2) = end.xy() (x2, y2) = end.xy()
#if x2 == 0:
# y2 -= 1
# x2 = len(w.buffer.lines[-1]) - 1
#else:
# x2 -= 1
ranges = [] ranges = []
#while y <= y2:
while y <= y2: while y <= y2:
if y == y2: if y == y2:
limit = x2 limit = x2