dynamically parsed nested regions, ahoy
--HG-- branch : pmacs2
parent ecbabf1076
commit a8ecc35701
@@ -7,8 +7,9 @@ sub foo {
         unless 9 && 3;
 }
 
+#@@:string:mode_sql.SqlGrammar
 my $foo = {
     'foo',
     'drop table ',
     'bar',
 };
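The hunk above is the test payload for this commit's feature: the magic comment #@@:string:mode_sql.SqlGrammar asks the lexer to lex the next string region with mode_sql.SqlGrammar instead of the grammar the rule was declared with. A quick, self-contained check of how the comment decomposes, using the exact pattern OverridePatternRule is registered with further down (only the sample line is invented):

    import re

    OVERRIDE = re.compile(r'#@@:(?P<token>[.a-zA-Z0-9_]+):(?P<grammar>[.a-zA-Z0-9_]+) *$')

    m = OVERRIDE.match('#@@:string:mode_sql.SqlGrammar')
    assert m is not None
    assert m.group('token') == 'string'                 # which region to override
    assert m.group('grammar') == 'mode_sql.SqlGrammar'  # dotted path to the grammar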
73 lex3.py
@@ -3,14 +3,16 @@ import regex, util
 
 class Token(object):
     def __init__(self, name, rule, y, x, s, parent=None, matchd={}, link=None):
         self.name = name
         self.rule = rule
         self.y = y
         self.x = x
         self.string = s
         self.parent = parent
         self.matchd = matchd
         self.link = link
+        self.loverride = None
+        self.goverride = None
         assert parent is None or hasattr(parent, 'name'), 'oh no %r' % parent
     def parents(self):
         if self.parent is not None:
@@ -87,10 +89,26 @@ class PatternRule(Rule):
         if m:
             yield self.make_token(lexer, m.group(0), self.name, parent, m.groupdict())
         raise StopIteration
 
 class NocasePatternRule(PatternRule):
     reflags = re.IGNORECASE
 
+class OverridePatternRule(PatternRule):
+    def lex(self, lexer, parent, m):
+        if m:
+            a = lexer.mode.window.application
+            d = m.groupdict()
+            try:
+                names = d['grammar'].split('.')
+                grammar = a.globals()[names.pop(0)]
+                for name in names:
+                    grammar = getattr(grammar, name)
+                lexer.loverride['%s.start' % d['token']] = grammar
+            except Exception:
+                #raise
+                pass
+            yield self.make_token(lexer, m.group(0), self.name, parent, d)
+        raise StopIteration
 
 class ContextPatternRule(PatternRule):
     def __init__(self, name, pattern, fallback):
         Rule.__init__(self, name)
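The core of OverridePatternRule.lex is the dotted-name resolution: the first segment of d['grammar'] is looked up in the application's globals and the remaining segments are walked with getattr, and the resulting grammar is parked in lexer.loverride under the token's .start name. A standalone sketch of that walk, with a plain dict standing in for a.globals() (the stub module and class are invented for illustration):

    import types

    def resolve(globals_, dotted):
        # 'mode_sql.SqlGrammar' -> globals_['mode_sql'], then getattr() the rest
        names = dotted.split('.')
        obj = globals_[names.pop(0)]
        for name in names:
            obj = getattr(obj, name)
        return obj

    mode_sql = types.ModuleType('mode_sql')    # invented stand-in for the real module
    class SqlGrammar(object): pass
    mode_sql.SqlGrammar = SqlGrammar

    loverride = {}
    loverride['%s.start' % 'string'] = resolve({'mode_sql': mode_sql}, 'mode_sql.SqlGrammar')
    assert loverride['string.start'] is SqlGrammar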
@@ -150,6 +168,7 @@ class RegionRule(Rule):
             self.pairs.append((grammar, pattern))
         if len(args) == 1:
             self.pairs.append((grammar, None))
 
     def match(self, lexer, parent):
         return self.start_re.match(self.get_line(lexer), lexer.x)
     def lex(self, lexer, parent, m):
@@ -188,7 +207,21 @@ class RegionRule(Rule):
         while i < len(self.pairs):
             # for each one, we will compile our stop-regex, and figure out the
             # name of the stop token to be created if this stop-regex matches.
             grammar = self.pairs[i][0]
+            fqname = toresume[0].fqname()
+            if fqname in lexer.loverride:
+                grammar = lexer.loverride[fqname]
+                del lexer.loverride[fqname]
+                #toresume[0].loverride = grammar
+            elif fqname in lexer.goverride:
+                grammar = lexer.goverride[fqname]
+                #toresume[0].goverride = grammar
+            #elif toresume[0].loverride:
+            #    grammar = toresume[0].loverride
+            #elif toresume[0].goverride:
+            #    grammar = toresume[0].goverride
+            else:
+                grammar = self.pairs[i][0]
 
             if self.pairs[i][1]:
                 stopre = re.compile(self.pairs[i][1] % matchd, self.reflags)
             else:
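Note the asymmetry the lookup above encodes: a loverride entry is deleted as soon as it is used, so a magic comment only redirects the very next matching region, while goverride entries survive and keep applying. A toy model of that policy (all names illustrative, plain strings standing in for grammar objects):

    def pick_grammar(default, fqname, loverride, goverride):
        if fqname in loverride:
            return loverride.pop(fqname)         # one-shot: consumed on use
        return goverride.get(fqname, default)    # persistent global override

    lov = {'string.start': 'SqlGrammar'}
    assert pick_grammar('StringGrammar', 'string.start', lov, {}) == 'SqlGrammar'
    # the local override has been consumed, so the default applies again:
    assert pick_grammar('StringGrammar', 'string.start', lov, {}) == 'StringGrammar'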
@@ -301,28 +334,33 @@ class Grammar:
 grammar = Grammar()
 
 class Lexer:
-    def __init__(self, name, grammar):
-        self.name = name
-        self.grammar = grammar
-        self.y = 0
-        self.x = 0
-        self.lines = None
+    def __init__(self, mode, grammar):
+        self.mode = mode
+        self.grammar = grammar
+        self.y = 0
+        self.x = 0
+        self.lines = None
+        self.loverride = {}
+        self.goverride = {}
         assert self.grammar.rules
     def get_line(self):
         return self.lines[self.y] + '\n'
     def lex(self, lines, y=0, x=0):
         self.action = 'lex'
         self.y = y
         self.x = x
         self.lines = lines
         self.tokens = []
+        self.loverride = {}
         for t in self._lex():
             yield t
         raise StopIteration
     def resume(self, lines, y, x, token):
         self.action = 'resume'
         self.y = y
         self.x = x
         self.lines = lines
         self.tokens = []
+        self.loverride = {}
         toresume = token.parents()
 
         i = 1
@@ -370,4 +408,3 @@ class Lexer:
             self.y += 1
             self.x = 0
         raise StopIteration
-
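The constructor change above is what makes the override rule possible at all: Lexer now holds the mode object rather than just its name, so a rule can walk lexer.mode.window.application to reach the editor's globals. A minimal illustration of that chain, with invented stub classes in place of the real pmacs ones:

    class Application(object):                  # stub, not the real pmacs class
        def globals(self): return {'mode_sql': None}

    class Window(object):                       # stub
        def __init__(self): self.application = Application()

    class Mode(object):                         # stub
        def __init__(self): self.window = Window()

    class Lexer(object):                        # just the new __init__'s shape
        def __init__(self, mode, grammar):
            self.mode, self.grammar = mode, grammar
            self.loverride, self.goverride = {}, {}

    lexer = Lexer(Mode(), grammar=None)
    assert 'mode_sql' in lexer.mode.window.application.globals()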
4 mode2.py
@@ -7,7 +7,7 @@ DEBUG = False
 class ActionError(Exception):
     pass
 
-class Handler:
+class Handler(object):
     def __init__(self):
         self.prefixes = sets.Set(["C-x", "C-c", "C-u"])
         self.last_sequence = ''
@@ -157,7 +157,7 @@ class Fundamental(Handler):
 
         # lexing for highlighting, etc.
         if self.grammar:
-            self.lexer = Lexer(self.name(), self.grammar)
+            self.lexer = Lexer(self, self.grammar)
 
         # tab handling
         if self.tabbercls:
@@ -1,7 +1,7 @@
 import re, sets, string, sys
 import color, commands, default, method, mode2, regex, tab2
 from point2 import Point
-from lex3 import Grammar, PatternRule, ContextPatternRule, RegionRule
+from lex3 import Grammar, PatternRule, ContextPatternRule, RegionRule, OverridePatternRule
 from method import Argument, Method
 
 class PodGrammar(Grammar):
@@ -35,6 +35,7 @@ class PerlGrammar(Grammar):
         RegionRule(r'endblock', r"^__END__|__DATA__ *$", Grammar, r''),
         RegionRule(r'pod', r'^=[a-zA-Z0-9_]+', PodGrammar, r'^=cut'),
 
+        OverridePatternRule(r'comment', r'#@@:(?P<token>[.a-zA-Z0-9_]+):(?P<grammar>[.a-zA-Z0-9_]+) *$'),
         PatternRule(r'comment', r'#.*$'),
         RegionRule(r'string', r'"', StringGrammar, r'"'),
         RegionRule(r'string', r"'", Grammar, r"'"),
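One detail worth noticing in the rule list above (evidently the Perl mode's grammar): OverridePatternRule is registered before the generic PatternRule(r'comment', r'#.*$'). Since #@@:... is also a valid ordinary comment, the ordering is what lets the override fire first. A quick demonstration with the two patterns:

    import re

    override = re.compile(r'#@@:(?P<token>[.a-zA-Z0-9_]+):(?P<grammar>[.a-zA-Z0-9_]+) *$')
    generic  = re.compile(r'#.*$')

    line = '#@@:string:mode_sql.SqlGrammar'
    assert override.match(line)   # tried first, records the override
    assert generic.match(line)    # would also have matched, hence the ordering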