branch: pmacs2
moculus 2008-10-02 16:49:22 +00:00
parent 72e7eed05a
commit e47d74aa40
4 changed files with 249 additions and 145 deletions


@@ -2,7 +2,7 @@ import commands
 import color, mode, tab
 from lex import Grammar, PatternRule, RegionRule
 from mode.python import StringGrammar2
-from tab import Marker
+from tab import StackTabber2

 class RegexGrammar(Grammar):
     rules = [
@@ -37,37 +37,17 @@ class AwkGrammar(Grammar):
         PatternRule(r'eol', r'\n'),
     ]

-class AwkTabber(tab.StackTabber):
-    open_tokens = {'{': '}', '(': ')', '[': ']'}
-    close_tokens = {'}': '{', ')': '(', ']': '['}
-    def __init__(self, m):
-        self.mode = m
-        self.name = m.name()
-        self.lines = {}
-        self._reset()
-    def region_added(self, p, newlines):
-        self.lines = {}
-    def region_removed(self, p1, p2):
-        self.lines = {}
-    def get_level(self, y):
-        if y not in self.lines:
-            self._calc_level(y)
-        return self.lines.get(y)
-    def _calc_level(self, y):
-        target = y
-        while not self._is_base(y) and y > 0:
-            y -= 1
-        self._reset()
-        while y <= target:
-            self._save_curr_level()
-            self._handle_tokens(y)
-            y += 1
+class AwkTabber(StackTabber2):
+    open_tokens = {'delimiter': {'{': '}', '(': ')', '[': ']'}}
+    close_tokens = {'delimiter': {'}': '{', ')': '(', ']': '['}}
+    control_tokens = {
+        'keyword': {'if': 1, 'else': 1, 'while': 1, 'do': 1, 'for': 1},
+    }
+    end_at_eof = True
+    end_at_tokens = {}
     def _is_base(self, y):
         if y == 0:
             return True
         t = self._get_tokens(y)[0]
         if t.fqname() == 'awk_regex.start':
             return True
@@ -77,116 +57,11 @@ class AwkTabber(tab.StackTabber):
             return True
         else:
             return False
-    def _is_indent(self, t):
-        return t.name == 'spaces'
-    def _is_ignored(self, t):
-        return t.name in ('spaces', 'eol', 'comment')
-    def _reset(self):
-        self.record = {}
-        self.stack = []
-        self.markers = self.stack
-        self.curr_level = 0
-    def _get_curr_level(self):
-        if self.stack:
-            return self.stack[-1].level
-        else:
-            return 0
-    def _get_next_level(self):
-        return self._get_curr_level() + self.mode.tabwidth
-    def _save_curr_level(self):
-        self.curr_level = self._get_curr_level()
-    def _match(self, *names):
-        return self.stack and self.stack[-1].name in names
-    def _nomatch(self, *names):
-        return self.stack and self.stack[-1].name not in names
-    def _pop(self, *names):
-        if self._match(*names):
-            self.stack.pop()
-    def _pop_while(self, *names):
-        while self._match(*names):
-            self.stack.pop()
-    def _pop_until(self, *names):
-        while self._nomatch(*names):
-            self.stack.pop()
-    def _append(self, name, level):
-        self.stack.append(Marker(name, level))
-    def _append_unless(self, name, level):
-        if self._nomatch(name):
-            self.stack.append(Marker(name, level))
-    def _get_tokens(self, y):
-        return self.mode.window.buffer.highlights[self.name].tokens[y]
-    def _handle_tokens(self, y):
-        tokens = self._get_tokens(y)
-        assert tokens
-        start = int(self._is_indent(tokens[0]))
-        end = len(tokens) - 1
-        while end > 0 and self._is_ignored(tokens[end]):
-            end -= 1
-        for i in range(0, end + 1 - start):
-            token = tokens[start + i]
-            if self._is_ignored(token):
-                pass
-            elif self._is_close_token(token):
-                self._handle_close_token(y, tokens, start, end, i, token)
-            elif self._is_open_token(token):
-                self._handle_open_token(y, tokens, start, end, i, token)
-            else:
-                self._handle_other_token(y, tokens, start, end, i, token)
-        self.lines[y] = self.curr_level
-        self.record[y] = tuple(self.stack)
-    def _is_indent(self, token):
-        return token.name == 'spaces'
-    def _is_ignored(self, token):
-        return token.name in ('spaces', 'eol', 'comment')
-    def _is_close_token(self, token):
-        return token.name == 'delimiter' and token.string in self.close_tokens
-    def _handle_close_token(self, y, tokens, start, end, i, token):
-        if not self.stack:
-            raise Exception, "unmatched %r, line %d" % (token.string, y)
-        while True:
-            marker = self.stack[-1]
-            if marker.name in ('control', 'continue'):
-                self.stack.pop()
-            elif marker.name in self.open_tokens:
-                if self.open_tokens[marker.name] == token.string:
-                    self.stack.pop()
-                    break
-                else:
-                    raise Exception, "mismatched %r, line %d (expected %r)" % \
-                        (token.string, y, d[marker.name])
-            else:
-                raise Exception, "what? %r" % marker.name
-        if i == 0:
-            self._save_curr_level()
-    def _is_open_token(self, token):
-        return token.name == 'delimiter' and token.string in self.open_tokens
-    def _handle_open_token(self, y, tokens, start, end, i, token):
-        if i == 0 and self.stack and self.stack[-1].name == 'continue':
-            self.stack.pop()
-        if token.string == '{':
-            self._pop_while('continue', 'control')
-        if i == end - start:
-            level = self._get_next_level()
-        else:
-            level = tokens[i + 1].x
-        self._append(token.string, level)
-    def _handle_other_token(self, y, tokens, start, end, i, token):
-        name, s = token.name, token.string
-        if i + start == end:
-            self._pop_while('continue', 'control')
-        if name == 'continuation':
-            self._append_unless('continue', self._get_next_level())
-        elif name == 'keyword' and s in ('if', 'else', 'while', 'do', 'for'):
-            if i == start:
-                self._save_curr_level()
-            self._append_unless('control', self._get_next_level())

 class Awk(mode.Fundamental):
     tabbercls = AwkTabber
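The effect of this file's change is that AwkTabber no longer carries its own stack machinery; it only declares token tables for the new StackTabber2 base class in tab.py (below). The following standalone toy illustrates the stack-based indentation idea; it is illustrative only, not pmacs2 code, and its names (indent_levels, OPEN, CLOSE, WIDTH) and its character-level scanning are assumptions, whereas the real tabbers work on lexer tokens and also handle control keywords, strings, and comments.

# Illustrative only: a simplified, self-contained stack-based indenter in the
# spirit of StackTabber2. Hypothetical names; scans raw characters, not tokens.
OPEN = {'{': '}', '(': ')', '[': ']'}
CLOSE = {'}': '{', ')': '(', ']': '['}
WIDTH = 4

def indent_levels(lines):
    """Suggest an indentation level for each line."""
    stack = []      # open brackets, each paired with the level it introduces
    levels = []
    for line in lines:
        text = line.strip()
        if text and text[0] in CLOSE and stack:
            # a line starting with a closer dedents to its opener's level
            stack.pop()
            levels.append(stack[-1][1] if stack else 0)
            rest = text[1:]
        else:
            levels.append(stack[-1][1] if stack else 0)
            rest = text
        for ch in rest:
            if ch in OPEN:
                base = stack[-1][1] if stack else 0
                stack.append((ch, base + WIDTH))
            elif ch in CLOSE and stack:
                stack.pop()
    return levels

if __name__ == '__main__':
    src = ['BEGIN {', 'x = 1', 'if (x) {', 'print x', '}', '}']
    for level, line in zip(indent_levels(src), src):
        print(' ' * level + line)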


@@ -36,7 +36,6 @@ class CGrammar(Grammar):
         PatternRule(r"delimiter", r"\.|\(|\)|\[|\]|{|}|@|,|:|`|;|=(?!=)|\?|->"),
         PatternRule(r'eol', r"\n$"),
-        PatternRule(r'function', r'[a-zA-Z_][a-zA-Z0-9_]*(?= *\()'),
         PatternGroupRule(r'structgroup', r'keyword', r'struct', r'spaces',
                          r' +', r'structname', r'[a-zA-Z_][a-zA-Z0-9_]*'),
@@ -44,6 +43,8 @@ class CGrammar(Grammar):
                          r' +', r'enumname', r'[a-zA-Z_][a-zA-Z0-9_]*'),
         PatternRule(r'keyword', r"(?:auto|break|case|char|const|continue|default|double|do|else|enum|extern|float|for|goto|if|int|long|register|return|short|signed|sizeof|static|struct|switch|typedef|union|unsigned|void|volatile|while)(?![a-zA-Z_])"),
+        PatternRule(r'function', r'[a-zA-Z_][a-zA-Z0-9_]*(?= *\()'),
         PatternRule(r'builtin', r"(?:NULL|TRUE|FALSE)"),
         PatternRule(r'label', r'[a-zA-Z_][a-zA-Z0-9_]*(?=:)'),
@@ -72,6 +73,51 @@ class CGrammar(Grammar):
         OverridePatternRule(r'comment', r'// *@@:(?P<token>[.a-zA-Z0-9_]+):(?P<mode>[.a-zA-Z0-9_]+) *$'),
     ]

+class CTabber2(tab.StackTabber2):
+    open_tokens = {'delimiter': {'{': '}', '(': ')', '[': ']'}}
+    close_tokens = {'delimiter': {'}': '{', ')': '(', ']': '['}}
+    control_tokens = {'keyword': {'if': 1, 'else': 1, 'while': 1, 'do': 1, 'for': 1}}
+    end_at_eof = False
+    end_at_tokens = {'delimiter': {';': 1}}
+    nocontinue_tokens = {'delimiter': {';': 1}}
+    start_free_tokens = {'string.start': 'string.end'}
+    end_free_tokens = {'string.end': 'string.start'}
+    def _is_base(self, y):
+        if y == 0:
+            return True
+        tokens = self._get_tokens(y)
+        # this assumes that people aren't gonna use these macros inside of
+        # blocks, which is probably ok.
+        t0 = tokens[0]
+        if t0.fqname() == 'macro.start' and t0.string in ('#define', '#include'):
+            return True
+        # detecting function declarations is annoying; this assumes that people
+        # won't put a variable type and name on different lines, but that they
+        # might do that for function return type and name.
+        #
+        # unfortunately, valid function return types might include any of the
+        # four types of tokens below
+        decl = False
+        for t in tokens:
+            if t.name in ('keyword', 'identifier', 'structname', 'enumname'):
+                decl = True
+                continue
+            if decl and t.name == 'function':
+                break
+            else:
+                decl = False
+                break
+        return decl
+    def _is_indent(self, t):
+        return t.name == 'spaces'
+    def _is_ignored(self, t):
+        if t.name in ('spaces', 'eol', 'comment'): return True
+        elif t.fqname() in ('comment.start', 'comment.null', 'comment.end'): return True
+        else: return False

 class CTabber(tab.StackTabber):
     wst = ('spaces', 'eol', 'comment', 'comment.start', 'comment.null', 'comment.end')
     def token_is_whitespace(self, y, i):
@@ -222,7 +268,8 @@ class CMake(method.shell.Exec):
 class C(mode.Fundamental):
     modename = 'C'
     extensions = ['.c', '.h', '.cpp']
-    tabbercls = CTabber
+    #tabbercls = CTabber
+    tabbercls = CTabber2
     grammar = CGrammar
     opentokens = ('delimiter',)
     opentags = {'(': ')', '[': ']', '{': '}'}
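CTabber2 differs from the awk configuration mainly in when a statement ends: end_at_eof is turned off and statements instead end at ';' (end_at_tokens / nocontinue_tokens), so a line without a terminator keeps its statement open and the next line gets continuation indentation. The sketch below illustrates that awk-versus-C distinction on plain strings; it is not pmacs2 code, and continuation_levels and WIDTH are made-up names.

# Illustrative only: why the C tabber keys statement endings off ';' while the
# awk tabber can end them at end of line. Plain-string sketch, not pmacs2 code.
WIDTH = 4

def continuation_levels(lines, end_at_eof):
    """Indent a line one extra level when the previous line left its
    statement unfinished (no terminator was seen)."""
    levels = []
    pending = False
    for line in lines:
        text = line.strip()
        levels.append(WIDTH if pending else 0)
        if end_at_eof:
            pending = False                                # awk: newline ends it
        else:
            pending = not text.endswith((';', '{', '}'))   # C: ';' or a brace ends it
    return levels

print(continuation_levels(['int x = f(a,', 'b);', 'int y = 0;'], False))  # [0, 4, 0]
print(continuation_levels(['x = 1', 'y = 2'], True))                      # [0, 0]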

tab.py

@@ -1,5 +1,6 @@
 import regex, util
 from point import Point
+from sets import Set

 class Marker(object):
     def __init__(self, name, level):
@@ -221,3 +222,184 @@ class StackTabber(Tabber):
     def _opt_pop(self, *names):
         if self.markers and self.markers[-1].name in names:
             self.markers.pop(-1)
+class Marker2(object):
+    def __init__(self, name, type_, level):
+        self.name = name
+        self.type_ = type_
+        self.level = level
+    def __repr__(self):
+        return '<Marker2(%r, %r, %r)>' % (self.name, self.type_, self.level)
+
+#class StackTabber2(tab.StackTabber):
+class StackTabber2(Tabber):
+    open_tokens = {'delimiter': {'{': '}', '(': ')', '[': ']'}}
+    close_tokens = {'delimiter': {'}': '{', ')': '(', ']': '['}}
+    scope_tokens = {'delimiter': Set(['{'])}
+    control_tokens = {'keyword': Set(['if', 'else', 'while', 'do', 'for'])}
+    end_at_eof = True
+    end_at_tokens = {}
+    continue_tokens = {}
+    nocontinue_tokens = {}
+    start_free_tokens = {'string.start': 'string.end'}
+    end_free_tokens = {'string.end': 'string.start'}
+    def __init__(self, m):
+        self.mode = m
+        self.name = m.name()
+        self.lines = {}
+        self._reset()
+    def region_added(self, p, newlines):
+        self.lines = {}
+    def region_removed(self, p1, p2):
+        self.lines = {}
+    def get_level(self, y):
+        if y not in self.lines:
+            self._calc_level(y)
+        return self.lines.get(y)
+    def _calc_level(self, y):
+        target = y
+        while not self._is_base(y) and y > 0:
+            y -= 1
+        self._reset()
+        while y <= target:
+            self._save_curr_level()
+            self._handle_tokens(y)
+            y += 1
+    def _is_base(self, y):
+        return y == 0
+    def _reset(self):
+        self.record = {}
+        self.stack = []
+        self.markers = self.stack
+        self.curr_level = 0
+    def _get_curr_level(self):
+        if self.stack:
+            return self.stack[-1].level
+        else:
+            return 0
+    def _get_next_level(self):
+        return self._get_curr_level() + self.mode.tabwidth
+    def _save_curr_level(self):
+        self.curr_level = self._get_curr_level()
+    def _match(self, *names):
+        return self.stack and self.stack[-1].name in names
+    def _nomatch(self, *names):
+        return self.stack and self.stack[-1].name not in names
+    def _pop(self, *names):
+        if self._match(*names):
+            self.stack.pop()
+    def _pop_while(self, *names):
+        while self._match(*names):
+            self.stack.pop()
+    def _pop_until(self, *names):
+        while self._nomatch(*names):
+            self.stack.pop()
+    def _append(self, name, type_, level):
+        self.stack.append(Marker2(name, type_, level))
+    def _append_unless(self, name, type_, level):
+        if self._nomatch(name):
+            self.stack.append(Marker2(name, type_, level))
+    def _peek(self):
+        if self.stack:
+            return self.stack[-1]
+        else:
+            return None
+    def _get_tokens(self, y):
+        return self.mode.window.buffer.highlights[self.name].tokens[y]
+    def _handle_tokens(self, y):
+        tokens = self._get_tokens(y)
+        assert tokens
+        start = int(self._is_indent(tokens[0]))
+        end = len(tokens) - 1
+        while end > 0 and self._is_ignored(tokens[end]):
+            end -= 1
+        for i in range(0, end + 1 - start):
+            t = tokens[start + i]
+            if self._is_ignored(t):
+                pass
+            elif self._is_close_token(t):
+                self._handle_close_token(y, tokens, start, end, i, t)
+            elif self._is_open_token(t):
+                self._handle_open_token(y, tokens, start, end, i, t)
+            else:
+                self._handle_other_token(y, tokens, start, end, i, t)
+        self.lines[y] = self.curr_level
+        self.record[y] = tuple(self.stack)
+    def _is_indent(self, t):
+        return t.name == 'spaces'
+    def _is_ignored(self, t):
+        return t.name in ('spaces', 'eol', 'comment')
+    def _is_close_token(self, t):
+        return (t.name == 'delimiter' and
+                t.string in self.close_tokens['delimiter'])
+    def _handle_close_token(self, y, tokens, start, end, i, t):
+        if not self.stack:
+            raise Exception, "unmatched %r, line %d" % (t.string, y)
+        while True:
+            marker = self.stack[-1]
+            if marker.name in ('control', 'continue'):
+                self.stack.pop()
+            elif marker.name in self.open_tokens[marker.type_]:
+                if self.open_tokens[marker.type_][marker.name] == t.string:
+                    self.stack.pop()
+                    break
+                else:
+                    raise Exception, "mismatched %r, line %d (expected %r)" % \
+                        (t.string, y, self.open_tokens[marker.type_][marker.name])
+            else:
+                raise Exception, "what? %r" % marker.name
+        if i == 0:
+            self._save_curr_level()
+    def _is_open_token(self, t):
+        return (t.name == 'delimiter' and
+                t.string in self.open_tokens['delimiter'])
+    def _handle_open_token(self, y, tokens, start, end, i, t):
+        if i == 0 and self.stack and self.stack[-1].name == 'continue':
+            self.stack.pop()
+        if t.string in self.scope_tokens.get(t.name, {}):
+            self._pop_while('continue', 'control')
+        if i == end - start:
+            level = self._get_next_level()
+        else:
+            level = tokens[i + 1].x + 1
+        self._append(t.string, t.name, level)
+    def _handle_other_token(self, y, tokens, start, end, i, t):
+        name, s = t.fqname(), t.string
+        if name in self.start_free_tokens:
+            self._append('free', name, None)
+            return
+        elif name in self.end_free_tokens:
+            self._pop('free')
+        if self.end_at_eof and i + start == end:
+            self._pop_while('continue', 'control')
+        elif self.end_at_tokens.get(name, {}).get(s):
+            self._pop_while('continue', 'control')
+        top = self._peek()
+        if top and top.name in self.scope_tokens.get(top.type_, {}):
+            if self.continue_tokens:
+                if s in self.continue_tokens.get(name, {}):
+                    self._append_unless('continue', name, self._get_next_level())
+            elif self.nocontinue_tokens:
+                if s not in self.nocontinue_tokens.get(name, {}):
+                    self._append_unless('continue', name, self._get_next_level())
+        if name == 'continuation':
+            self._append_unless('continue', name, self._get_next_level())
+        elif s in self.control_tokens.get(name, {}):
+            if i == start:
+                self._save_curr_level()
+            self._append_unless('control', name, self._get_next_level())
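StackTabber2.get_level is lazy: it walks back to the nearest base line (_is_base), resets the marker stack, replays each line forward through _handle_tokens, and caches per-line levels in self.lines until region_added/region_removed clears them. The toy below sketches that walk-back / replay / cache pattern under simplified assumptions (raw brace counting instead of lexer tokens; ToyTabber is a hypothetical stand-in, not pmacs2 code).

# Illustrative only: the walk-back / replay-forward / cache pattern used by
# StackTabber2._calc_level, applied to a toy "buffer" of plain strings.
class ToyTabber(object):
    def __init__(self, lines):
        self.doc = lines     # strings standing in for buffer lines plus tokens
        self.cache = {}      # y -> computed level, like StackTabber2.lines

    def is_base(self, y):
        # a known-good restart point: here, any line that starts at column zero
        return y == 0 or not self.doc[y].startswith(' ')

    def get_level(self, y):
        if y not in self.cache:
            self._calc_level(y)
        return self.cache[y]

    def _calc_level(self, target):
        y = target
        while y > 0 and not self.is_base(y):
            y -= 1           # walk back to a base line
        depth = 0            # stands in for the marker stack
        while y <= target:   # replay forward, caching as we go
            line = self.doc[y].strip()
            closers = len(line) - len(line.lstrip('}'))
            self.cache[y] = max(depth - closers, 0) * 4
            depth += line.count('{') - line.count('}')
            y += 1

    def edited(self):
        self.cache = {}      # like region_added / region_removed

doc = ['int f() {', '  if (x) {', '    g();', '  }', '}']
t = ToyTabber(doc)
print([t.get_level(y) for y in range(len(doc))])   # [0, 4, 8, 4, 0]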