This commit is contained in: branch pmacs2
moculus 2009-02-02 14:44:32 +00:00
parent 87dce5ee27
commit 182fd7880d
4 changed files with 176 additions and 123 deletions

application.py

@@ -709,7 +709,7 @@ class Application(object):
p = w.logical_cursor()
blen = len(w.buffer.lines)
count = w.mode.header #XYZ
count = w.mode.header
(x, y) = w.first.xy()
(vy, vx) = (None, None)
while count < slot.height:
@@ -778,30 +778,30 @@ class Application(object):
w = slot.window
modename = w.mode.name()
# XYZ
# draw the header
rstrs = w.mode.get_header(w)
assert len(rstrs) >= w.mode.header
for j in range(0, w.mode.header):
rstrs[j].draw(self.win, slot.y_offset + j, slot.x_offset)
# XYZ
# draw the actual slot
self._draw_slot(i)
# highlighted regions
for hr in self.highlighted_ranges:
(high_w, p1, p2, fg, bg) = hr
if w is high_w and p2 >= w.first and p1 <= w.last:
#count = 0
count = w.mode.header #XYZ
count = w.mode.header
x, y = w.first.xy()
px = p1.x
while count < slot.heigh:
while count < slot.height:
if p1.y == y and px >= x and px - x < slot.width:
sy, sx = slot.y_offset + count, px - x + w.mode.lmargin
if slot.width > p2.x - x:
self.highlight_chars(slot.y_offset + count, px-x + w.mode.lmargin, p2.x-x, fg, bg)
self.highlight_chars(sy, sx, p2.x-x, fg, bg)
break
else:
self.highlight_chars(slot.y_offset + count, px-x + w.mode.lmargin, slot.width - 1, fg, bg)
self.highlight_chars(sy, sx, slot.width - 1, fg, bg)
px += slot.width - px + x - 1
if x + slot.width > len(w.buffer.lines[y]):
x = 0
@@ -827,8 +827,7 @@ class Application(object):
x, y = w.first.xy()
lm, rm = w.mode.lmargin, w.mode.rmargin
lines = w.buffer.lines
#count = 0
count = w.mode.header #XYZ
count = w.mode.header
k = x // (slot.width - lm - rm)
modename = w.mode.name()
lit = w.mode.name() in w.buffer.highlights
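
The highlighted-range loop above starts its row counter at w.mode.header, so the mode's header rows are skipped, and then walks the range across wrapped screen rows, calling highlight_chars once per row the range touches. A minimal sketch of that row-walking idea, with illustrative names (wrapped_spans, width); it ignores margins and ranges that span multiple buffer lines:

def wrapped_spans(x1, x2, width):
    # split the buffer-column range [x1, x2) into (row, start, length) spans,
    # one per wrapped screen row of the given width
    spans = []
    row = x1 // width
    while x1 < x2:
        col = x1 % width
        n = min(width - col, x2 - x1)
        spans.append((row, col, n))
        x1 += n
        row += 1
    return spans

# e.g. columns 70..130 on an 80-column slot split into two spans
assert wrapped_spans(70, 130, 80) == [(0, 70, 10), (1, 0, 50)]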

lex.py

@@ -18,6 +18,27 @@ class Token(object):
self.link = link
self._debug = False
assert parent is None or hasattr(parent, 'name'), 'oh no %r' % parent
def match(self, name, string):
return self.name == name and self.string == string
def matchs(self, name, strings):
return self.name == name and self.string in strings
def matchp(self, pairs):
for (name, string) in pairs:
if self.match(name, string):
return True
return False
def fqmatch(self, name, string):
return self.fqname() == name and self.string == string
def fqmatchs(self, name, strings):
return self.fqname() == name and self.string in strings
def fqmatchp(self, pairs):
for (name, string) in pairs:
if self.fqmatch(name, string):
return True
return False
def parents(self):
if self.parent is not None:
parents = self.parent.parents()
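
The new match/matchs/matchp helpers (and their fully-qualified fq* counterparts) replace the repeated `t.name == ... and t.string == ...` comparisons that tabbers and context code otherwise spell out. A small self-contained illustration of how the three flavors read; FakeToken is a stand-in for this example only, since the real lex.Token also carries parent/link state:

class FakeToken(object):
    def __init__(self, name, string):
        self.name, self.string = name, string
    def match(self, name, string):
        return self.name == name and self.string == string
    def matchs(self, name, strings):
        return self.name == name and self.string in strings
    def matchp(self, pairs):
        return any(self.match(n, s) for n, s in pairs)

t = FakeToken('delimiter', '{')
assert t.match('delimiter', '{')
assert t.matchs('delimiter', ('{', '(', '['))
assert t.matchp([('keyword', 'if'), ('delimiter', '{')])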

mode/java.py

@@ -1,4 +1,5 @@
import color, mode, tab
import context
from lex import Grammar, PatternRule, RegionRule
from mode.python import StringGrammar2
from mode.c import CTabber2
@@ -35,136 +36,77 @@ class JavaGrammar(Grammar):
PatternRule(r"eol", r"\n$"),
]
class JavaTabber(CTabber2):
class JavaTabber2(tab.StackTabber2):
open_tokens = {'delimiter': {'{': '}', '(': ')', '[': ']'}}
close_tokens = {'delimiter': {'}': '{', ')': '(', ']': '['}}
control_tokens = {'keyword': {'if': 1, 'else': 1, 'while': 1, 'do': 1, 'for': 1}}
end_at_eof = False
end_at_tokens = {'delimiter': {';': 1}}
nocontinue_tokens = {'delimiter': {';': 1},
'java_comment.start': 1,
'java_comment.data': 1,
'java_comment.end': 1}
start_free_tokens = {'string.start': 'string.end'}
end_free_tokens = {'string.end': 'string.start'}
def is_base(self, y):
if y == 0:
return True
highlighter = self.mode.window.buffer.highlights[self.mode.name()]
if not highlighter.tokens[y]:
else:
return False
def _is_indent(self, t):
return t.name == 'spaces'
def _is_ignored(self, t):
return t.fqname() in ('spaces', 'eol', 'comment', 'comment.start',
'comment.data', 'comment.null', 'comment.end')
for t in highlighter.tokens[y]:
if t.name == 'null':
pass
elif t.name == 'keyword':
if t.string in ('class', 'interface'):
return True
elif t.string in ('public', 'private', 'protected', 'static',
'final', 'native', 'synchronized', 'abstract',
'threadsafe', 'transient'):
pass
else:
return False
else:
return False
return False
class JavaContext(context.Context):
def _regen_stack(self, y):
if y > 0 and self.namelines[y - 1][1]:
return list(self.namelines[y - 1][1])
else:
return []
# detecting function declarations is annoying; this assumes that people
# won't put a variable type and name on different lines, but that they
# might do that for function return type and name.
#
# unfortunately, valid function return types might include any of the
# four types of tokens below
decl = False
for t in highlighter.tokens[y]:
if t.name in ('keyword', 'identifier', 'structname', 'enumname'):
decl = True
continue
if decl and t.name == 'function':
break
else:
decl = False
break
if decl:
return True
def _build_name_map(self, y1, y2, last, curr, stack):
blen = len(self.mode.window.buffer.lines)
highlights = self.mode.window.get_highlighter()
i = y1
return False
def _handle_open_token(self, currlvl, y, i):
self._opt_pop('cont')
token = self.get_token(y, i)
if token.string == '{':
self._opt_pop('cond')
currlvl = tab.StackTabber._handle_open_token(self, currlvl, y, i)
return currlvl
def _handle_close_token(self, currlvl, y, i):
w = self.mode.tabwidth
self._opt_pop('cont')
currlvl = tab.StackTabber._handle_close_token(self, currlvl, y, i)
token = self.get_token(y, i)
if self.is_rightmost_token(y, i):
if token.string == '}':
self._opt_pop('cond')
self._opt_pop('cont')
elif self._peek_name() == 'cond':
pass
else:
self._opt_append('cont', currlvl + w)
return currlvl
def _handle_other_token(self, currlvl, y, i):
w = self.mode.tabwidth
token = self.get_token(y, i)
fqname = token.fqname()
if fqname == 'delimiter' and token.string == ';':
self._opt_pop('cond')
self._opt_pop('cont')
self._opt_pop('cond')
while i < y2:
if not stack:
curr = None
elif fqname == 'keyword':
if token.string in ('do', 'else', 'for', 'if', 'while'):
self._append('cond', currlvl + w)
elif token.string == 'break':
self._opt_pop('case', 'while', 'for')
elif token.string == 'continue':
self._opt_pop('while', 'for')
elif token.string == 'case':
self._opt_pop('case')
currlvl = self.get_curr_level()
self._opt_append('case', currlvl + w)
g = highlights.tokens[i]
gl = len(g)
elif fqname == 'string.start':
self._opt_append('string', None)
elif fqname == 'string.end':
self._opt_pop('string')
if self.is_rightmost_token(y, i):
self._opt_append('cont', currlvl + w)
if gl > 2 and g[0].match('keyword', 'class'):
curr = g[2].string
elif gl > 4 and g[2].match('keyword', 'class'):
curr = g[4].string
# TODO: this could be a lot better
elif fqname == 'macro':
currlvl = 0
elif fqname.startswith('macro.start'):
self._opt_append('macro', None)
currlvl = 0
elif fqname.startswith('macro.end'):
self._opt_pop('macro', None)
if curr is not None and curr not in self.names:
self.names[curr] = i
elif fqname.startswith('macroblock.start'):
self._opt_append('macroblock', None)
currlvl = 0
elif fqname.startswith('macroblock.end'):
self._opt_pop('macroblock', None)
if i == y2 - 1 and curr != self.namelines[i][0] and y2 < blen:
y2 += 1
if self.is_rightmost_token(y, i):
if self._has_markers() and self._peek_name() == 'cond':
pass
elif(not fqname.startswith('string') and
not fqname.startswith('java-comment') and
not fqname.startswith('macro') and
not fqname == 'delimiter' and
not fqname == 'header' and
not fqname == 'null' and
not fqname == 'eol' and
token.string not in ('}', ';', '(', '{', '[', ',')):
self._opt_append('cont', currlvl + w)
return currlvl
for t in g:
if t.match('delimiter', '{'):
stack.append(t.string)
elif t.match('delimiter', '}'):
#assert stack, "uh oh"
if stack:
stack.pop(-1)
if curr:
self.namelines[i] = (curr, tuple(stack))
i += 1
class Java(mode.Fundamental):
modename = 'Java'
extensions = ['.java']
tabbercls = JavaTabber
tabbercls = JavaTabber2
grammar = JavaGrammar
opentokens = ('delimiter',)
opentags = {'(': ')', '[': ']', '{': '}'}
closetokens = ('delimiter',)
@@ -182,10 +124,26 @@ class Java(mode.Fundamental):
'java_integer': ('green', 'default', 'bold'),
'java_float': ('green', 'default', 'bold'),
}
format = "%(flag)s %(bname)-18s (%(mname)s) %(indent)s %(cursor)s/%(mark)s %(perc)s [%(func)s]"
def get_status_names(self):
names = mode.Fundamental.get_status_names(self)
c = self.window.logical_cursor()
names['func'] = self.get_line_function(c.y)
return names
def __init__(self, w):
mode.Fundamental.__init__(self, w)
self.add_bindings('close-paren', (')',))
self.add_bindings('close-brace', ('}',))
self.add_bindings('close-bracket', (']',))
self.context = JavaContext(self)
def get_functions(self):
return self.context.get_names()
def get_function_names(self):
return self.context.get_name_list()
def get_line_function(self, y):
return self.context.get_line_name(y)
install = Java.install
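
JavaContext maintains a per-line name map: _build_name_map walks each line's tokens, remembers the identifier that follows a `class` keyword, pushes on '{' and pops on '}', and records a (name, stack) pair for every line, which is what get_line_function/get_line_name read back for the %(func)s field in the status line. A rough sketch of that idea with illustrative names; the real code works on lex.Token objects, resumes from a cached stack, and also handles access modifiers before the class keyword:

def build_name_map(lines):
    # lines: one list of (name, string) token tuples per buffer line
    namelines = []
    stack = []      # one entry per unmatched '{'
    curr = None
    for toks in lines:
        for i, (name, string) in enumerate(toks):
            if name == 'keyword' and string == 'class' and i + 1 < len(toks):
                curr = toks[i + 1][1]      # identifier following 'class'
            elif (name, string) == ('delimiter', '{'):
                stack.append(string)
            elif (name, string) == ('delimiter', '}') and stack:
                stack.pop()
                if not stack:
                    curr = None            # left the outermost declaration
        namelines.append((curr, tuple(stack)))
    return namelines

lines = [
    [('keyword', 'class'), ('identifier', 'Foo'), ('delimiter', '{')],
    [('keyword', 'int'), ('identifier', 'x'), ('delimiter', ';')],
    [('delimiter', '}')],
]
assert [n for n, s in build_name_map(lines)] == ['Foo', 'Foo', None]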

parse.py (new file)

@@ -0,0 +1,75 @@
import lex
class Rule(object):
    def __init__(self, *rules):
        self.rules = rules
    def match(self, tokens):
        raise Exception("unimplemented")
class Match(Rule):
    method = lex.Token.match
    def __init__(self, *args):
        self.args = args
    def match(self, tokens):
        if not tokens:
            return False
        elif self.method(tokens[0], *self.args):
            # the first token matched, so consume it
            tokens.pop(0)
            return True
        else:
            return False
class Matchs(Match):
    method = lex.Token.matchs
class Matchp(Match):
    method = lex.Token.matchp
class Fqmatch(Match):
    method = lex.Token.fqmatch
class Fqmatchs(Match):
    method = lex.Token.fqmatchs
class Fqmatchp(Match):
    method = lex.Token.fqmatchp
class And(Rule):
    def match(self, tokens):
        for r in self.rules:
            if not r.match(tokens):
                return False
        return True
class Or(Rule):
    def match(self, tokens):
        for r in self.rules:
            if r.match(tokens):
                return True
        return False
class Repeat(Rule):
    def __init__(self, rule, minimum, maximum):
        self.rule = rule
        self.minimum = minimum
        self.maximum = maximum
    def match(self, tokens):
        # require at least self.minimum repetitions...
        for i in range(0, self.minimum):
            if not self.rule.match(tokens):
                return False
        # ...then consume greedily, up to self.maximum in total
        for i in range(self.minimum, self.maximum):
            if not self.rule.match(tokens):
                break
        return True
class Star(Rule):
    def __init__(self, rule):
        self.rule = rule
    def match(self, tokens):
        # note: as written this requires at least one repetition, then
        # consumes as many more as possible
        if not self.rule.match(tokens):
            return False
        while self.rule.match(tokens):
            pass
        return True
class End(Rule):
    def __init__(self):
        pass
    def match(self, tokens):
        return not tokens
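
parse.py gives modes a small combinator language over token streams. A hedged usage sketch follows (decl and is_class_decl are illustrative, not part of the commit); it assumes the module is importable as parse and that tokens holds lex.Token objects for one line:

from parse import And, Match, Matchs, Repeat

# recognize a line shaped like:
#   [public|private|protected|abstract|final|static]* class Name ...
decl = And(
    Repeat(Matchs('keyword', ('public', 'private', 'protected',
                              'abstract', 'final', 'static')), 0, 8),
    Match('keyword', 'class'),
)

def is_class_decl(tokens):
    # match() consumes from the front of the list and does not backtrack, so
    # hand it a filtered copy (the lexer also emits 'spaces'/'eol' tokens)
    toks = [t for t in tokens if t.name not in ('spaces', 'eol')]
    return decl.match(toks)

Because the rules consume tokens destructively, a failed And can leave its input partially consumed; passing a fresh copy to each match() call keeps that harmless.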