pychecker-inspired cleanup

--HG--
branch : pmacs2
Erik Osheim 2009-11-10 00:30:55 -05:00
parent 67d13d0559
commit 6d3eab1a47
7 changed files with 68 additions and 85 deletions


@@ -170,7 +170,6 @@ class Buffer(object):
def remove_window(self, w):
if w in self.windows:
self.windows.remove(w)
modename = w.mode.name
if w.mode.name in self.highlights:
for w2 in self.windows:
if w2.mode.name == w.mode.name:
@@ -244,7 +243,7 @@ class Buffer(object):
mode = None
if os.path.exists(path):
if force:
mode = os.stat(self.path)[0]
mode = os.stat(path)[0]
else:
# XYZ
raise Exception("oh no! %r already exists" % path)
@@ -253,7 +252,7 @@ class Buffer(object):
data = self.write_filter(self.make_string())
# create a safe temporary path to write to, and write out data to it
temp_path = self._temp_path()
temp_path = self._temp_path(path)
f2 = self._open_file_w(temp_path)
f2.write(data)
f2.close()
@@ -362,7 +361,6 @@ class Buffer(object):
self.delete_char(p, act=act, force=force)
self.insert_string(p, c, act=act, force=force)
def delete_line(self, y, act=ACT_NORM, force=False):
line = self.lines[y]
p1 = Point(0, y)
if y < len(self.lines) - 1:
p2 = Point(0, y + 1)
@@ -526,10 +524,10 @@ class IperlBuffer(InterpreterBuffer):
return '*%s*' % cls._basename
create_name = classmethod(create_name)
def get_cmd(self):
cmd = ['iperl', '-p']
if self.parent:
return ('iperl', '-p', '-r', self.parent.path)
else:
return ('iperl', '-p')
cmd.extend(['-r', self.parent.path])
return cmd
def get_env(self):
lib = ':'.join(self.application.config.get('perl.libs', []))
return {'PERL5LIB': lib}


@@ -29,7 +29,7 @@ def find_common_string(candidates):
class Completer(object):
def __init__(self, application):
self.application = application
def get_candidates(self, s):
def get_candidates(self, s, w=None):
assert "Not implemented"
def tab_string(self, s, w=None):
'''returns a tuple of three things:

lex.py

@@ -6,7 +6,8 @@ def escape(s):
return re.escape(s)
class Token(object):
def __init__(self, name, rule, y, x, s, color=None, parent=None, matchd={}, link=None):
def __init__(self, name, rule, y, x, s, color=None, parent=None,
matchd={}, link=None):
self.name = name
self.rule = rule
self.y = y
@@ -17,11 +18,7 @@ class Token(object):
self.matchd = matchd
self.link = link
self._debug = False
#self._fqlist = None
#self._fqname = None
#self._fqlist = self.mkfqlist()
#self._fqname = self.mkfqname()
assert parent is None or hasattr(parent, 'name'), 'oh no %r' % parent
assert not parent or parent.name
def isa(self, *names):
return self.name in names
@@ -62,11 +59,6 @@ class Token(object):
if self.link and not self.link.startswith('middle'):
names.append(self.rule.name)
return names
#def fqlist(self):
# if self._fqlist is None:
# self._fqlist = self.mkfqlist()
# return self._fqlist
#def mkfqlist(self):
def fqlist(self):
if self.parent is not None:
names = self.parent.domain()
@@ -76,11 +68,6 @@ class Token(object):
names.append(self.rule.name)
names.append(self.name)
return names
#def fqname(self):
# if self._fqname is None:
# self._fqname = self.mkfqname()
# return self._fqname
#def mkfqname(self):
def fqname(self):
names = self.fqlist()
return '.'.join(names)


@@ -13,15 +13,13 @@ def arg(n, t=type(''), dt=None, p=None, h='', dv=default.none, ld=False, q='default'):
return Argument(n, type=t, datatype=dt, prompt=p, help=h, default=dv,
load_default=ld, queue=q)
class Argument(object):
def __init__(self, name, type=type(""), datatype=None, prompt=None, help='',
default=default.none, load_default=False, queue='default'):
def __init__(self, name, type=type(""), datatype=None, prompt=None,
help='', default=default.none, load_default=False,
queue='default'):
self.name = name
self.type = type
self.datatype = datatype
if prompt is None:
self.prompt = "%s: " % (name)
else:
self.prompt = prompt
self.prompt = prompt or name + ': '
self.help = help
self.load_default = load_default
self.default = default


@@ -261,8 +261,6 @@ class Fundamental(Handler):
self.add_bindings('uppercase-word', ('M-u',))
self.add_bindings('lowercase-word', ('M-l',))
i = 31
# used for all word operations
if not self.word_letters:
self.word_letters = w.application.config['word_letters']
@@ -342,7 +340,7 @@ class Fundamental(Handler):
y = self.window.first.y
if self.window.first.x > 0:
y += 1
lvl = self.tabber.get_level(y)
#lvl = self.tabber.get_level(y)
markers = self.tabber.record[y]
if w.buffer.is_whitespace(y):
ws = None
@@ -464,8 +462,6 @@ class Fundamental(Handler):
self.window.set_error(str(e))
def region_added(self, p, newlines):
mname = self.name
if self.lexer is not None:
ydelta = len(newlines) - 1
xdelta = len(newlines[-1])


@@ -425,25 +425,27 @@ class PerlDequoteWord(Method):
word_re = re.compile('^[a-zA-Z0-9_]+$')
def _execute(self, w, **vargs):
p = w.logical_cursor()
x1, x2 = p.x, p.end_x()
tokens = w.get_line_token_list_at_point(p)
data_token = None
saw_start = False
for token in tokens:
if token.end_x() < x1:
continue
elif token.fqname().endswith('string.start'):
saw_start = True
elif token.fqname().endswith('string.data'):
data_token = token
elif saw_start:
tokens = w.get_token_list_at_point(p)
token = None
seen = False
for t in tokens:
if t.end_x() < p.x:
pass
elif t.fqname().endswith('string.start'):
seen = True
elif t.fqname().endswith('string.data'):
token = t
elif token and t.fqname().endswith('string.end'):
break
elif seen:
token = None
break
if not data_token:
if not token:
w.set_error('no suitable quoted word found!')
return
w.set_error('going to dequote %r' % token.data)
w.delete_char(Point(token.end_x(), token.y))
w.delete_char(Point(token.x - 1, token.y))
class PerlInitFunctions(Method):
'''Jump to a function defined in this module'''
@@ -502,7 +504,7 @@ class PerlHashCleanup(Method):
myregex = r
if myregex is None:
raise Exception, "Not a perl hash line"
raise Exception("Not a perl hash line")
groups_by_line[cursor.y] = myregex.match(line).groups()
@@ -596,7 +598,6 @@ class PerlWrapParagraph(WrapParagraph):
return ltype
def _fix_comments(self, c, w):
h = w.buffer.highlights[w.mode.name]
y1 = c.y
y2 = c.y
while y2 < len(w.buffer.lines) - 1:
@@ -776,7 +777,7 @@ class PerlContext(context.Context):
i += 1
# white is for delimiters, operators, numbers
default = ('default', 'default')
c_default = ('default', 'default')
# magenta is for keywords/builtins, translation, globs
lo_magenta = ('magenta202', 'default')
@@ -872,7 +873,7 @@ class Perl(Fundamental):
'evaldoc.null': hi_cyan,
# numbers
'perl.number': default,
'perl.number': c_default,
# strings
'perl.string.start': lo_green,
@@ -933,7 +934,6 @@ class Perl(Fundamental):
'perl-hash-cleanup': ('C-c h',),
'perl-open-module': ('C-c C-f',),
'perl-open-module-word': ('C-c M-f',),
#'perl-run': ('C-c r',),
'perl-semantic-complete': ('C-c TAB',),
'perl-set-lib': ('C-c l',),
'perl-wrap-paragraph': ('M-q',),
@@ -942,6 +942,8 @@ class Perl(Fundamental):
'close-paren': (')'),
'close-bracket': (']'),
'close-brace': ('}'),
'perl-quote-word': ("C-c '",),
'perl-dequote-word': ("C-u '",),
}
def __init__(self, w):
Fundamental.__init__(self, w)
@@ -974,7 +976,7 @@ class Perl(Fundamental):
data = p.stdout.read()
status = p.wait()
if status != 0: raise Exception, "%r failed" % cmd
if status != 0: raise Exception("%r failed" % cmd)
self.perlinc = data.split('\n')
return self.perlinc


@@ -34,9 +34,9 @@ class Names(Rule):
def __init__(self, names):
self.names = names
def _match(self, tokens):
for token in tokens:
if token.name in self.names:
return [1]
else:
return []
class String(Rule):
@@ -51,17 +51,17 @@ class Strings(Rule):
def __init__(self, ss):
self.strings = ss
def _match(self, tokens):
for token in tokens:
if token.string in self.strings:
return [1]
else:
return []
class Stringre(Rule):
def __init__(self, r):
self.regex = re.compile(r)
def _match(self, tokens):
for token in tokens:
if self.regex.match(token.string):
return [1]
else:
return []
class Match(Rule):
@@ -98,10 +98,12 @@ class And(Rule):
return [n]
class Or(Rule):
def match(self, tokens):
n = 0
for r in self.rules:
result = r.match(tokens[n:])
if result:
return result
n += result[0]
return []
class Repeat(Rule):