2009-05-10 01:45:57 -04:00
|
|
|
from method import Method
|
|
|
|
import buffer.color
|
|
|
|
import util
|
|
|
|
import lex
|
|
|
|
|
2009-05-10 02:01:09 -04:00
|
|
|
class VcException(Exception):
    """Raised when version-control blame output cannot be parsed."""
|
|
|
|
|
2009-05-10 01:45:57 -04:00
|
|
|
class VcBlame(Method):
|
2009-05-10 22:45:16 -04:00
|
|
|
_is_method = False
|
|
|
|
line_re = None
|
|
|
|
prefix_fmt = None
|
|
|
|
pretest_err_msg = None
|
2009-05-14 23:59:13 -04:00
|
|
|
num_fields = 1
|
2009-05-10 22:45:16 -04:00
|
|
|
def _pretest(self):
|
|
|
|
return True
|
2009-05-10 01:45:57 -04:00
|
|
|
def _filter(self, line):
|
|
|
|
m = self.line_re.match(line)
|
|
|
|
if not m:
|
2009-05-10 02:01:09 -04:00
|
|
|
raise VcException("couldn't parse %r" % line)
|
2009-05-14 23:59:13 -04:00
|
|
|
groups = m.groups()
|
|
|
|
return {'fields': groups[:-1], 'content': groups[-1], 'tokens': []}
|
2009-05-10 01:45:57 -04:00
|
|
|
def _open_pipe(self, w, **vargs):
|
|
|
|
raise Exception('unimplemented')
|
2009-05-14 23:59:13 -04:00
|
|
|
def _build_groups(self, w, **vargs):
|
2009-05-10 01:45:57 -04:00
|
|
|
pipe = self._open_pipe(w, **vargs)
|
2009-05-14 23:59:13 -04:00
|
|
|
groups = []
|
|
|
|
gsizes = [0] * self.num_fields
|
|
|
|
for line in pipe.stdout:
|
|
|
|
d = self._filter(line)
|
|
|
|
for i in range(0, self.num_fields):
|
|
|
|
gsizes[i] = max(gsizes[i], len(d['fields'][i]))
|
|
|
|
groups.append(d)
|
2009-05-10 01:45:57 -04:00
|
|
|
status = pipe.wait() >> 8
|
2009-05-14 23:59:13 -04:00
|
|
|
if status != 0:
|
|
|
|
raise Exception("There was an error (%d)" % status)
|
|
|
|
return groups, gsizes
|
|
|
|
def _lex_groups(self, groups, w, **vargs):
|
2009-05-10 01:45:57 -04:00
|
|
|
if w.mode.grammar:
|
|
|
|
lexer = lex.Lexer(w.mode, w.mode.grammar)
|
2009-05-14 23:59:13 -04:00
|
|
|
for t in lexer.lex([d['content'] for d in groups]):
|
2009-05-10 01:45:57 -04:00
|
|
|
groups[t.y]['tokens'].append(t)
|
2009-05-14 23:59:13 -04:00
|
|
|
def _build_lines(self, groups, gsizes, w, **vargs):
|
|
|
|
self._lex_groups(groups, w, **vargs)
|
2009-05-10 01:45:57 -04:00
|
|
|
lines = []
|
|
|
|
for d in groups:
|
|
|
|
if d['tokens']:
|
|
|
|
suffix = ''
|
|
|
|
for t in d['tokens']:
|
|
|
|
code = buffer.color.get_cbuf_code(*t.color)
|
|
|
|
suffix += code + util.cbuf_escape(t.string)
|
|
|
|
else:
|
|
|
|
suffix = d['content'] + '\n'
|
2009-05-14 23:59:13 -04:00
|
|
|
tpl = tuple(util.flatzip(gsizes, d['fields']))
|
|
|
|
lines.append(self.prefix_fmt % tpl + ' ' + suffix)
|
|
|
|
return lines
|
|
|
|
def _execute(self, w, **vargs):
|
|
|
|
if not self._pretest():
|
|
|
|
w.set_error(self.pretest_err_msg)
|
|
|
|
return
|
|
|
|
elif not hasattr(w.buffer, 'path'):
|
|
|
|
w.set_error("Buffer has no corresponding file")
|
|
|
|
return
|
|
|
|
|
|
|
|
try:
|
|
|
|
groups, gsizes = self._build_groups(w, **vargs)
|
|
|
|
except Exception, e:
|
|
|
|
w.set_error(str(e))
|
2009-05-10 01:45:57 -04:00
|
|
|
|
2009-05-14 23:59:13 -04:00
|
|
|
data = ''.join(self._build_lines(groups, gsizes, w, **vargs))
|
|
|
|
w.application.color_data_buffer("*Blame*", data, switch_to=True)
|