from method import Method
import buffer.color
import util
import lex

class VcBlame(Method):
    '''Abstract base class for version-control blame/annotate methods.'''
    _is_method = False
    line_re = None     # subclasses set: regex with a 'content' named group
    prefix_fmt = None  # subclasses set: format string applied to each line's groupdict
    def _filter(self, line):
        # Parse one line of blame output into its named regex groups.
        m = self.line_re.match(line)
        if not m:
            raise Exception("couldn't parse %r" % line)
        return m.groupdict()
    def _open_pipe(self, w, **vargs):
        # Subclasses must return a pipe-like object exposing .stdout and .wait().
        raise Exception('unimplemented')
    def _execute(self, w, **vargs):
        if not hasattr(w.buffer, 'path'):
            w.set_error("Buffer has no corresponding file")
            return

        pipe = self._open_pipe(w, **vargs)
        groups = [self._filter(line) for line in pipe.stdout]
        status = pipe.wait() >> 8

        # If the mode has a grammar, lex the blamed content so the output
        # buffer can be syntax-highlighted.
        if w.mode.grammar:
            lexer = lex.Lexer(w.mode, w.mode.grammar)
            tokens = list(lexer.lex([d['content'] for d in groups]))
            for t in tokens:
                groups[t.y].setdefault('tokens', [])
                groups[t.y]['tokens'].append(t)

        lines = []
        for d in groups:
            if d.get('tokens'):  # .get(): a line may have no tokens at all
                suffix = ''
                for t in d['tokens']:
                    code = buffer.color.get_cbuf_code(*t.color)
                    suffix += code + util.cbuf_escape(t.string)
            else:
                suffix = d['content'] + '\n'
            lines.append(self.prefix_fmt % d + ' ' + suffix)
        data = ''.join(lines)

        if status == 0:
            w.application.color_data_buffer("*Blame*", data, switch_to=True)
        else:
            w.set_error("There was an error (%s)" % (status))
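
# --- Illustrative sketch (not part of the original file) ---------------------
# A hypothetical subclass showing how VcBlame is intended to be specialized:
# define a line_re whose named groups include 'content' plus whatever fields
# prefix_fmt interpolates, and implement _open_pipe() to start the blame
# command. The class name, regex, format string, and use of subprocess below
# are assumptions for illustration only, not part of this codebase.
import re
import subprocess

class ExampleGitBlame(VcBlame):
    _is_method = True  # assumed: re-enables what the base class disables
    # Roughly matches default `git blame` output lines such as:
    #   "abc1234 (Some Author 2024-01-01 12:00:00 +0000  42) some code"
    line_re = re.compile(r'^(?P<rev>\S+)\s+\((?P<info>[^)]*?)\s*(?P<lineno>\d+)\) (?P<content>.*)$')
    prefix_fmt = '%(rev)-10s %(lineno)4s'
    def _open_pipe(self, w, **vargs):
        # Caveat: VcBlame._execute computes pipe.wait() >> 8 (a waitpid-style
        # status word), while subprocess.Popen.wait() returns the exit code
        # directly; a real implementation would need to reconcile the two.
        return subprocess.Popen(['git', 'blame', w.buffer.path],
                                stdout=subprocess.PIPE, universal_newlines=True)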