created abstract base class vc-diff; refactored svn-diff
--HG-- branch : pmacs2
parent c05127348e
commit 637aa1f610
@@ -4,6 +4,7 @@ from subprocess import Popen, PIPE, STDOUT
 import buffer, default, dirutil, lex, regex, util, window
 from point import Point
 import buffer.color
+from method.vc import VcBlame
 
 from method import Method, Argument
 
@@ -256,91 +257,11 @@ class SvnDiff3(Method):
         else:
             w.set_error("No difference found")
 
-class SvnBlame(Method):
+class SvnBlame(VcBlame):
     '''show blame output for the current version in SVN'''
     line_re = re.compile('^ *(?P<rev>\d+) *(?P<user>[a-zA-Z0-9_]+) *(?P<date>[-0-9]+) *(?P<time>[:0-9]+) *(?P<tz>-\d{4}) *\((?P<vdate>[^\)]+)\) (?P<content>.*)\n$')
-    prefix_fmt = '[b:d:*]%-4(rev)s [c:d:*]%-10(user)s [b:d:*]%10(date)s[d:d:*]'
-    def _filter(self, line):
-        m = self.line_re.match(line)
-        if not m:
-            raise SvnException("couldn't parse %r" % line)
-        return m.groupdict()
+    prefix_fmt = '[b:d:*]%(rev)-4s [c:d:*]%(user)-10s [b:d:*]%(date)10s[d:d:*]'
+    _is_method = True
     def _open_pipe(self, w, **vargs):
         cmd = ("/usr/bin/svn", 'blame', '-v', w.buffer.path)
         return Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
-    def _execute(self, w, **vargs):
-        if not hasattr(w.buffer, 'path'):
-            w.set_error("Buffer has no corresponding file")
-            return
-
-        pipe = self._open_pipe(w, **vargs)
-        groups = [self._filter(line) for line in pipe.stdout]
-        status = pipe.wait() >> 8
-
-        if w.mode.grammar:
-            lexer = lex.Lexer(w.mode, w.mode.grammar)
-            tokens = list(lexer.lex([d['content'] for d in groups]))
-            groups[t.y]['tokens'] = tokens
-
-        data = ''
-        for d in groups:
-            if d['tokens']:
-                suffix = ''
-                for t in d['tokens']:
-                    code = buffer.color.get_cbuf_code(t.color)
-                    suffix += code + util.cbuf_escape(t.string)
-            else:
-                suffix = d['content'] + '\n'
-            data += self.prefix_fmt % d + ' ' + suffix
-
-        if status == 0:
-            w.application.color_data_buffer("*Blame*", data, switch_to=True)
-        else:
-            w.set_error("There was an error (%s)" % (status))
-#class SvnBlame(Method):
-#    '''show blame output for the current version in SVN'''
-#    line_re = re.compile('^ *(\d+) *([a-zA-Z0-9_]+) *([-0-9]+) *([:0-9]+) *(-\d{4}) *\(([^\)]+)\) (.*)\n$')
-#    def _execute(self, w, **vargs):
-#        if not hasattr(w.buffer, 'path'):
-#            w.set_error("Buffer has no corresponding file")
-#            return
-#
-#        cmd = ("/usr/bin/svn", 'blame', '-v', w.buffer.path)
-#        pipe = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
-#
-#        linetokens = []
-#        for line in pipe.stdout:
-#            m = self.line_re.match(line)
-#            if not m:
-#                raise Exception, line
-#            (rev, user, date, t, tz, vdate, content) = m.groups()
-#            linetokens.append([rev, user, date, content, []])
-#        status = pipe.wait() >> 8
-#
-#        lines = [x[3] for x in linetokens]
-#        if w.mode.grammar:
-#            lexer = lex.Lexer(w.mode, w.mode.grammar)
-#            lextokens = [[] for l in lines]
-#            for t in lexer.lex(lines):
-#                linetokens[t.y][4].append(t)
-#
-#        lines = []
-#        for linetoken in linetokens:
-#            (rev, user, date, content, lextokens) = linetoken
-#            prefix = '[b:d:*]%-4s [c:d:*]%-10s [b:d:*]%10s[d:d:*]' % (rev, user, date)
-#            if lextokens:
-#                suffixes = []
-#                for lt in lextokens:
-#                    s = lt.string.replace('\\', '\\\\')
-#                    s = s.replace('[', '\\[').replace(']', '\\]')
-#                    suffixes.append('[%s:%s:*]%s' % (lt.color[0], lt.color[1], s))
-#                suffix = ''.join(suffixes)
-#            else:
-#                suffix = content + '\n'
-#            lines.append('%s %s' % (prefix, suffix))
-#        data = ''.join(lines)
-#
-#        if status == 0:
-#            w.application.color_data_buffer("*Blame*", data, switch_to=True)
-#        else:
-#            w.set_error("There was an error (%s)" % (status))
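
With this hunk applied, SvnBlame keeps only the SVN-specific pieces and inherits the shared _filter/_execute machinery from VcBlame. The prefix format string is also corrected here: in Python %-formatting the mapping key must directly follow the %, so the old '%-4(rev)s' was malformed, while the new '%(rev)-4s' left-justifies the revision in a 4-character field. Assembled from the context and added lines above, the refactored class reads roughly as follows (a sketch, not the verbatim file):

    class SvnBlame(VcBlame):
        '''show blame output for the current version in SVN'''
        line_re = re.compile('^ *(?P<rev>\d+) *(?P<user>[a-zA-Z0-9_]+) *(?P<date>[-0-9]+) *(?P<time>[:0-9]+) *(?P<tz>-\d{4}) *\((?P<vdate>[^\)]+)\) (?P<content>.*)\n$')
        prefix_fmt = '[b:d:*]%(rev)-4s [c:d:*]%(user)-10s [b:d:*]%(date)10s[d:d:*]'
        _is_method = True

        def _open_pipe(self, w, **vargs):
            # spawn 'svn blame -v' on the buffer's file; VcBlame._execute consumes stdout
            cmd = ("/usr/bin/svn", 'blame', '-v', w.buffer.path)
            return Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
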
@@ -0,0 +1,48 @@
+from method import Method
+import buffer.color
+import util
+import lex
+
+class VcBlame(Method):
+    _is_method = False
+    line_re = None
+    prefix_fmt = None
+    def _filter(self, line):
+        m = self.line_re.match(line)
+        if not m:
+            raise SvnException("couldn't parse %r" % line)
+        return m.groupdict()
+    def _open_pipe(self, w, **vargs):
+        raise Exception('unimplemented')
+    def _execute(self, w, **vargs):
+        if not hasattr(w.buffer, 'path'):
+            w.set_error("Buffer has no corresponding file")
+            return
+
+        pipe = self._open_pipe(w, **vargs)
+        groups = [self._filter(line) for line in pipe.stdout]
+        status = pipe.wait() >> 8
+
+        if w.mode.grammar:
+            lexer = lex.Lexer(w.mode, w.mode.grammar)
+            tokens = list(lexer.lex([d['content'] for d in groups]))
+            for t in tokens:
+                groups[t.y].setdefault('tokens', [])
+                groups[t.y]['tokens'].append(t)
+
+        lines = []
+        for d in groups:
+            if d['tokens']:
+                suffix = ''
+                for t in d['tokens']:
+                    code = buffer.color.get_cbuf_code(*t.color)
+                    suffix += code + util.cbuf_escape(t.string)
+            else:
+                suffix = d['content'] + '\n'
+            lines.append(self.prefix_fmt % d + ' ' + suffix)
+        data = ''.join(lines)
+
+        if status == 0:
+            w.application.color_data_buffer("*Blame*", data, switch_to=True)
+        else:
+            w.set_error("There was an error (%s)" % (status))
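
The point of the new base class (imported above as method.vc) is that a backend only has to supply a line_re whose named groups feed prefix_fmt plus a 'content' group, a prefix_fmt, _is_method = True, and an _open_pipe that spawns the blame command; parsing, optional lexing, and colorized buffer output all come from VcBlame._execute. As a sketch of what a second backend could look like, a hypothetical CVS method might be written as below (CvsBlame, its regex, and the assumed 'cvs annotate' output shape are illustrative assumptions, not part of this commit):

    # hypothetical sketch -- not part of this commit
    import re
    from subprocess import Popen, PIPE
    from method.vc import VcBlame

    class CvsBlame(VcBlame):
        '''show blame output for the current version in CVS (illustrative)'''
        # assumes 'cvs annotate' lines like "1.42    (user    01-Jan-08): content"
        line_re = re.compile('^(?P<rev>[0-9.]+) +\((?P<user>[a-zA-Z0-9_]+) +(?P<date>[^\)]+)\): (?P<content>.*)\n$')
        prefix_fmt = '[b:d:*]%(rev)-8s [c:d:*]%(user)-10s [b:d:*]%(date)11s[d:d:*]'
        _is_method = True

        def _open_pipe(self, w, **vargs):
            # VcBlame._execute reads this pipe's stdout line by line
            cmd = ("/usr/bin/cvs", 'annotate', w.buffer.path)
            return Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
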