#!/usr/bin/python

import sys

import lex2, lex2_perl, lex2_python, highlight2

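# Map readable color names to ANSI SGR escape sequences: '\033[30m'..'\033[37m'
# select the eight normal foreground colors, '\033[3N;1m' their bold/bright
# variants, and '\033[0m' resets all attributes.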
color_list = []
color_list.extend(['\033[3%dm' % x for x in range(0, 8)])
color_list.extend(['\033[3%d;1m' % x for x in range(0, 8)])
color_list.append('\033[0m')

color_names = [
    'black', 'dred', 'dgreen', 'brown', 'dblue', 'dpurple', 'dcyan', 'lgrey',
    'dgrey', 'lred', 'lgreen', 'yellow', 'lblue', 'lpurple', 'lcyan', 'white',
    'unset',
]

color_dict = {}
for i in range(0, len(color_list)):
    color_dict[color_names[i]] = color_list[i]

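# Per-grammar mapping from token names to the color names defined above; the
# token names are assumed to match what the corresponding grammars in
# lex2_perl / lex2_python emit.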
token_colors = {
    'perl': {
        # basic stuff
        'escaped': 'lpurple',
        'null': 'white',
        'delimiter': 'white',
        'sub': 'lcyan',
        'number': 'white',
        'operator': 'white',
        'endblock': 'lred',
        'keyword': 'lpurple',
        'scalar': 'yellow',
        'array': 'yellow',
        'deref': 'yellow',
        'hash': 'yellow',
        'hash_key': 'lgreen',
        'comment': 'lred',
        'function': 'lcyan',
        'builtin': 'lpurple',
        'method': 'lcyan',
        'bareword': 'white',
        'label': 'lcyan',
        'package': 'lcyan',
        'class': 'lcyan',
        'use': 'lcyan',

        # heredoc
        'heredoc1.start': 'lgreen',
        'heredoc1.null': 'lgreen',
        'heredoc1.end': 'lgreen',
        'heredoc2.start': 'lgreen',
        'heredoc2.null': 'lgreen',
        'heredoc2.end': 'lgreen',
        'eval_heredoc.start': 'lcyan',
        'eval_heredoc.null': 'lcyan',
        'eval_heredoc.end': 'lcyan',

        # pod
        'pod.start': 'lred',
        'pod.null': 'lred',
        'pod.entry': 'lpurple',
        'pod.end': 'lred',

        # "" strings
        'string1.start': 'lgreen',
        'string1.null': 'lgreen',
        'string1.escaped': 'lpurple',
        'string1.deref': 'yellow',
        'string1.end': 'lgreen',

        # '' strings
        'string2.start': 'lgreen',
        'string2.null': 'lgreen',
        'string2.end': 'lgreen',

        # `` strings
        'evalstring': 'lcyan',

        # quoted region
        'quoted': 'lcyan',
        'quoted.start': 'lcyan',
        'quoted.null': 'lcyan',
        'quoted.end': 'lcyan',

        # match regex
        'match.start': 'lcyan',
        'match.end': 'lcyan',
        'match.null': 'lcyan',

        # replace regex
        'replace.start': 'lcyan',
        'replace.middle': 'lcyan',
        'replace.end': 'lcyan',
        'replace.null': 'lcyan',

        # translate regex
        'translate.start': 'lpurple',
        'translate.middle': 'lpurple',
        'translate.end': 'lpurple',
        'translate.null': 'lpurple',
    },

    'python': {
        'keyword': 'lcyan',
        'builtin_method': 'lcyan',
        'methodname': 'lblue',
        'classname': 'lgreen',

        'string.start': 'lgreen',
        'string.null': 'lgreen',
        'string.escaped': 'lpurple',
        'string.octal': 'lpurple',
        'string.format': 'yellow',
        'string.end': 'lgreen',

        'integer': 'lred',
        'float': 'lred',
        'imaginary': 'lred',

        'tq_string.start': 'lgreen',
        'tq_string.null': 'lgreen',
        'tq_string.end': 'lgreen',

        'docstring.start': 'lgreen',
        'docstring.null': 'lgreen',
        'docstring.end': 'lgreen',

        'comment': 'lred',
        'continuation': 'lred',
        #'operator': 'yellow',
        #'delimiter': 'lpurple',
        'system_identifier': 'lcyan',
        #'bound method': color.build('yellow', 'default'),
        'import': 'lpurple',
        #'bizzaro': 'lpurple',
    },
}

grammars = {
    'perl': lex2_perl.PerlGrammar,
    'python': lex2_python.PythonGrammar,
}

import optparse

parser = optparse.OptionParser()
parser.add_option('-d', '--dump', dest='dump', action='store_true', default=False)
parser.add_option('-g', '--grammar', dest='grammar', action='store', default='python')
parser.add_option('-n', '--normal', dest='normal', action='store_true', default=False)

(opts, args) = parser.parse_args()

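# Example invocations (the file name highlight_test.py is only an assumed name
# for this script, not taken from the source):
#   python highlight_test.py -n -g python foo.py    # highlight foo.py and display it
#   python highlight_test.py -n -d -g perl foo.pl   # dump the token stream instead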
for path in args:
    f = open(path, 'r')
    data = f.read()
    f.close()

    lines = data.split('\n')
    lexer = lex2.Lexer('lexer', grammars[opts.grammar]())

    h = highlight2.Highlighter(lexer)
    h.highlight(lines)

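    # The branches below exercise the highlighter in three ways: with -n the
    # file is simply highlighted and displayed (or dumped with -d); the
    # disabled 'elif False' branch edits lines as if the region from (5, 9)
    # to (7, 14) had been deleted and then calls h.relex_del(); the default
    # branch edits lines as if several new lines had been inserted at (5, 9)
    # and then calls h.relex_add().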
    if opts.normal:
        if opts.dump:
            h.dump()
        else:
            h.display(token_colors[opts.grammar])
    elif False:
        (y1, x1) = (5, 9)
        (y2, x2) = (7, 14)
        #(y2, x2) = (82, 2)
        for i in range(y1 + 1, y2):
            del lines[y1 + 1]
        lines[y1] = lines[y1][0:x1] + lines[y1 + 1][x2:]
        del lines[y1 + 1]

        h.relex_del(lines, y1, x1, y2, x2)
        #h.update_del(lines, y1, x1, y2, x2)
        #h.highlight(lines)
        if opts.dump:
            h.dump()
        else:
            h.display(token_colors[opts.grammar])
    else:
        #newlines = ['one two three']
        newlines = ['one two three', 'cat', 'dog', 'del self.foo[3]', 'oops']
        (y1, x1) = (5, 9)

        if len(newlines) > 1:
            lines.insert(y1 + 1, newlines[-1] + lines[y1][x1:])
            lines[y1] = lines[y1][:x1] + newlines[0]
            for i in range(1, len(newlines) - 1):
                newline = newlines[i]
                lines.insert(y1 + i, newline)
        else:
            lines[y1] = lines[y1][:x1] + newlines[0] + lines[y1][x1:]

        h.relex_add(lines, y1, x1, newlines)
        #h.update_add(lines, y1, x1, newlines)
        #h.highlight(lines)
        if opts.dump:
            h.dump()
        else:
            h.display(token_colors[opts.grammar])