parent 8d33c8b0e2
commit 33dc4ff2e3
highlight2.py

@@ -1,14 +1,35 @@
+import sys
+
+color_list = []
+color_list.extend(['\033[3%dm' % x for x in range(0, 8)])
+color_list.extend(['\033[3%d;1m' % x for x in range(0, 8)])
+color_list.append('\033[0m')
+
+color_names = [
+    'black', 'dred', 'dgreen', 'brown', 'dblue', 'dpurple', 'dcyan', 'lgrey',
+    'dgrey', 'lred', 'lgreen', 'yellow', 'lblue', 'lpurple', 'lcyan', 'white',
+    'unset',
+]
+
+color_dict ={}
+for i in range(0, len(color_list)):
+    color_dict[color_names[i]] = color_list[i]
+
 class Highlighter:
     def __init__(self, lexer):
         self.lexer = lexer
         self.tokens = []
 
-    def display(self):
+    def display(self, token_colors={}, debug=False):
         for group in self.tokens:
             for token in group:
+                if token.name in token_colors:
+                    color_name = token_colors[token.name]
+                    sys.stdout.write(color_dict[color_name])
+                elif debug:
+                    raise Exception, "no highlighting for %r" % token.name
                 sys.stdout.write(token.string)
             sys.stdout.write('\n')
-        sys.stdout.write('\n')
 
     def highlight(self, lines):
         self.tokens = [[] for l in lines]
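The block added at the top of highlight2.py builds ANSI escape sequences: '\033[3%dm' yields the eight standard foreground colors (SGR codes 30-37), '\033[3%d;1m' the same colors with the bold/bright attribute, and '\033[0m' resets the terminal. A minimal standalone sketch of how the resulting color_dict is meant to be used; the demo string and the zip-based construction are illustrative, not the commit's code:

    import sys

    color_list = ['\033[3%dm' % x for x in range(0, 8)]       # normal colors (30-37)
    color_list += ['\033[3%d;1m' % x for x in range(0, 8)]    # bold/bright variants
    color_list.append('\033[0m')                               # reset attribute
    color_names = ['black', 'dred', 'dgreen', 'brown', 'dblue', 'dpurple',
                   'dcyan', 'lgrey', 'dgrey', 'lred', 'lgreen', 'yellow',
                   'lblue', 'lpurple', 'lcyan', 'white', 'unset']
    color_dict = dict(zip(color_names, color_list))

    # write the word "warning" in bright red, then reset the terminal color
    sys.stdout.write(color_dict['lred'] + 'warning' + color_dict['unset'] + '\n')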
lex2.py (4 changed lines)

@@ -221,6 +221,10 @@ class DualRegionRule(Rule):
 
 class Grammar:
     rules = []
+    def __init__(self):
+        for rule in self.rules:
+            if hasattr(rule, 'grammar') and rule.grammar is None:
+                rule.grammar = self
 
 class Lexer:
     def __init__(self, name, grammar):
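The new Grammar.__init__ appears to support the RegionRules added to lex2_perl.py below, which are constructed with grammar=None: on instantiation, any rule whose grammar attribute is None is pointed back at the grammar instance itself, so on this reading those regions recurse into the full rule set of their owning grammar. A minimal sketch of that effect, with a hypothetical SubGrammar and a stub RegionRule standing in for the real classes:

    class Rule(object):
        pass

    class RegionRule(Rule):
        # stub: only the attributes relevant to the wiring are kept
        def __init__(self, name, start, grammar, end):
            self.name, self.start, self.grammar, self.end = name, start, grammar, end

    class Grammar(object):
        rules = []
        def __init__(self):
            for rule in self.rules:
                if hasattr(rule, 'grammar') and rule.grammar is None:
                    rule.grammar = self

    class SubGrammar(Grammar):
        rules = [RegionRule(name=r'paren', start=r'\(', grammar=None, end=r'\)')]

    g = SubGrammar()
    assert g.rules[0].grammar is g   # the grammar=None rule now refers to its owner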
lex2_perl.py (32 changed lines)

@@ -115,11 +115,11 @@ class PerlGrammar(Grammar):
            pattern=r"(?<!->)(?:STDIN|STDERR|STDOUT|and|cmp|continue|do|else|elsif|eq|eval|foreach|for|if|last|my|next|ne|not|or|our|package|require|return|sub|undef|unless|until|use|while)(?![a-zA-Z0-9_])",
        ),
        PatternRule(
-           name=r'hash_bareword_index',
+           name=r'bareword_hash_index',
            pattern=r'(?<={) *[A-Za-z0-9_]+(?=})',
        ),
        PatternRule(
-           name=r'literal_hash_bareword_index',
+           name=r'bareword_hash_key',
            pattern=r'[A-Za-z0-9_]+(?= *=>)',
        ),
        PatternRule(
@@ -226,7 +226,7 @@ class PerlGrammar(Grammar):
        ),
        PatternRule(
            name=r'sub',
-           pattern=r"(?<=sub )[a-zA-Z_][a-zA-Z_0-9]*(?=[ \n]*{)",
+           pattern=r"(?<=sub )[a-zA-Z_][a-zA-Z_0-9]*(?= *{)",
        ),
        PatternRule(
            name=r'use',
@@ -258,11 +258,33 @@ class PerlGrammar(Grammar):
        ),
        PatternRule(
            name=r'bareword_method',
-           pattern=r"(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]* *\(",
+           pattern=r"(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*(?= *\()",
+       ),
+       #PatternRule(
+       #    name=r'delimiter',
+       #    pattern=r"\(|\)|\[|\]|{|}|,|;|->|=>|=|\?|(?<!:):(?!=:)",
+       #),
+       RegionRule(
+           name=r'paren',
+           start=r'\(',
+           grammar=None,
+           end=r'\)',
+       ),
+       RegionRule(
+           name=r'brace',
+           start=r'{',
+           grammar=None,
+           end=r'}',
+       ),
+       RegionRule(
+           name=r'bracket',
+           start=r'\[',
+           grammar=None,
+           end=r'\]',
        ),
        PatternRule(
            name=r'delimiter',
-           pattern=r"\(|\)|\[|\]|{|}|,|;|->|=>|=|\?|(?<!:):(?!=:)",
+           pattern=r",|;|->|=>|=|\?|(?<!:):(?!=:)",
        ),
        PatternRule(
            name=r'unary_operator',
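The bareword_method change above swaps a consuming " *\(" for a lookahead "(?= *\()". With the old pattern the opening parenthesis was swallowed by the bareword_method token; with the new one it stays in the input, where the newly added paren RegionRule (and the slimmed-down delimiter rule) can pick it up. A quick check of the two patterns with Python's re module; the sample line is illustrative:

    import re

    line = 'Foo::Bar::baz(1, 2)'
    old = r"(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]* *\("
    new = r"(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*(?= *\()"

    assert re.match(old, line).group() == 'Foo::Bar::baz('   # '(' consumed by the token
    assert re.match(new, line).group() == 'Foo::Bar::baz'    # '(' left for the paren rule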
test3.py (16 changed lines)

@@ -1,5 +1,5 @@
 import sys
-import lex2, lex2_perl
+import lex2, lex2_perl, highlight2
 
 color_list = []
 color_list.extend(['\033[3%dm' % x for x in range(0, 8)])
@@ -87,15 +87,7 @@ for path in paths:
 
     grammar = lex2_perl.PerlGrammar()
     lexer = lex2.Lexer('lexer', grammar)
+    highlighter = highlight2.Highlighter(lexer)
 
-    lexer.lex(lines)
-    y = 0
-    for token in lexer:
-        while token.y > y:
-            sys.stdout.write('\n')
-            y += 1
-        #color_name = token_colors.get(token.name, 'white')
-        color_name = token_colors[token.name]
-        sys.stdout.write(color_dict[color_name])
-        sys.stdout.write(token.string)
-        sys.stdout.write('\n')
+    highlighter.highlight(lines)
+    highlighter.display(token_colors)
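With this change test3.py no longer walks the token stream itself: the per-token loop moves into Highlighter.display, and dropping the explicit lexer.lex call suggests highlight() now drives the lexer internally. A minimal sketch of the resulting flow; the input filename and the token_colors entries are illustrative ('sub' and 'bareword_method' are token names from the Perl grammar above, 'lcyan' and 'lgreen' are color names from highlight2.py):

    import lex2, lex2_perl, highlight2

    grammar     = lex2_perl.PerlGrammar()
    lexer       = lex2.Lexer('lexer', grammar)
    highlighter = highlight2.Highlighter(lexer)

    lines = open('example.pl').readlines()
    token_colors = {'sub': 'lcyan', 'bareword_method': 'lgreen'}

    highlighter.highlight(lines)        # tokenize the buffer, one token group per line
    highlighter.display(token_colors)   # write the colorized source to stdout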