parent 75d354c2f5
commit 683afdf129
@@ -1,5 +1,6 @@
-import glob, os, pwd
-import method, util
+import glob
+import os
+import util
 
 _completers = {}
 
@@ -16,7 +17,6 @@ def find_common_string(candidates):
     elif len(candidates) == 1:
         return candidates[0]
     else:
-        done = False
         index = 0
         test = candidates[0]
         while True:
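(For reference: find_common_string reduces the candidate list to its longest shared prefix, and the `done` flag was dead code once the loop became `while True`. A minimal equivalent, assuming that prefix contract, is the stdlib one-liner:

    import os.path

    def common_prefix(candidates):
        # longest shared leading string across all completion candidates,
        # which is what find_common_string computes character by character
        if not candidates:
            return ''
        return os.path.commonprefix(candidates)

    # common_prefix(['complete', 'completer', 'completion']) == 'complet'
)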
@@ -30,7 +30,7 @@ class Completer(object):
     def __init__(self, application):
         self.application = application
     def get_candidates(self, s, w=None):
-        assert "Not implemented"
+        raise Exception("Not implemented")
     def tab_string(self, s, w=None):
         '''returns a tuple of three things:
         1. the new string
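The replaced line was a no-op rather than a failing assertion: `assert "Not implemented"` asserts a non-empty string, which is always truthy, so an un-overridden get_candidates silently returned None. Raising is the real fix; a sketch of the now-conventional spelling (not what this commit uses) would be:

    class Completer(object):
        def get_candidates(self, s, w=None):
            # abstract method: subclasses must override this
            raise NotImplementedError('get_candidates')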
highlight.py (45 lines changed)
@@ -1,10 +1,7 @@
-import re, sys
+import sys
 from lex import Token
 
 color_list = []
-color_list.extend(['\033[3%dm' % x for x in range(0, 8)])
-color_list.extend(['\033[3%d;1m' % x for x in range(0, 8)])
-color_list.extend(['\033[0m'])
 
 color_names = [
     'black', 'dred', 'dgreen', 'brown', 'dblue', 'dpurple', 'dcyan', 'lgrey',
@@ -13,23 +10,29 @@ color_names = [
 ]
 
 color_dict ={}
-for i in range(0, len(color_list)):
-    color_dict[color_names[i]] = color_list[i]
-
-def token_match(self, token, name, data=None):
-    return token.fqname() == name and data is None or token.string == data
-def token_match2(self, token, name, regex):
-    return token.fqname() == name and regex.match(token.string)
-def token_vmatch(self, token, *pairs):
-    for (name, data) in pairs:
-        if token_match(token, name, data):
-            return True
-    return False
-def token_vmatch2(self, token, *pairs):
-    for (name, regex) in pairs:
-        if token_match(token, name, regex):
-            return True
-    return False
+def setup():
+    color_list.extend(['\033[3%dm' % x for x in range(0, 8)])
+    color_list.extend(['\033[3%d;1m' % x for x in range(0, 8)])
+    color_list.extend(['\033[0m'])
+    for i in range(0, len(color_list)):
+        color_dict[color_names[i]] = color_list[i]
+setup()
+
+#def token_match(self, token, name, data=None):
+#    return token.fqname() == name and data is None or token.string == data
+#def token_match2(self, token, name, regex):
+#    return token.fqname() == name and regex.match(token.string)
+#def token_vmatch(self, token, *pairs):
+#    for (name, data) in pairs:
+#        if token_match(token, name, data):
+#            return True
+#    return False
+#def token_vmatch2(self, token, *pairs):
+#    for (name, regex) in pairs:
+#        if token_match(token, name, regex):
+#            return True
+#    return False
 
 class Highlighter(object):
     def __init__(self, lexer):
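Moving the module-level loops into setup() groups the import-time side effects in one place, but setup() still mutates color_list in place, so a second explicit call would append duplicate escape codes and then index past the end of color_names. A guard would make it idempotent; hypothetical, not part of this commit:

    def setup():
        # build the name -> ANSI escape table exactly once
        if color_dict:
            return
        color_list.extend(['\033[3%dm' % x for x in range(0, 8)])    # normal colors
        color_list.extend(['\033[3%d;1m' % x for x in range(0, 8)])  # bold/bright colors
        color_list.extend(['\033[0m'])                               # reset
        for i in range(0, len(color_list)):
            color_dict[color_names[i]] = color_list[i]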
@@ -266,8 +269,6 @@ class Highlighter(object):
         for t in self.tokens[y1]:
             tx1 = t.x
             tx2 = t.x + len(t.string)
-            ty = t.y
-            ts = t.string
             if tx2 <= x1:
                 # '*| ' before the insertion
                 newtokens[y1].append(t)
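The dropped ty/ts locals were simply unused. What remains is an interval classification of each old token against the edited region; `tx2 <= x1` is the "entirely before the change" case, where the token survives unchanged. Schematically (a sketch of the tests, not the method itself):

    def classify(tx1, tx2, x1, x2):
        # token occupies [tx1, tx2); the edit touches [x1, x2)
        if tx2 <= x1:
            return 'before the edit: keep as-is'
        elif tx1 >= x2:
            return 'after the edit: keep, with coordinates shifted'
        else:
            return 'overlaps the edit: must be re-lexed'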
@@ -14,6 +14,7 @@ def get_speller():
         _speller = Speller()
     return _speller
 def free():
     global _speller
     if _speller:
+        _speller.stop()
         _speller = None
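With the stop() call added, the module's singleton lifecycle is complete: get_speller() creates the Speller lazily, and free() shuts it down (presumably an external spell-checking process) before dropping the reference, so the child isn't leaked. A usage sketch:

    speller = get_speller()   # first call constructs the Speller
    # ... check words ...
    free()                    # stop it and clear the singleton
    speller = get_speller()   # safe: a fresh Speller is built on demand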
keyinput.py (52 lines changed)
@@ -1,8 +1,9 @@
-import curses, sys, termios
+import sys
+import termios
 
-# this is a huge map of ASCII keycode sequences it should include all
-# the "standard" ones for a US 104 key keyboard. this module may need
-# to support some kind of subclassing in order to be localizable.
+# this is a huge map of ASCII keycode sequences it should include all the
+# "standard" ones for a US 104 key keyboard. this module may need to support
+# some kind of subclassing in order to be localizable.
 #
 # of course, i'd be crazy to try to localize a curses app
 MAP = { 0: "C-@",
@@ -75,36 +76,35 @@ MAP = { 0: "C-@",
         32: "SPACE",
        127: "DELETE" }
 
-# add the meta/control-char combinations
-for key in MAP.iterkeys():
-    if key == 27:
-        # we don't want to define ESC-ESC
-        continue
-    MAP[27][key] = "M-%s" % (MAP[key])
+def setup():
+    # add the meta/control-char combinations
+    for key in MAP:
+        if key == 27:
+            # we don't want to define ESC-ESC
+            continue
+        MAP[27][key] = "M-%s" % (MAP[key])
 
-# add meta character stuff
-for i in range(33, 126):
-    if i == 79 or i == 91:
-        # these keys are used in other sequences
-        continue
-    # 7bit meta sequences
-    MAP[27][i] = "M-%s" % (chr(i))
-    # 8bit meta characters
-    MAP[128+i] = "M-%s" % (chr(i))
-MAP[255] = "M-DELETE"
+    # add meta character stuff
+    for i in range(33, 126):
+        if i == 79 or i == 91:
+            # these keys are used in other sequences
+            continue
+        # 7bit meta sequences
+        MAP[27][i] = "M-%s" % (chr(i))
+        # 8bit meta characters
+        MAP[128+i] = "M-%s" % (chr(i))
+    MAP[255] = "M-DELETE"
+setup()
 
 def disable_control_chars():
-    #terminal settings are for chumps
+    # terminal settings are for chumps
     attr = termios.tcgetattr(sys.stdin)
 
     global OLD_ATTR
     OLD_ATTR = attr
 
     # don't listen to allow input START/STOP (C-s,C-q)
     attr[0] = attr[0] & ~(termios.IXON | termios.IXOFF)
 
-    # remove as many signal handlers as we can; we want to
-    # leave C-d and C-z probably
+    # remove as many signal handlers as we can; we want to leave C-d and C-z
+    # probably
     for pos in range(0,len(attr[6])):
         if pos == termios.VEOF or pos == termios.VSUSP:
             continue
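From the assignments above, MAP maps single keycodes to key names, and MAP[27] is itself a table keyed by the byte following ESC; that is how a 7-bit sequence (ESC then char) and the 8-bit form (char + 128) both resolve to the same M- name. A toy decoder over that structure (a sketch; the real module feeds bytes in from curses):

    def decode(seq, keymap):
        # seq is a list of raw keycodes for one keypress
        if seq[0] == 27 and len(seq) > 1:
            return keymap[27].get(seq[1])        # 7-bit meta: ESC + key
        entry = keymap.get(seq[0])
        return entry if isinstance(entry, str) else None

    # decode([27, ord('x')], MAP) -> 'M-x'
    # decode([ord('x') + 128], MAP) -> 'M-x'

OLD_ATTR, saved in disable_control_chars(), presumably exists so the original settings can be restored later with termios.tcsetattr(sys.stdin, termios.TCSANOW, OLD_ATTR).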
lex.py (7 lines changed)
@@ -1,5 +1,5 @@
-import curses, re
-import regex, util
+import re
+import regex
 from point import Point
 
 def escape(s):
@@ -475,7 +475,6 @@ class OverrideRegionRule(RegionRule):
 
 class Grammar(object):
     rules = []
-grammar = Grammar()
 
 class Lexer(object):
     def __init__(self, mode, grammar):
@@ -490,7 +489,7 @@ class Lexer(object):
         return self.lines[self.y] + '\n'
     def lex_all(self, lines):
         lextokens = [[] for l in lines]
-        for t in lexer.lex(lines):
+        for t in self.lex(lines):
            lextokens[t.y].append(t)
         return lextokens
 
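The last hunk fixes an outright NameError: `lexer` is never bound inside the method, so lex_all() would crash the first time it ran; `self.lex(lines)` is the generator it meant to drain. The result shape, sketched:

    # lexer = Lexer(mode, grammar)
    # lextokens = lexer.lex_all(['x = 1', 'y = 2'])
    # lextokens[0] -> the tokens with t.y == 0 (first line)
    # lextokens[1] -> the tokens with t.y == 1 (second line)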