parent feabc84e55
commit c6514c3032

mode_sql.py (26 lines changed)

@@ -1,10 +1,10 @@
-import color, mode2
+import color, mode2, tab2
 from lex2 import Grammar, PatternRule, NocasePatternRule, RegionRule
 from mode_python import StringGrammar

 class SqlGrammar(Grammar):
     rules = [
-        PatternRule(r'comment', r'--.*$'),
+        PatternRule(r'comment', r'--.*\n$'),
         RegionRule(r'comment', '/\*', Grammar, '\*/'),
         PatternRule(r'delimiter', r'[();,\.:\$\[\]]'),
         NocasePatternRule(r'attribute', r'(?:check|exists|unique|not null|default|primary key|minvalue|foreign key|references)(?![A-Za-z0-9_])'),
@@ -16,10 +16,32 @@ class SqlGrammar(Grammar):
         RegionRule(r'string', "'", StringGrammar, "'"),
         RegionRule(r'quoted', '"', StringGrammar, '"'),
         PatternRule(r'bareword', r'[A-Za-z0-9_]+'),
+        PatternRule(r'empty', r'^ *\n$'),
+        PatternRule(r'eol', r'; *\n'),
+        PatternRule(r'continuation', r'\n'),
         ]

+class SqlTabber(tab2.StackTabber):
+    def is_base(self, y):
+        if y == 0:
+            return True
+        highlighter = self.mode.window.buffer.highlights[self.mode.name()]
+        if not highlighter.tokens[y]:
+            return False
+        t = highlighter.tokens[y][0]
+        return t.name == 'function'
+    def _handle_other_token(self, currlvl, y, i):
+        token = self.get_token(y, i)
+        if self.is_rightmost_token(y, i):
+            if not self._empty() and token.name == 'continuation':
+                self._opt_append('cont', currlvl + 4)
+            elif token.name == 'eol':
+                self._opt_pop("cont")
+        return currlvl
+
 class Sql(mode2.Fundamental):
     grammar = SqlGrammar
+    tabbercls = SqlTabber
     opentokens = ('delimiter',)
     opentags = {'(': ')', '[': ']', '{': '}'}
     closetokens = ('delimiter',)
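For context: tab2.StackTabber, pulled in by the new import, is the editor's stack-based indenter; its implementation is not part of this diff. The new empty/eol/continuation rules give it line-ending tokens to react to, and _handle_other_token then keeps a 'cont' entry on the tab stack while a statement is still open: a line whose rightmost token is a bare continuation pushes 'cont' at currlvl + 4, and a line ending in a terminating semicolon (the eol token) pops it again. The snippet below is a minimal standalone sketch of that push/pop behaviour, assuming _opt_append/_opt_pop act as conditional push and pop; MiniStackTabber and its methods are illustrative only, not the real tab2 API.

# Minimal standalone sketch of the 'cont' push/pop idea used by SqlTabber.
# MiniStackTabber is hypothetical -- it only mimics the helpers named above
# and is not the real tab2.StackTabber API.
class MiniStackTabber:
    def __init__(self):
        self.stack = []                    # entries are (marker, level) pairs

    def _empty(self):
        return not self.stack

    def _opt_append(self, marker, level):
        # push (marker, level) unless that marker is already on top
        if self._empty() or self.stack[-1][0] != marker:
            self.stack.append((marker, level))

    def _opt_pop(self, marker):
        # pop only when the top of the stack carries this marker
        if not self._empty() and self.stack[-1][0] == marker:
            self.stack.pop()

    def level(self, base=0):
        return self.stack[-1][1] if self.stack else base

tabber = MiniStackTabber()
currlvl = 0
tabber._opt_append('cont', currlvl + 4)    # line ended without ';': statement continues
print(tabber.level())                      # 4: continuation lines get one extra level
tabber._opt_pop('cont')                    # line ended with ';' (eol token): statement done
print(tabber.level())                      # 0: back to the base level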
tab_xml.py (48 lines removed)

@@ -1,48 +0,0 @@
-import tab
-
-class XMLTabber(tab.TokenStackTabber):
-    close_tags = {'}': '{',
-                  ')': '(',
-                  ']': '['}
-
-    def stack_append_const(self, c, n):
-        if self.tab_stack[-1][0] != c:
-            self.stack_append((c, self.tab_stack[-1][1] + n))
-    def stack_pop_const(self, *c_args):
-        if self.tab_stack[-1][0] in c_args:
-            self.stack_pop()
-
-    def base_indentation_level(self, y):
-        return False
-
-    def handle_token(self, prev_token, token, next_token, y=None):
-        buffer = self.mode.window.buffer
-        name = token.name
-        s = token.string
-
-        if name == 'opentag':
-            if next_token is None:
-                x = len(s) + 2
-            else:
-                x = next_token.start - token.start + 1
-            # x is an offset from the current indention level
-            self.stack_append_const('cont', x)
-        elif name == 'gtc':
-            self.stack_pop_const('cont')
-            if prev_token is None:
-                self.line_depth = self.tab_stack[-1][1]
-        elif name == 'gt':
-            self.stack_pop_const('cont')
-            if prev_token is None:
-                self.line_depth = self.tab_stack[-1][1]
-            if self.tab_stack[-1][0] == 'close':
-                self.stack_pop_const('close')
-            else:
-                self.stack_append(('tag', self.tab_stack[-1][1] + 4))
-        elif name == 'ltc':
-            self.stack_pop_const('cont')
-            self.stack_pop_const('tag')
-            l = self.tab_stack[-1][1]
-            self.stack_append(('close', l))
-            if prev_token is None:
-                self.line_depth = l
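The removed XMLTabber was built on the older tab.TokenStackTabber base, which is also not shown in this diff. Its two small helpers did conditional pushes and pops on the tab stack, the same idea the new tab2-based tabbers use. Below is a bare-bones illustration of just those helpers, with a plain list standing in for the real tab stack; the 'cont' marker and the offset of 6 are made-up example values.

# Illustration of the stack_append_const / stack_pop_const helpers from the
# removed XMLTabber, with a plain list standing in for the real tab stack.
tab_stack = [('base', 0)]

def stack_append_const(c, n):
    # push (c, top_level + n) unless the top marker is already c
    if tab_stack[-1][0] != c:
        tab_stack.append((c, tab_stack[-1][1] + n))

def stack_pop_const(*c_args):
    # pop only if the top marker is one of c_args
    if tab_stack[-1][0] in c_args:
        tab_stack.pop()

stack_append_const('cont', 6)      # e.g. after an opening "<tag " with attributes
stack_append_const('cont', 6)      # repeated call is a no-op ("const")
print(tab_stack)                   # [('base', 0), ('cont', 6)]
stack_pop_const('cont', 'tag')     # pops because the top marker is 'cont'
print(tab_stack)                   # [('base', 0)]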