parent acfcb09dea
commit 0d085f4b4d
@@ -2,6 +2,15 @@
 -- these tables are distinct from the esub tables
 -- they are used to proxy communications between the IRS and the BDS
 
+create function foo() returns integer as '
+declare
+x integer := 30;
+begin
+raise notice ''blarg''; --urk
+RETURN x;
+end;
+' language plgpsql;
+
 -- this table stores all the various username/password pairs we use
 -- the one with the most recent date is assumed to be current
 CREATE TABLE e2_password (

lex2.py
@@ -24,6 +24,7 @@ class Token(object):
             names = self.parent.domain()
         else:
             names = []
+        if self.name != 'middle':
             names.append(self.rule.name)
         return names
     def fqlist(self):
@@ -301,8 +302,17 @@ class DualRegionRule(Rule):
         self.middle = middle
         self.grammar2 = grammar2
         self.end = end
-        self.start_re = re.compile(start)
+        #self.start_re = re.compile(start)
+        self.start_re = self._compile_start()
         self._set_group(group)
+
+    def _compile_start(self):
+        return re.compile(self.start)
+    def _compile_middle(self, d):
+        return re.compile(self.middle % d)
+    def _compile_end(self, d):
+        return re.compile(self.end % d)
+
     def _add_from_regex(self, name, lexer, parent, m, matchd={}):
         s = m.group(0)
         token = self.make_token(lexer, s, name, parent, matchd)
@@ -343,7 +353,8 @@ class DualRegionRule(Rule):
         d1 = parent.matchd
         assert parent.name == 'start'
         null_t = None
-        middle_re = re.compile(self.middle % d1)
+        #middle_re = re.compile(self.middle % d1)
+        middle_re = self._compile_middle(d1)
         d2 = {}
 
         # ok, so as long as we aren't done (we haven't found an end token),
@@ -428,7 +439,8 @@ class DualRegionRule(Rule):
         #assert parent.name == 'middle'
         d3 = parent.matchd
         null_t = None
-        end_re = re.compile(self.end % d3)
+        #end_re = re.compile(self.end % d3)
+        end_re = self._compile_end(d3)
 
         # ok, so as long as we aren't done (we haven't found an end token),
         # keep reading input
@@ -509,6 +521,10 @@ class Grammar:
         for rule in self.rules:
             if hasattr(rule, 'grammar') and rule.grammar is None:
                 rule.grammar = self
+            if hasattr(rule, 'grammar1') and rule.grammar is None:
+                rule.grammar = self
+            if hasattr(rule, 'grammar2') and rule.grammar is None:
+                rule.grammar = self
 
 grammars = {}
 grammars['null'] = Grammar()
@@ -600,3 +616,10 @@ class Lexer:
         else:
             raise StopIteration
 
+class NocaseDualRegionRule(DualRegionRule):
+    def _compile_start(self):
+        return re.compile(self.start, re.IGNORECASE)
+    def _compile_middle(self, d):
+        return re.compile(self.middle % d, re.IGNORECASE)
+    def _compile_end(self, d):
+        return re.compile(self.end % d, re.IGNORECASE)
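
The change above routes every pattern compilation in DualRegionRule through overridable _compile_start/_compile_middle/_compile_end hooks, so NocaseDualRegionRule only has to swap in re.IGNORECASE rather than duplicate any matching logic. A minimal standalone sketch of that pattern, with ToyRegionRule and ToyNocaseRegionRule as illustrative stand-ins rather than the real lex2 classes:

```python
import re

class ToyRegionRule(object):
    """Compiles its start/end patterns through overridable hooks,
    mirroring the _compile_* methods added to DualRegionRule above."""
    def __init__(self, start, end):
        self.start = start
        self.end = end
        self.start_re = self._compile_start()
        self.end_re = self._compile_end()
    def _compile_start(self):
        return re.compile(self.start)
    def _compile_end(self):
        return re.compile(self.end)

class ToyNocaseRegionRule(ToyRegionRule):
    """The subclass only changes the compile flags, as NocaseDualRegionRule does."""
    def _compile_start(self):
        return re.compile(self.start, re.IGNORECASE)
    def _compile_end(self):
        return re.compile(self.end, re.IGNORECASE)

rule = ToyNocaseRegionRule(r'create function', r';')
assert rule.start_re.match('CREATE FUNCTION foo()')                          # case-insensitive start
assert ToyRegionRule(r'create function', r';').start_re.match('CREATE FUNCTION') is None
```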

mode_sql.py
@@ -1,18 +1,60 @@
 import color, mode2, tab2
-from lex2 import Grammar, PatternRule, NocasePatternRule, RegionRule
+from lex2 import Grammar, PatternRule, NocasePatternRule, RegionRule, NocaseRegionRule, DualRegionRule, NocaseDualRegionRule
 from mode_python import StringGrammar
 
+class PlPgSqlGrammar(Grammar):
+    rules = [
+        PatternRule(r'comment', r'--.*\n$'),
+        RegionRule(r'comment', '/\*', Grammar, '\*/'),
+        PatternRule(r'delimiter', r':=|[():;,\.\$\[\]]'),
+
+        NocasePatternRule(r'attribute', r'(?:check|exists|unique|not null|default|primary key|minvalue|foreign key|references)(?![A-Za-z0-9_])'),
+        NocasePatternRule(r'keyword', r'(?:declare|begin|end|raise notice|return)'),
+        NocasePatternRule(r'operator', r'(?:case|when|then|else|end|not|and|or|is not|is|in|not in)(?![A-Za-z0-9_])'),
+        NocasePatternRule(r'keyword', r'(?:create database|create index|create sequence|create table|create trigger|create view|select|insert|update|delete|drop database|drop index|drop sequence|drop table|drop trigger|drop view|create user|alter user|drop user|drop function|grant|revoke|create function|create or replace function|create or replace view|create language|create operator|create type)(?![A-Za-z0-9_])'),
+        NocasePatternRule(r'pseudokeyword', r'(?:returns|language|right join|left join|inner join|outer join|join|where|null|true|false|into|values|as|from|order by|asc|desc|limit|distinct|cascade|using|on)(?![A-Za-z0-9_])'),
+        NocasePatternRule(r'type', r'(?:void|row|serial|varchar|float|integer|int|text|timestamptz|timestamp|datetz|date|timetz|time|boolean|bool)(?![A-Za-z0-9_])'),
+        PatternRule(r'builtin', r'(?:nextval|current_timestamp|current_time|current_date)(?![A-Za-z0-9_])'),
+        RegionRule(r'string', "''", StringGrammar, "''"),
+        RegionRule(r'quoted', '"', StringGrammar, '"'),
+        PatternRule(r'bareword', r'[A-Za-z0-9_]+'),
+        PatternRule(r'empty', r'^ *\n$'),
+        PatternRule(r'eol', r'\n'),
+    ]
+
+class FunctionGrammar(Grammar):
+    rules = [
+        PatternRule(r'comment', r'--.*\n$'),
+        RegionRule(r'comment', '/\*', Grammar, '\*/'),
+        PatternRule(r'delimiter', r':=|[():;,\.\$\[\]]'),
+
+        PatternRule(r'name', r'[a-zA-Z_][a-zA-Z0-9_]*(?=\()'),
+        NocasePatternRule(r'keyword', r'(?:as|returns|language)'),
+        NocasePatternRule(r'type', r'(?:void|row|serial|varchar|float|integer|int|text|timestamptz|timestamp|datetz|date|timetz|time|boolean|bool)(?![A-Za-z0-9_])'),
+
+        NocasePatternRule(r'language', r'(?<=language ) *[a-zA-Z_][a-zA-Z0-9_]+'),
+
+        RegionRule(r'definition', "'", PlPgSqlGrammar, "'(?!')"),
+        PatternRule(r'bareword', r'[A-Za-z0-9_]+'),
+        PatternRule(r'empty', r'^ *\n$'),
+        PatternRule(r'eol', r'\n'),
+    ]
+
 class SqlGrammar(Grammar):
     rules = [
         PatternRule(r'comment', r'--.*\n$'),
         RegionRule(r'comment', '/\*', Grammar, '\*/'),
-        PatternRule(r'delimiter', r'[();,\.:\$\[\]]'),
+        PatternRule(r'delimiter', r':=|[():;,\.\$\[\]]'),
+
+        NocaseRegionRule(r'function', r'create function', FunctionGrammar, r';'),
+        NocaseRegionRule(r'function', r'create or replace function', FunctionGrammar, r';'),
+
         NocasePatternRule(r'attribute', r'(?:check|exists|unique|not null|default|primary key|minvalue|foreign key|references)(?![A-Za-z0-9_])'),
         NocasePatternRule(r'operator', r'(?:case|when|then|else|end|not|and|or|is not|is|in|not in)(?![A-Za-z0-9_])'),
         NocasePatternRule(r'keyword', r'(?:create database|create index|create sequence|create table|create trigger|create view|select|insert|update|delete|drop database|drop index|drop sequence|drop table|drop trigger|drop view|create user|alter user|drop user|drop function|grant|revoke|create function|create or replace function|create or replace view|create language|create operator|create type)(?![A-Za-z0-9_])'),
         NocasePatternRule(r'pseudokeyword', r'(?:returns|language|right join|left join|inner join|outer join|join|where|null|true|false|into|values|as|from|order by|asc|desc|limit|distinct|cascade|using|on)(?![A-Za-z0-9_])'),
         NocasePatternRule(r'type', r'(?:void|row|serial|varchar|float|integer|int|text|timestamptz|timestamp|datetz|date|timetz|time|boolean|bool)(?![A-Za-z0-9_])'),
-        PatternRule(r'function', r'(?:nextval|current_timestamp|current_time|current_date)(?![A-Za-z0-9_])'),
+        PatternRule(r'builtin', r'(?:nextval|current_timestamp|current_time|current_date)(?![A-Za-z0-9_])'),
         RegionRule(r'string', "'", StringGrammar, "'"),
         RegionRule(r'quoted', '"', StringGrammar, '"'),
         PatternRule(r'bareword', r'[A-Za-z0-9_]+'),
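
A detail worth calling out in FunctionGrammar above: the definition region opens on a single quote and closes on the pattern "'(?!')" , i.e. a quote not followed by another quote, while the nested PlPgSqlGrammar consumes doubled '' escapes as string regions. Assuming the region lexer only tests its end pattern at the positions it actually reaches (so it never sits on the second quote of an escaped pair), the lookahead keeps a body like the test file's raise notice ''blarg'' from closing the region early. A quick standalone check of the end pattern with plain re, using a made-up body string:

```python
import re

# End pattern of the definition region: a quote NOT followed by another quote.
end_re = re.compile(r"'(?!')")

text = "raise notice ''blarg''; end;' language plpgsql"
# At the first quote of a doubled '' escape, the lookahead rejects the match,
# so the region does not close there (the nested string rule eats both quotes).
assert end_re.match(text, text.index("''")) is None
# At the lone quote that actually ends the function body, it matches.
assert end_re.match(text, text.index("' language")) is not None
```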
@@ -34,6 +76,14 @@ class SqlTabber(tab2.StackTabber):
             token = self.get_token(y, i)
             if token.name == 'delimiter' and token.string == ';':
                 self._opt_pop('cont')
+            elif token.name == 'keyword':
+                if token.string == 'declare':
+                    self._opt_append('declare', currlvl + 4)
+                elif token.string == 'begin':
+                    currlvl -= 4
+                elif token.string == 'end':
+                    self._opt_pop('declare')
+                    currlvl = self.get_curr_level()
 
             if self.is_rightmost_token(y, i):
                 if not self._empty() and token.name == 'continuation':
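
The tabber logic above reads as: 'declare' pushes a marker four columns deeper, 'begin' pulls the current line back out by four, and 'end' pops the marker and re-reads the level from the stack. A loose standalone model of that bookkeeping, with a plain list standing in for tab2.StackTabber's marker stack and a hypothetical handle_keyword helper rather than the real _opt_append/_opt_pop API:

```python
# Loose model of the declare/begin/end handling added to SqlTabber above.
# 'markers' stands in for StackTabber's internal stack of (name, level) pairs;
# the real _opt_append/_opt_pop helpers do more guarding than this sketch.
def handle_keyword(markers, currlvl, keyword, width=4):
    if keyword == 'declare':
        markers.append(('declare', currlvl + width))   # block body indents one step
    elif keyword == 'begin':
        currlvl -= width                               # the 'begin' line sits back out
    elif keyword == 'end':
        if markers and markers[-1][0] == 'declare':
            markers.pop()                              # close the declare block
        currlvl = markers[-1][1] if markers else 0     # back to the enclosing level
    return currlvl

markers = []
print(handle_keyword(markers, 0, 'declare'))  # 0; pushes ('declare', 4)
print(handle_keyword(markers, 4, 'begin'))    # 0; the begin line drops back
print(handle_keyword(markers, 4, 'end'))      # 0; marker popped, stack empty
```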
@@ -61,7 +111,7 @@ class Sql(mode2.Fundamental):
         'keyword': color.build('cyan', 'default'),
         'pseudokeyword': color.build('cyan', 'default'),
         'type': color.build('green', 'default'),
-        'function': color.build('yellow', 'default'),
+        'builtin': color.build('yellow', 'default'),
         'quoted': color.build('yellow', 'default'),
         'string.start': color.build('green', 'default'),
         'string.null': color.build('green', 'default'),
@@ -69,6 +119,17 @@ class Sql(mode2.Fundamental):
         'string.octal': color.build('magenta', 'default'),
         'string.end': color.build('green', 'default'),
         'bareword': color.build('default', 'default'),
+
+        'function.start': color.build('cyan', 'default'),
+        'function.null': color.build('default', 'default'),
+        'function.name': color.build('magenta', 'default'),
+        'function.language': color.build('magenta', 'default'),
+        'function.end': color.build('default', 'default'),
+
+        'function.definition.start': color.build('magenta', 'default'),
+        'function.definition.bareword': color.build('magenta', 'default'),
+        'function.definition.null': color.build('magenta', 'default'),
+        'function.definition.end': color.build('magenta', 'default'),
     }
     def name(self):
         return "Sql"