mode changes/fixes

--HG--
branch : pmacs2
moculus 2007-07-12 23:06:33 +00:00
parent 5dcfd8406c
commit a1217a957d
8 changed files with 73 additions and 52 deletions

View File

@@ -23,9 +23,9 @@ class BDSGrammar(Grammar):
 class BDS(mode2.Fundamental):
     grammar = BDSGrammar
-    opentoken = 'delimiter'
+    opentokens = ('delimiter',)
     opentags = {'(': ')', '[': ']', '{': '}'}
-    closetoken = 'delimiter'
+    closetokens = ('delimiter',)
     closetags = {')': '(', ']': '[', '}': '{'}
     def __init__(self, w):
         mode2.Fundamental.__init__(self, w)

View File

@@ -162,9 +162,9 @@ class CTabber(tab2.StackTabber):
 class C(mode2.Fundamental):
     tabbercls = CTabber
     grammar = CGrammar
-    opentoken = 'delimiter'
+    opentokens = ('delimiter',)
     opentags = {'(': ')', '[': ']', '{': '}'}
-    closetoken = 'delimiter'
+    closetokens = ('delimiter',)
     closetags = {')': '(', ']': '[', '}': '{'}
     def __init__(self, w):
         mode2.Fundamental.__init__(self, w)

View File

@@ -55,9 +55,9 @@ class JavascriptTabber(tab2.StackTabber):
 class Javascript(mode2.Fundamental):
     grammar = JavascriptGrammar
     tabbercls = JavascriptTabber
-    opentoken = 'delimiter'
+    opentokens = ('delimiter',)
     opentags = {'(': ')', '[': ']', '{': '}'}
-    closetoken = 'delimiter'
+    closetokens = ('delimiter',)
     closetags = {')': '(', ']': '[', '}': '{'}
     def __init__(self, w):
         mode2.Fundamental.__init__(self, w)

View File

@@ -170,9 +170,9 @@ class PerlTabber(tab2.StackTabber):
 class Perl(mode2.Fundamental):
     tabbercls = PerlTabber
     grammar = PerlGrammar
-    opentoken = 'delimiter'
+    opentokens = ('delimiter',)
     opentags = {'(': ')', '[': ']', '{': '}'}
-    closetoken = 'delimiter'
+    closetokens = ('delimiter',)
     closetags = {')': '(', ']': '[', '}': '{'}
     def __init__(self, w):
         mode2.Fundamental.__init__(self, w)

View File

@@ -167,9 +167,9 @@ class PythonTabber(tab2.StackTabber):
 class Python(mode2.Fundamental):
     tabbercls = PythonTabber
     grammar = PythonGrammar
-    opentoken = 'delimiter'
+    opentokens = ('delimiter',)
     opentags = {'(': ')', '[': ']', '{': '}'}
-    closetoken = 'delimiter'
+    closetokens = ('delimiter',)
     closetags = {')': '(', ']': '[', '}': '{'}
     def __init__(self, w):
         mode2.Fundamental.__init__(self, w)

View File

@@ -1,4 +1,4 @@
-import color, mode2
+import color, mode2, tab2
 from lex2 import Grammar, PatternRule, RegionRule
 
 class StringGrammar(Grammar):
@@ -15,7 +15,7 @@ class ShGrammar(Grammar):
         PatternRule(r'reserved', r"(?:case|done|do|elif|else|esac|fi|for|function|if|in|select|then|until|while|time)(?![a-zA-Z0-9_=/])"),
         PatternRule(r'builtin', r"(?:source|alias|bg|bind|break|builtin|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|fc|fg|getops|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|readonly|read|return|set|shift|shopt|suspend|test|times|trap|type|ulimit|umask|unalias|unset|wait)(?![a-zA-Z0-9_=/])"),
         PatternRule(r'operator', r"(?:-eq|-ne|-gt|-lt|-ge|-le| = | != )"),
-        PatternRule(r'delimiter', r"[][\(\);\{\}|&><]"),
+        PatternRule(r'delimiter', r";;|[\[\]\(\);\{\}|&><]"),
         RegionRule(r'eval', '`', StringGrammar, '`'),
         #RegionRule(r'eval2', r'\$\(', None, r'\)'),
         RegionRule(r'eval2', r'\$\(', StringGrammar, r'\)'),
@@ -31,8 +31,40 @@ class ShGrammar(Grammar):
         PatternRule(r'eol', r'\n$'),
     ]
 
+class ShTabber(tab2.StackTabber):
+    def is_base(self, y):
+        if y == 0:
+            return True
+        highlighter = self.mode.window.buffer.highlights[self.mode.name()]
+        if not highlighter.tokens[y]:
+            return False
+        t = highlighter.tokens[y][0]
+        return t.name == 'function'
+    def _handle_close_token(self, currlvl, y, i):
+        s = self.get_token(y, i).string
+        if s == ')' and self.markers and self._peek_name() == "case":
+            # we have to ignore ) when used in "case" statements.
+            return currlvl
+        else:
+            return tab2.StackTabber._handle_close_token(self, currlvl, y, i)
+    def _handle_other_token(self, currlvl, y, i):
+        token = self.get_token(y, i)
+        fqname = token.fqname()
+        if token.name == 'continuation':
+            self._opt_append("cont", currlvl + 4)
+        elif token.name == 'eol':
+            self._opt_pop("cont")
+        return currlvl
+
 class Sh(mode2.Fundamental):
-    grammar = ShGrammar()
+    grammar = ShGrammar
+    tabbercls = ShTabber
+    opentokens = ('delimiter', 'reserved',)
+    opentags = {'(': ')', '[': ']', '{': '}',
+                'do': 'done', 'then': 'fi', 'case': 'esac'}
+    closetokens = ('delimiter', 'reserved',)
+    closetags = {')': '(', ']': '[', '}': '{',
+                 'done': 'do', 'fi': 'then', 'esac': 'case'}
     def __init__(self, w):
         mode2.Fundamental.__init__(self, w)
         self.colors = {

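The Sh mode above is the first to use the plural opentokens/closetokens form for something braces alone cannot express: shell keywords now pair up like brackets (do/done, then/fi, case/esac), with ShTabber separately special-casing the ')' that ends a case branch. A rough standalone sketch of how those tables pair up — illustrative only, not the actual StackTabber logic; the token stream here is just made-up (name, string) tuples:

# Illustrative sketch, not pmacs code: mirrors the Sh tables above to show
# how keyword pairs are balanced the same way as braces.
opentokens  = ('delimiter', 'reserved')
opentags    = {'(': ')', '[': ']', '{': '}',
               'do': 'done', 'then': 'fi', 'case': 'esac'}
closetokens = ('delimiter', 'reserved')
closetags   = {')': '(', ']': '[', '}': '{',
               'done': 'do', 'fi': 'then', 'esac': 'case'}

def balanced(tokens):
    # tokens is a list of (name, string) pairs
    stack = []
    for name, s in tokens:
        if name in opentokens and s in opentags:
            stack.append(s)
        elif name in closetokens and s in closetags:
            if not stack or closetags[s] != stack.pop():
                return False
    return not stack

# balanced([('reserved', 'then'), ('delimiter', '{'),
#           ('delimiter', '}'), ('reserved', 'fi')]) -> True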
View File

@@ -1,39 +1,28 @@
 import color, mode2
 from lex2 import Grammar, PatternRule, NocasePatternRule, RegionRule
-
-class StringGrammar(Grammar):
-    rules = [
-        PatternRule(
-            name=r'octal',
-            pattern=r'\\[0-7]{3}',
-        ),
-        PatternRule(
-            name=r'escaped',
-            pattern=r'\\.',
-        ),
-    ]
+from mode_python import StringGrammar
 
 class SqlGrammar(Grammar):
     rules = [
-        PatternRule(name=r'comment', pattern=r'--.*$'),
-        RegionRule(name=r'comment', start='/\*', grammar=Grammar(), end='\*/'),
-        PatternRule(name=r'delimiter', pattern=r'[();,\.:\$\[\]]'),
-        NocasePatternRule(name=r'attribute', pattern=r'(?:check|exists|unique|not null|default|primary key|minvalue|foreign key|references)(?![A-Za-z0-9_])'),
-        NocasePatternRule(name=r'operator', pattern=r'(?:case|when|then|else|end|not|and|or|is not|is|in|not in)(?![A-Za-z0-9_])'),
-        NocasePatternRule(name=r'keyword', pattern=r'(?:create database|create index|create sequence|create table|create trigger|create view|select|insert|update|delete|drop database|drop index|drop sequence|drop table|drop trigger|drop view|create user|alter user|drop user|drop function|grant|revoke|create function|create or replace function|create or replace view|create language|create operator|create type)(?![A-Za-z0-9_])'),
-        NocasePatternRule(name=r'pseudokeyword', pattern=r'(?:returns|language|right join|left join|inner join|outer join|join|where|null|true|false|into|values|as|from|order by|asc|desc|limit|distinct|cascade|using|on)(?![A-Za-z0-9_])'),
-        NocasePatternRule(name=r'type', pattern=r'(?:void|row|serial|varchar|float|integer|int|text|timestamptz|timestamp|datetz|date|timetz|time|boolean|bool)(?![A-Za-z0-9_])'),
-        PatternRule(name=r'function', pattern=r'(?:nextval|current_timestamp|current_time|current_date)(?![A-Za-z0-9_])'),
-        RegionRule(name=r'string', start="'", grammar=StringGrammar(), end="'"),
-        RegionRule(name=r'quoted', start='"', grammar=StringGrammar(), end='"'),
-        PatternRule(name=r'bareword', pattern=r'[A-Za-z0-9_]+'),
+        PatternRule(r'comment', r'--.*$'),
+        RegionRule(r'comment', '/\*', Grammar, '\*/'),
+        PatternRule(r'delimiter', r'[();,\.:\$\[\]]'),
+        NocasePatternRule(r'attribute', r'(?:check|exists|unique|not null|default|primary key|minvalue|foreign key|references)(?![A-Za-z0-9_])'),
+        NocasePatternRule(r'operator', r'(?:case|when|then|else|end|not|and|or|is not|is|in|not in)(?![A-Za-z0-9_])'),
+        NocasePatternRule(r'keyword', r'(?:create database|create index|create sequence|create table|create trigger|create view|select|insert|update|delete|drop database|drop index|drop sequence|drop table|drop trigger|drop view|create user|alter user|drop user|drop function|grant|revoke|create function|create or replace function|create or replace view|create language|create operator|create type)(?![A-Za-z0-9_])'),
+        NocasePatternRule(r'pseudokeyword', r'(?:returns|language|right join|left join|inner join|outer join|join|where|null|true|false|into|values|as|from|order by|asc|desc|limit|distinct|cascade|using|on)(?![A-Za-z0-9_])'),
+        NocasePatternRule(r'type', r'(?:void|row|serial|varchar|float|integer|int|text|timestamptz|timestamp|datetz|date|timetz|time|boolean|bool)(?![A-Za-z0-9_])'),
+        PatternRule(r'function', r'(?:nextval|current_timestamp|current_time|current_date)(?![A-Za-z0-9_])'),
+        RegionRule(r'string', "'", StringGrammar, "'"),
+        RegionRule(r'quoted', '"', StringGrammar, '"'),
+        PatternRule(r'bareword', r'[A-Za-z0-9_]+'),
     ]
 
 class Sql(mode2.Fundamental):
-    grammar = SqlGrammar()
-    opentoken = 'delimiter'
+    grammar = SqlGrammar
+    opentokens = ('delimiter',)
     opentags = {'(': ')', '[': ']', '{': '}'}
-    closetoken = 'delimiter'
+    closetokens = ('delimiter',)
     closetags = {')': '(', ']': '[', '}': '{'}
     def __init__(self, w):
        mode2.Fundamental.__init__(self, w)

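Two things change in the SQL mode: the local StringGrammar is dropped in favour of the one already defined in mode_python, and every rule moves from keyword arguments to the shorter positional form used by the other modes. A hypothetical grammar in the same positional style — the class and its patterns are invented for illustration; only the PatternRule/RegionRule call shape is taken from the diff above:

# Hypothetical example, not part of this commit: a small grammar written in
# the positional style, reusing mode_python's StringGrammar for quoting.
from lex2 import Grammar, PatternRule, RegionRule
from mode_python import StringGrammar

class IniGrammar(Grammar):
    rules = [
        PatternRule(r'comment', r'[#;].*$'),
        PatternRule(r'delimiter', r'[=\[\]]'),
        RegionRule(r'string', '"', StringGrammar, '"'),
        PatternRule(r'bareword', r'[A-Za-z0-9_]+'),
    ]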
View File

@@ -129,9 +129,9 @@ class StackTabber(Tabber):
             token = self.get_token(y, i)
             s = token.string
-            if token.name == self.mode.closetoken and s in self.mode.closetags:
+            if token.name in self.mode.closetokens and s in self.mode.closetags:
                 currlvl = self._handle_close_token(currlvl, y, i)
-            elif token.name == self.mode.opentoken and s in self.mode.opentags:
+            elif token.name in self.mode.opentokens and s in self.mode.opentags:
                 currlvl = self._handle_open_token(currlvl, y, i)
             else:
                 currlvl = self._handle_other_token(currlvl, y, i)
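This tabber change is what the earlier mode edits feed into: instead of comparing against a single opentoken/closetoken string, the dispatch now does a membership test, so a mode can flag several token types (for example both 'delimiter' and 'reserved' in Sh) as bracket carriers. A minimal sketch of the same dispatch outside the editor — hypothetical classes, not the real tab2.StackTabber:

# Hypothetical sketch of the membership-test dispatch above.
class FakeShMode:
    opentokens  = ('delimiter', 'reserved')
    opentags    = {'{': '}', 'do': 'done'}
    closetokens = ('delimiter', 'reserved')
    closetags   = {'}': '{', 'done': 'do'}

def classify(mode, name, s):
    if name in mode.closetokens and s in mode.closetags:
        return 'close'
    elif name in mode.opentokens and s in mode.opentags:
        return 'open'
    return 'other'

# classify(FakeShMode, 'reserved', 'do')  -> 'open'
# classify(FakeShMode, 'delimiter', '}')  -> 'close'
# classify(FakeShMode, 'builtin', 'echo') -> 'other'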