parent 533e7b53b7
commit 350d9b6cc5

lex.py (4 changed lines)
@@ -23,6 +23,8 @@ class Token(object):
         #self._fqname = self.mkfqname()
         assert parent is None or hasattr(parent, 'name'), 'oh no %r' % parent

+    def isa(self, *names):
+        return self.name in names
     def match(self, name, string):
         return self.name == name and self.string == string
     def matchs(self, name, strings):

@@ -33,6 +35,8 @@ class Token(object):
                 return True
         return False

+    def fqisa(self, *names):
+        return self.fqname() in names
     def fqmatch(self, name, string):
         return self.fqname() == name and self.string == string
     def fqmatchs(self, name, strings):
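The two helpers added above are small membership shortcuts on Token. A minimal, self-contained sketch of how they read at call sites (this Token is a stand-in, not the real lex.py class, whose fqname() is built from the token's parent chain):

```python
class Token(object):
    """Stand-in with just enough of lex.py's Token interface for illustration."""
    def __init__(self, name, string, fqname=None):
        self.name = name            # short token name, e.g. 'c.keyword'
        self.string = string        # the matched text
        self._fq = fqname or name
    def fqname(self):
        return self._fq
    def isa(self, *names):          # added by this commit
        return self.name in names
    def fqisa(self, *names):        # added by this commit
        return self.fqname() in names

t = Token('c.keyword', 'if')
print(t.isa('c.keyword', 'c.identifier', 'c.type'))   # True  (was: t.name in (...))
print(t.fqisa('spaces', 'eol', 'c.comment'))          # False (was: t.fqname() in (...))
```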

@@ -145,7 +145,8 @@ class Interact(Method):
         cmd = vargs['cmd']
         a = w.application
         a.close_buffer_by_name(bname)
-        b = XTermBuffer(a, 'bash', ['-c', cmd], name=bname, modename=self.modename)
+        b = XTermBuffer(a, 'bash', ['-c', cmd], name=bname,
+                        modename=self.modename)
         a.add_buffer(b)
         Window(b, a)
         if a.window().buffer is not b:

mode/c.py (51 changed lines)
@@ -36,32 +36,32 @@ class MacroGrammar(Grammar):

 class CGrammar(Grammar):
     rules = [
-        PatternRule('c.spaces', r' +'),
+        PatternRule('spaces', r' +'),

         PatternMatchRule('x', r'(\()( *)(' + word + r')(\**)( *)(\))( *)(?=[a-zA-Z0-9_\(])',
-                         'c.delimiter', 'c.spaces', 'c.type', 'c.operator',
-                         'c.spaces', 'c.delimiter', 'c.spaces'),
+                         'delimiter', 'spaces', 'c.type', 'c.operator',
+                         'spaces', 'delimiter', 'spaces'),

-        PatternRule(r"c.delimiter", r"\.|\(|\)|\[|\]|{|}|@|,|:|`|;|=(?!=)|\?|->"),
-        PatternRule('c.eol', r"\n$"),
+        PatternRule("delimiter", r"\.|\(|\)|\[|\]|{|}|@|,|:|`|;|=(?!=)|\?|->"),
+        PatternRule('eol', r"\n$"),
         PatternMatchRule('x', r'(struct|enum|union)( +)(' + word + ')',
-                         'c.builtin', 'c.spaces', 'c.type'),
+                         'c.builtin', 'spaces', 'c.type'),

         PatternRule('c.builtin', r"(?:break|case|continue|default|do|else|for|goto|if|return|sizeof|switch|while)(?!" + chr2 + ")"),
         PatternRule('c.builtin', r"(?:signed|register|extern|const|static|enum|struct|typedef|union|unsigned|volatile)(?!" + chr2 + ")"),
         PatternRule('c.type', r"(?:auto|char|double|float|int|long|short|void|volatile)(?!" + chr2 + ")"),

         PatternMatchRule('x', '(' + word + ')( +)(\**)(' + word + ')( *)(\()',
-                         'c.type', 'c.spaces', 'c.operator', 'c.function',
-                         'c.spaces', 'c.delimiter'),
+                         'c.type', 'spaces', 'c.operator', 'c.function',
+                         'spaces', 'delimiter'),
         PatternMatchRule('x', '(' + word + ')(\*+)( +)(' + word + ')( *)(\()',
-                         'c.type', 'c.operator', 'c.spaces', 'c.function',
-                         'c.spaces', 'c.delimiter'),
+                         'c.type', 'c.operator', 'spaces', 'c.function',
+                         'spaces', 'delimiter'),

         PatternMatchRule('x', '(' + word + ')( +)(\**)(' + word + ')',
-                         'c.type', 'c.spaces', 'c.operator', 'c.identifier'),
+                         'c.type', 'spaces', 'c.operator', 'c.identifier'),
         PatternMatchRule('x', '(' + word + ')(\*+)( +)(' + word + ')',
-                         'c.type', 'c.operator', 'c.spaces', 'c.identifier'),
+                         'c.type', 'c.operator', 'spaces', 'c.identifier'),


         PatternRule('c.function', word + r'(?= *\()'),

@@ -81,7 +81,7 @@ class CGrammar(Grammar):
         RegionRule('c.macrocomment', '#if +(?:0|NULL|FALSE)', Grammar, '#endif'),
         PatternRule('c.char', r"'.'|'\\.'|'\\[0-7]{3}'"),
         PatternMatchRule('x', r'(# *include)( +)(.+)(\n|$)',
-                         'c.macro.start', 'c.spaces', 'c.header', 'c.macro.end'),
+                         'c.macro.start', 'spaces', 'c.header', 'c.macro.end'),
         PatternRule('c.identifier', word),
         OverridePatternRule('c.comment', r'/\* *@@:(?P<token>[.a-zA-Z0-9_]+):(?P<mode>[.a-zA-Z0-9_]+) *\*/$'),
         OverridePatternRule('c.comment', r'// *@@:(?P<token>[.a-zA-Z0-9_]+):(?P<mode>[.a-zA-Z0-9_]+) *$'),
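A note on the renames in CGrammar: a PatternMatchRule lists one token name per regex group, so the arguments above pair groups with names positionally. Dropping the 'c.' prefix from the layout tokens ('spaces', 'delimiter', 'eol') makes the grammar emit the generic names that the tabber below keys on, while the semantic tokens ('c.type', 'c.function', ...) keep the mode prefix. A rough, self-contained illustration of the group-to-name pairing, using plain re rather than the real lex module:

```python
import re

chr1, chr2 = '[a-zA-Z_]', '[a-zA-Z_0-9]'
word = chr1 + chr2 + '*'

# Hypothetical stand-in for what a PatternMatchRule does with its arguments:
# match one regex and label each group with the corresponding token name.
pattern = re.compile('(' + word + r')( +)(\**)(' + word + r')( *)(\()')
names = ('c.type', 'spaces', 'c.operator', 'c.function', 'spaces', 'delimiter')

m = pattern.match('int *main(')
print(list(zip(names, m.groups())))
# [('c.type', 'int'), ('spaces', ' '), ('c.operator', '*'),
#  ('c.function', 'main'), ('spaces', ''), ('delimiter', '(')]
```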

@@ -92,14 +92,14 @@ MacroGrammar.rules.extend(CGrammar.rules)
 class CTabber2(tab.StackTabber2):
     open_tokens = {'delimiter': {'{': '}', '(': ')', '[': ']'}}
     close_tokens = {'delimiter': {'}': '{', ')': '(', ']': '['}}
-    control_tokens = {'keyword': {'if': 1, 'else': 1, 'while': 1, 'do': 1, 'for': 1}}
+    control_tokens = {'c.keyword': {'if': 1, 'else': 1, 'while': 1, 'do': 1, 'for': 1}}
     end_at_eof = False
     end_at_tokens = {'delimiter': {';': 1}}
     nocontinue_tokens = {'delimiter': {';': 1, ',': 1}}
-    start_free_tokens = {'string.start': 'string.end'}
-    end_free_tokens = {'string.end': 'string.start'}
-    start_macro_tokens = {'macro.start': 'macro.end'}
-    end_macro_tokens = {'macro.end': 'macro.start'}
+    start_free_tokens = {'c.string.start': 'c.string.end'}
+    end_free_tokens = {'c.string.end': 'c.string.start'}
+    start_macro_tokens = {'c.macro.start': 'c.macro.end'}
+    end_macro_tokens = {'c.macro.end': 'c.macro.start'}
     def is_base(self, y):
         if y == 0:
             return True

@@ -108,7 +108,7 @@ class CTabber2(tab.StackTabber2):
         # this assumes that people aren't gonna use these macros inside of
         # blocks, which is probably ok.
         t = tokens[0]
-        if t.fqname() == 'macro.start' and t.string in ('#define', '#include'):
+        if t.fqmatchs('c.macro.start', '#define', '#include'):
             return True

         # detecting function declarations is annoying; this assumes that people

@@ -119,10 +119,10 @@ class CTabber2(tab.StackTabber2):
         # four types of tokens below
         decl = False
         for t in tokens:
-            if t.name in ('keyword', 'identifier', 'structname', 'enumname'):
+            if t.isa('c.keyword', 'c.identifier', 'c.type'):
                 decl = True
                 continue
-            if decl and t.name == 'function':
+            if decl and t.name == 'c.function':
                 break
             else:
                 decl = False

@@ -131,8 +131,11 @@ class CTabber2(tab.StackTabber2):
     def _is_indent(self, t):
         return t.name == 'spaces'
     def _is_ignored(self, t):
-        return t.fqname() in ('spaces', 'eol', 'comment', 'comment.start',
-                              'comment.data', 'comment.null', 'comment.end')
+        return t.fqisa('spaces', 'eol', 'c.comment', 'c.comment.start',
+                       'c.comment.data', 'c.comment.null', 'c.comment.end')
+        #return t.fqname() in ('spaces', 'eol', 'c.comment', 'c.comment.start',
+        #                      'c.comment.data', 'c.comment.null',
+        #                      'c.comment.end')

 class CCheckSyntax(Exec):
     '''Build this C program (using the mode's make cmd)'''

@@ -170,7 +173,7 @@ class C(Fundamental):
     closetokens = ('delimiter',)
     closetags = {')': '(', ']': '[', '}': '{'}
     actions = [CCheckSyntax, CMake]
-    format = "%(flag)s %(bname)-18s (%(mname)s) %(indent)s %(cursor)s/%(mark)s %(perc)s [%(func)s]"
+    format = "%(flag)s %(bname)s (%(mname)s) %(indent)s %(cursor)s %(perc)s [%(func)s]"
     commentc = '//'

     colors = {
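The status-line change is easier to see rendered. The format string is presumably expanded with Python %-dict formatting against the window's state; with made-up field values, the old string pads the buffer name to 18 columns and shows cursor/mark, while the new one drops the padding and the mark field:

```python
# Field values are invented for illustration; in the editor they come from the
# window/buffer state.
fields = {'flag': '-', 'bname': 'main.c', 'mname': 'C', 'indent': '4',
          'cursor': '(12,3)', 'mark': '(1,1)', 'perc': '37%', 'func': 'main'}

old = "%(flag)s %(bname)-18s (%(mname)s) %(indent)s %(cursor)s/%(mark)s %(perc)s [%(func)s]"
new = "%(flag)s %(bname)s (%(mname)s) %(indent)s %(cursor)s %(perc)s [%(func)s]"

print(old % fields)   # buffer name padded to 18 columns, cursor and mark both shown
print(new % fields)   # no padding, mark dropped
```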

@@ -70,13 +70,6 @@ class GforthLoadFile(Interact):
         path = w.buffer.path
         b.pipe_write('s" ' + path + '" included\n')

-class CodePipeGrammar(Grammar):
-    rules = [
-        PatternRule('comment', r"\\(?: .*)?\n$"),
-        PatternRule('addr', r'\$[0-9A-F]+'),
-        PatternRule('addr2', r'\[[a-z]+\]'),
-    ]
-
 br = RegionRule('banner', r'^Gforth \d+\.\d+\.\d+', LineGrammar, r"^Type `bye' to exit\n$")
 class ForthPipeGrammar(Grammar):
     rules = [br] + ForthGrammar.rules

mode/lua.py (65 changed lines)
@@ -1,8 +1,12 @@
 import commands
+import time
+from tab import StackTabber
 from method import Method
 from mode import Fundamental
 from lex import Grammar, PatternRule, RegionRule, PatternMatchRule
 from mode.python import StringGrammar1, StringGrammar2
+from mode.pipe import Pipe
+from method.shell import Interact

 chr1 = '[a-zA-Z_]'
 chr2 = '[a-zA-Z_0-9]'

@@ -10,22 +14,25 @@ word = chr1 + chr2 + '*'

 class LuaGrammar(Grammar):
     rules = [
-        PatternRule(r'comment', r'--.*$'),
-        PatternRule(r'spaces', r' +'),
-        PatternRule(r'eol', r'\n'),
+        PatternRule('comment', r'--.*$'),
+        PatternRule('spaces', r' +'),
+        PatternRule('eol', r'\n'),

-        RegionRule(r'string', r"'", StringGrammar1, r"'"),
-        RegionRule(r'string', r'"', StringGrammar2, r'"'),
+        RegionRule('lua.string', r"'", StringGrammar1, r"'"),
+        RegionRule('lua.string', r'"', StringGrammar2, r'"'),

-        PatternRule(r'keyword', r'(?:while|until|true|then|return|repeat|or|not|nil|local|in|if|function|for|false|end|elseif|else|do|break|and)(?!'+chr2+')'),
-        PatternMatchRule('x', '(function)( +)('+word+')', 'keyword', 'spaces', 'function'),
-        PatternRule(r'lua_identifier', word),
+        PatternMatchRule('x', '(function)( +)('+word+')',
+                         'lua.keyword', 'spaces', 'lua.function'),
+        PatternRule('lua.keyword', '(?:while|until|true|then|return|repeat|or|not|nil|local|in|if|function|for|false|end|elseif|else|done|do|break|and)(?!'+chr2+')'),
+        PatternRule('lua.internal', '_[A-Z]+'),
+        PatternRule('lua.identifier', word),

-        PatternRule(r'delimiter', r'(?:[=(){}\[\];:,.])'),
-        PatternRule(r'operator', r'(?:\.\.\.|\.\.|==|~=|<=|>=|<|>)'),
+        PatternRule('delimiter', r'(?:[=(){}\[\];:,.])'),
+        PatternRule('lua.operator', r'(?:\.\.\.|\.\.|==|~=|<=|>=|<|>)'),
+        PatternRule('lua.operator', r'(?:\+|-|/|\*|%|\^)'),

-        PatternRule(r"integer", r"(?<![\.0-9a-zA-Z_])(?:0|-?[1-9][0-9]*|0[0-7]+|0[xX][0-9a-fA-F]+)[lL]?(?![\.0-9a-zA-Z_])"),
-        PatternRule(r"float", r"(?<![\.0-9a-zA-Z_])(?:-?[0-9]+\.[0-9]*|-?\.[0-9]+|(?:[0-9]|[0-9]+\.[0-9]*|-?\.[0-9]+)[eE][\+-]?[0-9]+)(?![\.0-9a-zA-Z_])"),
+        PatternRule('lua.integer', r"(?<![\.0-9a-zA-Z_])(?:0|-?[1-9][0-9]*|0[0-7]+|0[xX][0-9a-fA-F]+)[lL]?(?![\.0-9a-zA-Z_])"),
+        PatternRule('lua.float', r"(?<![\.0-9a-zA-Z_])(?:-?[0-9]+\.[0-9]*|-?\.[0-9]+|(?:[0-9]|[0-9]+\.[0-9]*|-?\.[0-9]+)[eE][\+-]?[0-9]+)(?![\.0-9a-zA-Z_])"),
     ]

 class LuaCheckSyntax(Method):
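Aside on the keyword rule: the trailing (?!'+chr2+') lookahead is what keeps a keyword from matching inside a longer identifier. A quick check with plain re (the alternation is copied from the rule above; note it includes 'done', which is not actually a Lua keyword):

```python
import re

chr2 = '[a-zA-Z_0-9]'
keyword = re.compile('(?:while|until|true|then|return|repeat|or|not|nil|local|in|'
                     'if|function|for|false|end|elseif|else|done|do|break|and)'
                     '(?!' + chr2 + ')')

print(bool(keyword.match('end')))       # True: standalone keyword
print(bool(keyword.match('endpoint')))  # False: lookahead rejects the longer identifier
print(bool(keyword.match('iffy')))      # False for the same reason
```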

@@ -40,10 +47,32 @@ class LuaCheckSyntax(Method):
         else:
             app.data_buffer("*Lua-Check-Syntax*", output)

+class LuaStart(Interact):
+    args = []
+    modename = 'luapipe'
+    def _execute(self, w, **vargs):
+        a = w.application
+        if a.has_buffer_name('*Lua*'):
+            a.switch_buffer(a.get_buffer_by_name('*Lua*'))
+        else:
+            Interact._execute(self, w, bname='*Lua*', cmd='lua')
+class LuaLoadFile(Interact):
+    args = []
+    modename = 'luapipe'
+    def _execute(self, w, **vargs):
+        a = w.application
+        if a.has_buffer_name('*Lua*'):
+            b = a.get_buffer_by_name('*Lua*')
+            a.switch_buffer(b)
+        else:
+            Interact._execute(self, w, bname='*Lua*', cmd='lua')
+            b = a.get_buffer_by_name('*Lua*')
+        path = w.buffer.path
+        b.pipe_write('dofile("' + path + '");\n')

 class Lua(Fundamental):
     name = 'Lua'
     extensions = ['.lua']
     #tabbercls = mode.lisp.LispTabber
     grammar = LuaGrammar
     commentc = '--'
     opentokens = ('delimiter',)
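LuaLoadFile drives the interactive buffer by writing a dofile() call into the *Lua* pipe. A tiny illustration of the exact string it sends (the path here is made up; in the editor it is w.buffer.path):

```python
path = '/home/user/scratch/demo.lua'   # hypothetical path
cmd = 'dofile("' + path + '");\n'
print(repr(cmd))   # 'dofile("/home/user/scratch/demo.lua");\n'
```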

@@ -51,7 +80,7 @@ class Lua(Fundamental):
     closetokens = ('delimiter',)
     closetags = {')': '(', ']': '[', '}': '{'}
     colors = {}
-    actions = [LuaCheckSyntax]
+    actions = [LuaCheckSyntax, LuaStart, LuaLoadFile]
     _bindings = {
         'close-paren': (')',),
         'close-brace': ('}',),

@@ -59,4 +88,10 @@ class Lua(Fundamental):
         'lua-check-syntax': ('C-c s',),
     }

-install = Lua.install
+class LuaPipe(Pipe):
+    name = 'luapipe'
+    grammar = LuaGrammar
+
+def install(*args):
+    Lua.install(*args)
+    LuaPipe.install(*args)
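With LuaPipe registered alongside Lua, the module-level install() hook now presumably sets up both the file mode and the luapipe mode that the *Lua* interaction buffer (LuaStart/LuaLoadFile above) runs in, so a single registration covers both.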