parent 7fc86c3e9a
commit ddd8828edd

lex.py (2 lines changed)
@@ -202,6 +202,8 @@ class PatternGroupRule(PatternRule):
for (tokname, m) in matches:
    yield self.make_token(lexer, m.group(0), tokname, parent, m.groupdict())
raise StopIteration
class NocasePatternGroupRule(PatternGroupRule):
    reflags = re.IGNORECASE

class RegionRule(Rule):
    def __init__(self, name, *args):
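Note: the two added lines give lex.py a case-insensitive variant of
PatternGroupRule, mirroring the existing NocasePatternRule / NocaseRegionRule
pair. A minimal sketch of the intended relationship, assuming (as the hunk
suggests, but does not show) that the base class compiles its alternating
(tokname, regex) arguments with a class-level reflags attribute:

    import re

    class PatternGroupRule(object):
        reflags = 0                      # default: case-sensitive matching
        def __init__(self, name, *pairs):
            # pairs alternate token names and regexes; each regex is
            # compiled with the class's reflags (an assumption here)
            self.name = name
            self.groups = [(tok, re.compile(rx, self.reflags))
                           for tok, rx in zip(pairs[::2], pairs[1::2])]

    class NocasePatternGroupRule(PatternGroupRule):
        reflags = re.IGNORECASE          # the only override needed
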
@@ -1,6 +1,8 @@
import time
import tab
from mode import Fundamental
from lex import Grammar, PatternRule, RegionRule, NocasePatternRule, NocaseRegionRule
from lex import Grammar, PatternRule, RegionRule, NocasePatternRule, \
    NocaseRegionRule, NocasePatternGroupRule
from mode.python import StringGrammar2
from mode.pipe import Pipe
from method.shell import Interact
@@ -18,34 +20,36 @@ class ForthGrammar(Grammar):
PatternRule(r'comment', r"\\(?: .*)?\n$"),
RegionRule(r'comment', r'\((?= |\n)', DataGrammar, r'\)'),
NocaseRegionRule(r'comment', r'0 \[if\]', DataGrammar, r'\[(?:endif|then)\]'),
PatternRule(r'delimiter', r"[:;\[\]]"),
RegionRule(r'string', r'[.cs]" ', StringGrammar2, r'"'),
RegionRule(r'string', r'[.s]\\" ', StringGrammar2, r'"'),
RegionRule(r'string', r'\.\( ', StringGrammar3, r'\)'),

# builtin
NocasePatternRule(r'forth_builtin', r'(?:true|false|on|off)(?= |\n|$)'),
NocasePatternRule(r'builtin', r'(?:true|false|on|off)(?= |\n|$)'),

# math
NocasePatternRule(r'forth_operator', r'(?:\+|-|\*/mod|\*/|\*|/mod|/|mod|negate|abs|min|max|and|or|xor|not|lshift|rshift|invert|2\*|2/|2\+|2-|1\+|1-|8\*|under\+|m\+|m\*/|m\*|um/mod|um\*|fm/mod|sm/rem|d\+|d-|dnegate|dabs|dmin|dmax|d2\*|d2/|f\+|f-|f\*\*|f\*|f/|fnegate|fabs|fmax|fmin|floor|fround|fsqrt|fexpm1|fexp|flnp1|fln|flog|falog|fsincos|fsinh|fsin|fcosh|fcos|ftanh|ftan|fasinh|fasin|facosh|facos|fatan2|fatanh|fatan|f2\*|f2/|1/f|f~rel|f~abs|f~|0<>|0<=|0<|0=|0>=|0>|<>|<=|<|>=|>|=|u<=|u<|u>=|u>|d0<=|d0<>|d0<|d0=|d0>=|d0>|d<=|d<>|d<|d=|d>=|d>|du<=|du<|du>=|du>|within|\?negate|\?dnegate)(?= |\n|$)'),
# mathn
NocasePatternRule(r'keyword', r'(?:\+|-|\*/mod|\*/|\*|/mod|/|mod|negate|abs|min|max|and|or|xor|not|lshift|rshift|invert|2\*|2/|2\+|2-|1\+|1-|8\*|under\+|m\+|m\*/|m\*|um/mod|um\*|fm/mod|sm/rem|d\+|d-|dnegate|dabs|dmin|dmax|d2\*|d2/|f\+|f-|f\*\*|f\*|f/|fnegate|fabs|fmax|fmin|floor|fround|fsqrt|fexpm1|fexp|flnp1|fln|flog|falog|fsincos|fsinh|fsin|fcosh|fcos|ftanh|ftan|fasinh|fasin|facosh|facos|fatan2|fatanh|fatan|f2\*|f2/|1/f|f~rel|f~abs|f~|0<>|0<=|0<|0=|0>=|0>|<>|<=|<|>=|>|=|u<=|u<|u>=|u>|d0<=|d0<>|d0<|d0=|d0>=|d0>|d<=|d<>|d<|d=|d>=|d>|du<=|du<|du>=|du>|within|\?negate|\?dnegate)(?= |\n|$)'),
# stack
NocasePatternRule(r'forth_operator', r'(?:drop|nip|dup|over|tuck|swap|rot|-rot|\?dup|pick|roll|2drop|2nip|2dup|2over|2tuck|2swap|2rot|2-rot|3dup|4dup|5dup|3drop|4drop|5drop|8drop|4swap|4rot|4-rot|4tuck|8swap|8dup|>r|r>|r@|rdrop|2>r|2r>|2r@|2rdrop|4>r|4r>|4r@|4rdrop|fdrop|fnip|fdup|fover|ftuck|fswap|frot)(?= |\n|$)'),
NocasePatternRule(r'keyword', r'(?:drop|nip|dup|over|tuck|swap|rot|-rot|\?dup|pick|roll|2drop|2nip|2dup|2over|2tuck|2swap|2rot|2-rot|3dup|4dup|5dup|3drop|4drop|5drop|8drop|4swap|4rot|4-rot|4tuck|8swap|8dup|>r|r>|r@|rdrop|2>r|2r>|2r@|2rdrop|4>r|4r>|4r@|4rdrop|fdrop|fnip|fdup|fover|ftuck|fswap|frot)(?= |\n|$)'),
# pointer
NocasePatternRule(r'forth_operator', r'(?:forthsp|sp@|sp!|fp@|fp!|rp@|rp!|lp@|lp!)(?= |\n|$)'),
NocasePatternRule(r'keyword', r'(?:forthsp|sp@|sp!|fp@|fp!|rp@|rp!|lp@|lp!)(?= |\n|$)'),
# address
NocasePatternRule(r'forth_operator', r'(?:@|!|\+!|c@|c!|2@|2!|f@|f!|sf@|sf!|df@|df!|chars|char\+|cells|cell\+|cell|align|aligned|floats|float\+|float|faligned|falign|sfloats|sfloat\+|sfaligned|sfalign|dfloats|dfloat\+|dfaligned|dfalign|maxaligned|maxalign|cfaligned|cfalign|address-unit-bits|allot|allocate|here|move|erase|cmove>|cmove|fill|blank)(?= |\n|$)'),
# conditional
NocasePatternRule(r'forth_keyword', r'(?:if|else|endif|then|case|of|endof|endcase|\?dup-if|\?dup-0=-if|ahead|cs-pick|cs-roll|catch|throw|within)(?= |\n|$)'),
# iter
NocasePatternRule(r'forth_operator', r'(?:begin|while|repeat|until|again|\?do|loop|i|j|k|\+do|u\+do|u-do|-do|do|\+loop|-loop|unloop|leave|\?leave|exit|done|for|next)(?= |\n|$)'),
NocasePatternRule(r'keyword', r'(?:@|!|\+!|c@|c!|2@|2!|f@|f!|sf@|sf!|df@|df!|chars|char\+|cells|cell\+|cell|align|aligned|floats|float\+|float|faligned|falign|sfloats|sfloat\+|sfaligned|sfalign|dfloats|dfloat\+|dfaligned|dfalign|maxaligned|maxalign|cfaligned|cfalign|address-unit-bits|allot|allocate|here|move|erase|cmove>|cmove|fill|blank)(?= |\n|$)'),
# conditional (*)
NocasePatternRule(r'builtin', r'(?:if|else|endif|then|case|of|endof|endcase|\?dup-if|\?dup-0=-if|ahead|cs-pick|cs-roll|catch|throw|within)(?= |\n|$)'),
# iter (*)
NocasePatternRule(r'builtin', r'(?:begin|while|repeat|until|again|\?do|loop|i|j|k|\+do|u\+do|u-do|-do|do|\+loop|-loop|unloop|leave|\?leave|exit|done|for|next)(?= |\n|$)'),

# define
NocasePatternRule(r'forth_keyword', r'(?:constant|2constant|fconstant|variable|2variable|fvariable|create|user|to|defer|is|does>|immediate|compile-only|compile|restrict|interpret|postpone|execute|literal|create-interpret/compile|interpretation>|<interpretation|compilation>|<compilation|\]|lastxt|comp\'|postpone|find-name|name>int|name\?int|name>comp|name>string|state|c;|cvariable|,|2,|f,|c,|\[(?:ifdef|ifundef|then|endif|then|else|\?do|do|loop|\+loop|next|begin|until|again|while|repeat|comp\'|\'|compile)\])(?= |\n|$)'),
# assembly
NocasePatternRule(r'forth_keyword', r'(?:assembler|code|end-code|;code|flush-icache|c,)(?= |\n|$)'),
NocasePatternRule(r'builtin', r'(?:constant|2constant|fconstant|variable|2variable|fvariable|create|user|to|defer|is|does>|immediate|compile-only|compile|restrict|interpret|postpone|execute|literal|create-interpret/compile|interpretation>|<interpretation|compilation>|<compilation|\]|lastxt|comp\'|postpone|find-name|name>int|name\?int|name>comp|name>string|state|c;|cvariable|,|2,|f,|c,|\[(?:ifdef|ifundef|then|endif|then|else|\?do|do|loop|\+loop|next|begin|until|again|while|repeat|comp\'|\'|compile)\])(?= |\n|$)'),

# assembly (*)
NocasePatternRule(r'builtin', r'(?:assembler|code|end-code|;code|flush-icache|c,)(?= |\n|$)'),

# xyz
PatternRule(r'forth_def', r'(?<=:) +[^ ]+'),
NocasePatternGroupRule(r'declaration', r'delimiter', r':',
    r'spaces', r' +', r'function', r'[^ ]+'),
PatternRule(r'delimiter', r"[:;\[\]]"),
NocasePatternRule(r'number', r"'[a-z](?= |$)"),
NocasePatternRule(r'number', r'%?-?[0-1]+\.?(?= |$)'),
NocasePatternRule(r'number', r'[&#]?-?[0-9]+\.?(?= |$)'),
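Note: the old forth_def rule is replaced by a NocasePatternGroupRule that
splits a colon definition into three tokens: the ':' delimiter, the run of
spaces, and the word being defined. A hedged illustration of what the three
concatenated sub-patterns accept, using plain re rather than the lexer
machinery:

    import re

    # the three sub-patterns from the 'declaration' rule, concatenated
    m = re.match(r'(:)( +)([^ ]+)', ': square dup * ;', re.IGNORECASE)
    assert m.groups() == (':', ' ', 'square')
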
@@ -87,18 +91,42 @@ class ForthPipe(Pipe):
    modename = 'forthpipe'
    grammar = ForthPipeGrammar

dopen = {}
for s in ['if', 'case', 'of', '?dup-if', '?dup-0=-if', 'begin', 'while',
          'until', '?do', 'for', 'code']:
    dopen[s] = None
dclose = {}
for s in ['then', 'endif', 'again']:
    dclose[s] = None
class ForthTabber(tab.StackTabber2):
    open_tokens = {'builtin': dopen, 'delimiter': {':': ';'}}
    close_tokens = {'builtin': dclose, 'delimiter': set([';'])}
    continue_tokens = {'nomatch': set()}
    fixed_indent = True
    def is_base(self, y):
        return y == 0
    def _is_indent(self, t):
        return t.name == 'spaces'
    def _is_ignored(self, t):
        return t.fqname() in ('spaces', 'eol', 'comment', 'comment.start',
                              'comment.data', 'comment.null', 'comment.end')
    def _handle_other_token(self, y, tokens, start, end, i, t):
        tab.StackTabber2._handle_other_token(self, y, tokens, start, end, i, t)
        if t.match('builtin', 'else'):
            if len(self.stack) > 1:
                self.curr_level = self.stack[-2].level
            else:
                self.curr_level = 0

class Forth(Fundamental):
    modename = 'FORTH'
    extensions = ['.fs', '.fi', '.fb']
    grammar = ForthGrammar
    commentc = '\\'
    commentc = '\\ '
    actions = [GforthStart, GforthLoadFile]
    tabbercls = ForthTabber
    colors = {
        'forth_def': ('blue', 'default', 'bold'),
        'forth_word': ('yellow', 'default', 'bold'),
        'forth_keyword': ('magenta', 'default', 'bold'),
        'forth_operator': ('cyan', 'default', 'bold'),
        'forth_builtin': ('magenta', 'default', 'bold'),
    }

def install(*args):
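Note: the mode's token names move to the generic 'keyword'/'builtin'
categories, and a ForthTabber is added that drives tab.StackTabber2 with
Forth's control words. Its opener table maps every opening word to None,
which (per the tab.py change below) means "closed by any registered close
word". A hedged, equivalent construction of those tables:

    # dict.fromkeys builds the same {word: None} mappings as the two loops
    dopen = dict.fromkeys(['if', 'case', 'of', '?dup-if', '?dup-0=-if',
                           'begin', 'while', 'until', '?do', 'for', 'code'])
    dclose = dict.fromkeys(['then', 'endif', 'again'])
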
tab.py (14 lines changed)
@@ -241,7 +241,7 @@ class Marker2(object):

class StackTabber2(Tabber):
    open_tokens = {'delimiter': {'{': '}', '(': ')', '[': ']'}}
    close_tokens = {'delimiter': {'}': '{', ')': '(', ']': '['}}
    close_tokens = {'delimiter': set(['}', ')', ']'])}
    open_scope_tokens = {'delimiter': set(['{'])}
    close_scope_tokens = {'delimiter': set(['}'])}
    control_tokens = {'keyword': set(['if', 'else', 'while', 'do', 'for'])}
@@ -253,6 +253,7 @@ class StackTabber2(Tabber):
    end_free_tokens = {'string.end': 'string.start'}
    start_macro_tokens = {}
    end_macro_tokens = {}
    fixed_indent = False
    def __init__(self, m):
        self.mode = m
        self.name = m.name()
@@ -356,8 +357,7 @@ class StackTabber2(Tabber):
        return t.name in ('spaces', 'eol', 'comment')

    def _is_close_token(self, t):
        return (t.name == 'delimiter' and
                t.string in self.close_tokens['delimiter'])
        return t.string in self.close_tokens.get(t.name, set())
    def _handle_close_token(self, y, tokens, start, end, i, t):
        if not self.stack:
            raise Exception, "unmatched %r, line %d" % (t.string, y)
@@ -366,7 +366,8 @@ class StackTabber2(Tabber):
        if marker.name in ('control', 'continue'):
            self.stack.pop()
        elif marker.name in self.open_tokens[marker.type_]:
            if self.open_tokens[marker.type_][marker.name] == t.string:
            s = self.open_tokens[marker.type_][marker.name]
            if s in (None, t.string):
                self.stack.pop()
                break
        else:
@@ -393,8 +394,7 @@ class StackTabber2(Tabber):
    #XYZYZYXYXY

    def _is_open_token(self, t):
        return (t.name == 'delimiter' and
                t.string in self.open_tokens['delimiter'])
        return t.string in self.open_tokens.get(t.name, set())
    def _handle_open_token(self, y, tokens, start, end, i, t):
        if i == 0 and self.stack and self.stack[-1].name == 'continue':
            self.stack.pop()
@@ -403,7 +403,7 @@ class StackTabber2(Tabber):
        if i == 0 and t.string in self.open_scope_tokens.get(t.name, set()):
            self._save_curr_level()

        if i == end - start:
        if i == end - start or self.fixed_indent:
            level = self._get_next_level()
        else:
            level = tokens[i + 1].x + 1
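Note: the open/close handling in tab.py is generalized so that any token type
listed in open_tokens / close_tokens participates, not just 'delimiter', and
an opener whose expected closer is None is satisfied by any registered close
word. A standalone, hedged illustration of that convention (names here are
invented for the example, not the Tabber API):

    open_tokens = {'builtin': {'if': None, 'begin': None},
                   'delimiter': {':': ';'}}

    def closes(opener_type, opener, close_word):
        expected = open_tokens[opener_type][opener]
        # None: any close word ends the opener; otherwise require a match
        return expected in (None, close_word)

    assert closes('builtin', 'if', 'then')        # if ... then
    assert closes('builtin', 'if', 'endif')       # if ... endif
    assert closes('delimiter', ':', ';')          # : name ... ;
    assert not closes('delimiter', ':', 'then')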