commit ef6c9c92e2 (parent fe11590c64)
branch: pmacs2
moculus 2009-03-30 00:50:27 +00:00
20 changed files with 330 additions and 264 deletions

View File

@ -6,6 +6,9 @@ class DirBuffer(Buffer):
def __init__(self, path, name=None):
Buffer.__init__(self)
self.path = os.path.realpath(path)
self.settings['hide-dot'] = True
self.settings['type-sort'] = False
def changed(self):
return False
def readonly(self):
@ -31,6 +34,9 @@ class DirBuffer(Buffer):
fieldlines = []
maxlens = [0] * 5
for name in names:
if self.settings.get('hide-dot'):
if name.startswith('.') and name not in ('.', '..'):
continue
path = self._make_path(name)
fields = dirutil.path_fields(path, name)
for i in range(0, 5):
@ -39,8 +45,11 @@ class DirBuffer(Buffer):
except:
raise Exception, '%d %r' % (i, fields[i])
fieldlines.append(fields)
fieldlines.sort(cmp=dirutil.path_sort)
if self.settings.get('type-sort'):
fieldlines.sort(cmp=dirutil.path_sort)
else:
fieldlines.sort(cmp=dirutil.path_sort2)
fmt = '%%%ds %%-%ds %%-%ds %%%ds %%%ds %%s' % tuple(maxlens)
lines = []
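
The new hide-dot setting skips dotfiles while keeping the '.' and '..' entries themselves. A standalone sketch of that filter (the settings dict and entry names here are illustrative, not pmacs internals):

settings = {'hide-dot': True}
names = ['.', '..', '.git', 'mode', 'lex.py']

kept = []
for name in names:
    if settings.get('hide-dot'):
        if name.startswith('.') and name not in ('.', '..'):
            continue
    kept.append(name)

print(kept)   # ['.', '..', 'mode', 'lex.py']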

View File

@ -36,6 +36,11 @@ property, namely the targets of all instructions that alter control
flow (i.e., branch instructions). To do this they use a combination of
static analysis and dynamic checks.
.. code-block:: Perl
sub foo {
return map { "$_\n" } @_;
}
.. code-block:: Python
def foo(a, b):
def bar(a, b, c):

View File

@ -34,6 +34,10 @@ BEGIN {
split("abcdefghijklmnopqrstuvwxyz",Lev5,"")
}
function foo(i, j) {
return i * j;
}
/^\*/ {
this_len = match($0,/\*([^*]|$)/); # get number of stars in 1st field
array[this_len]++; # increment index of current leaf

View File

@ -60,12 +60,19 @@ def valid_group(group):
def path_sort(a, b):
try:
x = cmp(a[0][0], b[0][0])
if x != 0:
return -x
if x != 0: return -x
return cmp(a[5], b[5])
except:
raise Exception, repr(a) + ' ' + repr(b)
def path_sort2(a, b):
try:
x = cmp(a[5], b[5])
if x != 0: return x
return cmp(b[0][0], a[0][0])
except:
raise Exception, repr(a) + ' ' + repr(b)
def path_fields(path, name):
# let's escape some troublesome characters
name = re.sub(r'([\a\b\n\r\t\v])', r'\\\1', name)
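
path_sort orders by the leading file-type character first and then by name, while the new path_sort2 orders by name first and breaks ties by type. A Python 2 sketch (matching the codebase's cmp-based comparators) run on simplified stand-ins for path_fields() output:

def path_sort(a, b):
    x = cmp(a[0][0], b[0][0])
    if x != 0: return -x
    return cmp(a[5], b[5])

def path_sort2(a, b):
    x = cmp(a[5], b[5])
    if x != 0: return x
    return cmp(b[0][0], a[0][0])

entries = [
    ('-rw-r--r--', 0, 0, 0, 0, 'aaa.txt'),
    ('drwxr-xr-x', 0, 0, 0, 0, 'zzz'),
    ('drwxr-xr-x', 0, 0, 0, 0, 'bbb'),
]
print([e[5] for e in sorted(entries, path_sort)])    # ['bbb', 'zzz', 'aaa.txt']
print([e[5] for e in sorted(entries, path_sort2)])   # ['aaa.txt', 'bbb', 'zzz']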

lex.py (4 changes)
View File

@ -436,7 +436,9 @@ class OverrideRegionRule(RegionRule):
name = d['grammar'].lower()
if name in a.modes:
modecls = a.modes[name]
mode = modecls(FakeWindow(lexer.mode.window.application))
b = lexer.mode.window.buffer
fw = FakeWindow(lexer.mode.window.application, b)
mode = modecls(fw)
assert hasattr(mode, 'grammar') and hasattr(mode, 'colors')
if parent is None:

View File

@ -1,12 +1,17 @@
import commands, os
import os.path
from subprocess import Popen, PIPE, STDOUT
import color, default, mode, tab
from lex import Grammar, PatternRule, RegionRule
from mode import Fundamental
import default
from lex import Grammar, PatternRule, RegionRule, PatternMatchRule
from mode.python import StringGrammar2
from tab import StackTabber2
from method import Method, Argument, arg
from method.shell import Exec, Pipe
chr1 = '[a-zA-Z_]'
chr2 = '[a-zA-Z0-9_]'
word = chr1 + chr2 + '*'
class RegexGrammar(Grammar):
rules = [
PatternRule(r'escaped', r'\\.'),
@ -18,10 +23,14 @@ class AwkGrammar(Grammar):
PatternRule(r'comment', r'#.*\n$'),
PatternRule(r'spaces', r' +'),
RegionRule(r'awk_regex', r'/(?! )', RegexGrammar, r'/'),
PatternRule(r'awk_global', r'(?:TEXTDOMAIN|SUBSEP|RLENGTH|RSTART|RT|RS|PROCINFO|ORS|OFS|OFMT|NR|NF|LINT|IGNORECASE|FS|FNR|FILENAME|FIELDWIDTHS|ERRNO|ENVIRON|CONVFMT|BINMODE|ARGV|ARGIND|ARGC)(?![a-zA-Z0-9_])'),
PatternMatchRule('x', r'(function)( +)(' + word + ')',
'keyword', 'spaces', r'function'),
PatternRule(r'awk_global', r'(?:TEXTDOMAIN|SUBSEP|RLENGTH|RSTART|RT|RS|PROCINFO|ORS|OFS|OFMT|NR|NF|LINT|IGNORECASE|FS|FNR|FILENAME|FIELDWIDTHS|ERRNO|ENVIRON|CONVFMT|BINMODE|ARGV|ARGIND|ARGC)(?!' + chr2 + ')'),
PatternRule(r'delimiter', r'(?:[\{\}()\[\]?:;,]|=(?!=)|\+=|-=|\*=|/=|\%=|\^=)'),
PatternRule(r'keyword', r'(?:BEGIN|END|if|else|while|do|for|break|continue|delete|exit)(?![a-zA-Z0-9_])'),
PatternRule(r'builtin', r'(?:close|getline|nextfile|next|printf|print|system|fflush|atan2|cos|exp|int|log|rand|sin|sqrt|srand|asorti|asort|gensub|gsub|index|length|match|split|sprintf|strtonum|substr|sub|tolower|toupper|mktime|strftime|systime|and|compl|lshift|or|xor|rshift|bindtextdomain|dcgettext|dcngettext|function|extension)(?![a-zA-Z0-9_])'),
PatternRule(r'keyword', r'(?:BEGIN|END|function|if|else|while|do|for|break|continue|delete|exit)(?!' + chr2 + ')'),
PatternRule(r'builtin', r'(?:return|close|getline|nextfile|next|printf|print|system|fflush|atan2|cos|exp|int|log|rand|sin|sqrt|srand|asorti|asort|gensub|gsub|index|length|match|split|sprintf|strtonum|substr|sub|tolower|toupper|mktime|strftime|systime|and|compl|lshift|or|xor|rshift|bindtextdomain|dcgettext|dcngettext|extension)(?!' + chr2 + ')'),
PatternRule(r'awk_field', r'\$\d*'),
@ -30,11 +39,12 @@ class AwkGrammar(Grammar):
PatternRule(r'number', r'-?[0-9]+\.?[0-9]*'),
PatternRule(r'number', r'-?\.[0-9]+'),
PatternRule(r'unop', r'!(?![=~])|--|\+\+'),
PatternRule(r'binop', r'(?:&&|\|\||<=|>=|!=|!~|==|\^|%|[-~/+*<>])'),
PatternRule(r'operator', r'!(?![=~])|--|\+\+'),
PatternRule(r'operator', r'(?:&&|\|\||<=|>=|!=|!~|==|\^|%|[-~/+*<>])'),
RegionRule(r'string', r'"', StringGrammar2, r'"'),
PatternRule(r'awk_function', r'[a-zA-Z_][a-zA-Z0-9_]*(?=\()'),
PatternRule(r'awk_identifier', r'[a-zA-Z_][a-zA-Z0-9_]*'),
PatternRule(r'awk_function', word + '(?=\()'),
PatternRule(r'awk_identifier', word),
PatternRule(r'continuation', r'\\\n$'),
PatternRule(r'eol', r'\n'),
@ -49,26 +59,20 @@ class AwkTabber(StackTabber2):
end_at_eof = True
end_at_tokens = {}
def _is_base(self, y):
if y == 0:
return True
if y == 0: return True
t = self._get_tokens(y)[0]
if t.fqname() == 'awk_regex.start':
return True
elif t.name in ('awk_field', 'awk_global'):
return True
elif t.name == 'keyword' and t.string in ('BEGIN', 'END'):
return True
else:
return False
def _is_indent(self, t):
return t.name == 'spaces'
def _is_ignored(self, t):
return t.name in ('spaces', 'eol', 'comment')
if t.fqname() == 'awk_regex.start': return True
if t.name in ('awk_field', 'awk_global'): return True
if t.name == 'keyword' and t.string in ('BEGIN', 'END'): return True
return False
def _is_indent(self, t): return t.name == 'spaces'
def _is_ignored(self, t): return t.name in ('spaces', 'eol', 'comment')
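
For reference, a standalone sketch of what the composed pattern behind the new function-definition PatternMatchRule matches, assuming (as elsewhere in these grammars) that each capture group receives the corresponding token name:

import re
chr1 = '[a-zA-Z_]'
chr2 = '[a-zA-Z0-9_]'
word = chr1 + chr2 + '*'
m = re.match(r'(function)( +)(' + word + ')', 'function count_stars(line)')
print(m.groups())   # ('function', ' ', 'count_stars') -> keyword, spaces, function tokens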
class AwkFilterFile(Exec):
'''Filter a file through the current buffer's AWK program'''
show_success = True
args = [arg('path', dt="path", p="Filter File: ", dv=default.path_dirname, ld=True, h="file to open")]
args = [arg('path', dt="path", p="Filter File: ", dv=default.path_dirname,
ld=True, h="file to open")]
def _execute(self, w, **vargs):
if not hasattr(w.buffer, 'path'):
w.set_error("Buffer %r has no program" % w.buffer.name())
@ -133,7 +137,7 @@ class AwkFilterInput(Method):
w.application.data_buffer('*Awk-Output*', output, switch_to=True)
w.set_error("awk exited with status %d" % status)
class Awk(mode.Fundamental):
class Awk(Fundamental):
name = 'awk'
tabbercls = AwkTabber
extensions = ['.awk']
@ -148,7 +152,7 @@ class Awk(mode.Fundamental):
'awk_global': ('yellow', 'default', 'bold'),
'awk_function': ('magenta', 'default', 'bold'),
'awk_field': ('yellow', 'default', 'bold'),
'awk_identifier': ('yellow', 'default', 'bold'),
#'awk_identifier': ('yellow', 'default', 'bold'),
'awk_regex.start': ('cyan', 'default', 'bold'),
'awk_regex.null': ('cyan', 'default', 'bold'),
'awk_regex.data': ('cyan', 'default', 'bold'),

View File

@ -1,28 +1,36 @@
import color, mode
from mode import Fundamental
from lex import Grammar, PatternRule, RegionRule
class StringGrammar(Grammar):
class StringGrammar1(Grammar):
rules = [
PatternRule(r'data', r"[^\\']+"),
PatternRule(r'escaped', r'\\.'),
]
class StringGrammar2(Grammar):
rules = [
PatternRule(r'data', r'[^\\"]+'),
PatternRule(r'escaped', r'\\.'),
]
tbbid = r'[A-Z]{2}\d{3}-\d{3}-\d{3}'
class BDSGrammar(Grammar):
rules = [
PatternRule(r'comment', r'#.*$'),
PatternRule(r'bds_section', r'section(?= *\()'),
PatternRule(r'bds_component', r"[A-Z]{2}\d{3}-\d{3}-\d{3}(?= *\()"),
PatternRule(r'bds_component', tbbid + r"(?= *\()"),
PatternRule(r'bds_attr_name', r"[a-z_]+(?==)"),
RegionRule(r'bds_attr_value', r'(?<=)"', StringGrammar, r'"'),
RegionRule(r'bds_attr_value', r"(?<=)'", StringGrammar, r"'"),
RegionRule(r'bds_attr_value', r"(?<=)'", StringGrammar1, r"'"),
RegionRule(r'bds_attr_value', r'(?<=)"', StringGrammar2, r'"'),
PatternRule(r'bds_num', r"-?(?:[0-9][0-9\.]*[0-9]|[0-9])"),
PatternRule(r'bds_alias', r"[a-z0-9]+(?=:[A-Z]{2}\d{3}-\d{3}-\d{3})"),
PatternRule(r'bds_id', r"[A-Z]{2}\d{3}-\d{3}-\d{3}"),
PatternRule(r'bds_alias', r"[a-z0-9]+(?=:" + tbbid + ")"),
PatternRule(r'bds_id', tbbid),
PatternRule(r'bds_func', r"[a-zA-Z0-9_]+(?= *\()"),
RegionRule(r'bds_string', r'"', StringGrammar, r'"'),
RegionRule(r'bds_string', r"'", StringGrammar, r"'"),
RegionRule(r'bds_string', r"'", StringGrammar1, r"'"),
RegionRule(r'bds_string', r'"', StringGrammar2, r'"'),
]
class BDS(mode.Fundamental):
class BDS(Fundamental):
name = 'bds'
extensions = ['.bds']
grammar = BDSGrammar
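
The repeated ID literal is now factored into tbbid and reused by the component, alias, and id rules. A quick standalone check (the sample strings are made up):

import re
tbbid = r'[A-Z]{2}\d{3}-\d{3}-\d{3}'
print(bool(re.match(tbbid, 'AB123-456-789')))                                 # True
print(re.match(r'[a-z0-9]+(?=:' + tbbid + ')', 'x1:AB123-456-789').group())   # x1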

View File

@ -1,20 +1,17 @@
import color, mode
from point import Point
from mode import Fundamental
from lex import Grammar, PatternRule, RegionRule
class MetadataGrammar(Grammar):
rules = [
PatternRule(r'username', r'[a-zA-Z0-9_]+'),
]
rules = [PatternRule(r'username', r'[a-zA-Z0-9_]+')]
class BlameGrammar(Grammar):
rules = [
RegionRule(r'metadata', r'^[0-9\.]+', MetadataGrammar, r'(?:[0-9]{4}-[0-9]{2}-[0-9]{2}|[0-9]{2}-[A-Z][a-z]{2}-[0-9]{2})'),
RegionRule(r'metadata', r'^[0-9\.]+', MetadataGrammar,
r'(?:\d{4}-\d{2}-\d{2}|\d{2}-[A-Z][a-z]{2}-\d{2})'),
PatternRule(r'data', r'.+$'),
]
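
The rewritten end pattern for the metadata region still has to accept both timestamp styles it terminated on before; a standalone check with made-up dates:

import re
end = r'(?:\d{4}-\d{2}-\d{2}|\d{2}-[A-Z][a-z]{2}-\d{2})'
print(bool(re.match(end, '2009-03-30')))   # ISO-style date: True
print(bool(re.match(end, '30-Mar-09')))    # dd-Mon-yy date: True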
class Blame(mode.Fundamental):
class Blame(Fundamental):
name = 'Blame'
grammar = BlameGrammar
colors = {

mode/c.py (104 changes)
View File

@ -1,6 +1,8 @@
import os, re
import os.path
from subprocess import Popen, PIPE, STDOUT
import color, default, method, method.shell, mode, tab
from method.shell import Exec
from mode import Fundamental
import tab
from lex import Grammar, PatternRule, RegionRule, PatternMatchRule, OverridePatternRule
from mode.python import StringGrammar2
@ -15,55 +17,61 @@ class ErrorGrammar(Grammar):
PatternRule('continuation', r'\\\n$'),
]
chr1 = '[a-zA-Z_]'
chr2 = '[a-zA-Z0-9_]'
word = chr1 + chr2 + '*'
class MacroGrammar(Grammar):
rules = [
PatternRule('continuation', r'\\\n$'),
PatternRule('name', r'(?<=#define ) *[a-zA-Z_][a-zA-Z0-9_]*'),
PatternRule('name', r'(?<=#ifdef ) *[a-zA-Z_][a-zA-Z0-9_]*'),
PatternRule('name', r'(?<=#ifndef ) *[a-zA-Z_][a-zA-Z0-9_]*'),
PatternRule('name', r'(?<=#undef ) *[a-zA-Z_][a-zA-Z0-9_]*'),
PatternRule(r'concat', r'##[a-zA-Z0-9_]+'),
PatternRule(r'quoted', r'#[a-zA-Z0-9_]+'),
PatternMatchRule(r'xyz', r'(defined)(\()([a-zA-Z_][a-zA-Z0-9_]*)(\))',
r'function', r'delimiter', r'name', r'delimiter'),
PatternRule('name', '(?<=#define ) *' + word),
PatternRule('name', '(?<=#ifdef ) *' + word),
PatternRule('name', '(?<=#ifndef ) *' + word),
PatternRule('name', '(?<=#undef ) *' + word),
PatternRule('concat', '##' + chr2 + '+'),
PatternRule('quoted', '#' + chr2 + '+'),
PatternMatchRule('x', r'(defined)(\()(' + word + r')(\))',
'function', 'delimiter', 'name', 'delimiter'),
]
class CGrammar(Grammar):
rules = [
PatternRule(r'spaces', r' +'),
PatternRule('spaces', r' +'),
PatternRule(r"delimiter", r"\.|\(|\)|\[|\]|{|}|@|,|:|`|;|=(?!=)|\?|->"),
PatternRule(r'eol', r"\n$"),
PatternMatchRule(r'x', r'(struct|enum|union)( +)([a-zA-Z_][a-zA-Z0-9_]*)',
r'builtin', r'spaces', r'type'),
PatternRule('eol', r"\n$"),
PatternMatchRule('x', r'(struct|enum|union)( +)(' + word + ')',
'builtin', 'spaces', 'type'),
PatternRule(r'builtin', r"(?:break|case|continue|default|do|else|for|goto|if|return|sizeof|switch|while)(?![a-zA-Z_])"),
PatternRule(r'builtin', r"(?:signed|register|extern|const|static|enum|struct|typedef|union|unsigned|volatile)(?![a-zA-Z_])"),
PatternRule(r'type', r"(?:auto|char|double|float|int|long|short|void|volatile)(?![a-zA-Z_])"),
PatternRule('builtin', r"(?:break|case|continue|default|do|else|for|goto|if|return|sizeof|switch|while)(?!" + chr2 + ")"),
PatternRule('builtin', r"(?:signed|register|extern|const|static|enum|struct|typedef|union|unsigned|volatile)(?!" + chr2 + ")"),
PatternRule('type', r"(?:auto|char|double|float|int|long|short|void|volatile)(?!" + chr2 + ")"),
PatternMatchRule(r'x', r'([a-zA-Z_][a-zA-Z0-9_]*)(\**)( +)(\**)([a-zA-Z_][a-zA-Z0-9_]*)',
r'type', r'spaces', r'binop', r'spaces', r'identifier'),
PatternMatchRule('x', '(' + word + ')( +)(\**)(' + word + ')',
'type', 'spaces', 'binop', 'identifier'),
PatternMatchRule('x', '(' + word + ')(\*+)( +)(' + word + ')',
'type', 'binop', 'spaces', 'identifier'),
PatternRule(r'function', r'[a-zA-Z_][a-zA-Z0-9_]*(?= *\()'),
PatternRule(r'constant', r"[A-Z_][A-Z0-9_]+(?![a-zA-Z0-9_])"),
PatternRule(r'label', r'[a-zA-Z_][a-zA-Z0-9_]*(?=:)'),
RegionRule(r'error', r'# *error', ErrorGrammar, r'\n$'),
RegionRule(r'macro', r'# *(?:assert|cpu|define|elif|else|endif|error|ident|ifdef|ifndef|if|import|include_next|line|machine|pragma_once|pragma|system|unassert|undef|warning)(?!=[a-zA-Z0-9_])', MacroGrammar, r'\n$'),
RegionRule(r'comment', r'/\*', CommentGrammar, r'\*/'),
PatternRule(r'comment', r'//.*$'),
RegionRule(r'string', '"', StringGrammar2, '"'),
PatternRule('function', word + r'(?= *\()'),
PatternRule('constant', "[A-Z_][A-Z0-9_]+(?!" + chr2 + ")"),
PatternRule('label', word + '(?=:)'),
RegionRule('error', '# *error', ErrorGrammar, r'\n$'),
RegionRule('macro', '# *(?:assert|cpu|define|elif|else|endif|error|ident|ifdef|ifndef|if|import|include_next|line|machine|pragma_once|pragma|system|unassert|undef|warning)(?!=' + chr2 + ')', MacroGrammar, r'\n$'),
RegionRule('comment', r'/\*', CommentGrammar, r'\*/'),
PatternRule('comment', '//.*$'),
RegionRule('string', '"', StringGrammar2, '"'),
PatternRule(r"float", r"-?[0-9]+\.[0-9]*|-?\.[0-9]+|-?(?:[0-9]|[0-9]+\.[0-9]*|-?\.[0-9]+)[eE][\+-]?[0-9]+"),
PatternRule(r"integer", r"(?:0(?![x0-9])|-?[1-9][0-9]*|0[0-7]+|0[xX][0-9a-fA-F]+)[lL]?"),
PatternRule(r"operator", r"!(?!=)|\+=|-=|\*=|/=|//=|%=|&=\|\^=|>>=|<<=|\*\*="),
PatternRule(r'operator', r"\+|<>|<<|<=|<|-|>>|>=|>|\*\*|&|\*|\||/|\^|==|//|~|!=|%"),
RegionRule(r'macrocomment', r'#if +(?:0|NULL|FALSE)', Grammar, r'#endif'),
PatternRule(r'char', r"'.'|'\\.'|'\\[0-7]{3}'"),
PatternMatchRule(r'x', r'(# *include)( +)(.+)(\n|$)',
r'macro.start', r'spaces', r'header', r'macro.end'),
PatternRule(r'identifier', r"[a-zA-Z_][a-zA-Z0-9_]*"),
OverridePatternRule(r'comment', r'/\* *@@:(?P<token>[.a-zA-Z0-9_]+):(?P<mode>[.a-zA-Z0-9_]+) *\*/$'),
OverridePatternRule(r'comment', r'// *@@:(?P<token>[.a-zA-Z0-9_]+):(?P<mode>[.a-zA-Z0-9_]+) *$'),
PatternRule('operator', r"\+|<>|<<|<=|<|-|>>|>=|>|\*\*|&|\*|\||/|\^|==|//|~|!=|%"),
RegionRule('macrocomment', '#if +(?:0|NULL|FALSE)', Grammar, '#endif'),
PatternRule('char', r"'.'|'\\.'|'\\[0-7]{3}'"),
PatternMatchRule('x', r'(# *include)( +)(.+)(\n|$)',
'macro.start', 'spaces', 'header', 'macro.end'),
PatternRule('identifier', word),
OverridePatternRule('comment', r'/\* *@@:(?P<token>[.a-zA-Z0-9_]+):(?P<mode>[.a-zA-Z0-9_]+) *\*/$'),
OverridePatternRule('comment', r'// *@@:(?P<token>[.a-zA-Z0-9_]+):(?P<mode>[.a-zA-Z0-9_]+) *$'),
]
MacroGrammar.rules.extend(CGrammar.rules)
@ -113,7 +121,7 @@ class CTabber2(tab.StackTabber2):
return t.fqname() in ('spaces', 'eol', 'comment', 'comment.start',
'comment.data', 'comment.null', 'comment.end')
class CCheckSyntax(method.shell.Exec):
class CCheckSyntax(Exec):
'''Build this C program (using the mode's make cmd)'''
show_success = False
args = []
@ -126,7 +134,7 @@ class CCheckSyntax(method.shell.Exec):
self._doit(w, w.buffer.path, w.application.config['c.syntax-cmd'],
cmdname='c-check-syntax')
class CMake(method.shell.Exec):
class CMake(Exec):
'''Build this C program (using the mode's make cmd)'''
show_success = False
args = []
@ -139,7 +147,7 @@ class CMake(method.shell.Exec):
self._doit(w, w.buffer.path, w.application.config['c.make-cmd'],
cmdname='c-make')
class C(mode.Fundamental):
class C(Fundamental):
name = 'C'
extensions = ['.c', '.h', '.cpp']
tabbercls = CTabber2
@ -183,24 +191,20 @@ class C(mode.Fundamental):
}
_bindings = {
'close-paren': (')',),
'close-brace': ('}',),
'close-bracket': (']',),
'close-paren': (')',),
'close-brace': ('}',),
'close-bracket': (']',),
'c-check-syntax': ('C-c s',),
'c-make': ('C-c C-c',),
'c-make': ('C-c C-c',),
}
def get_functions(self): return {}
def get_function_names(self): return []
def get_line_function(self, y): return None
def get_status_names(self):
names = mode.Fundamental.get_status_names(self)
names = Fundamental.get_status_names(self)
c = self.window.logical_cursor()
names['func'] = self.get_line_function(c.y)
return names
def get_functions(self):
return {}
def get_function_names(self):
return []
def get_line_function(self, y):
return None
install = C.install
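
The old single declaration rule is split into two PatternMatchRules so that pointer stars can sit on either side of the whitespace. A standalone sketch of what each pattern captures (sample declarations are made up):

import re
chr1 = '[a-zA-Z_]'
chr2 = '[a-zA-Z0-9_]'
word = chr1 + chr2 + '*'
decl1 = re.compile('(' + word + r')( +)(\**)(' + word + ')')   # stars bound to the name
decl2 = re.compile('(' + word + r')(\*+)( +)(' + word + ')')   # stars bound to the type
print(decl1.match('char *buf').groups())   # ('char', ' ', '*', 'buf')
print(decl2.match('char* buf').groups())   # ('char', '*', ' ', 'buf')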

View File

@ -1,38 +1,37 @@
import color, mode
from mode import Fundamental
from lex import Grammar, PatternRule, RegionRule
class StringGrammar(Grammar):
rules = [
PatternRule(r'cheetah_placeholder', r'\${[a-zA-Z0-9_.\[\]]+}'),
PatternRule(r'cheetah_placeholder', r'\$[a-zA-Z0-9_.\[\]]+'),
PatternRule(r'octal', r'\\[0-7]{3}'),
PatternRule(r'escaped', r'\\.'),
PatternRule(r'data', r'[^\$\\]+'),
PatternRule('cheetah_placeholder', r'\${[a-zA-Z0-9_.\[\]]+}'),
PatternRule('cheetah_placeholder', r'\$[a-zA-Z0-9_.\[\]]+'),
PatternRule('octal', r'\\[0-7]{3}'),
PatternRule('escaped', r'\\.'),
PatternRule('data', r'[^\$\\]+'),
]
class TagGrammar(Grammar):
rules = [
PatternRule(r'cheetah_placeholder', r'\${[a-zA-Z0-9_.\[\]]+}'),
PatternRule(r'cheetah_placeholder', r'\$[a-zA-Z0-9_.\[\]]+'),
RegionRule(r'string', r'"', StringGrammar, r'"'),
RegionRule(r'string', r"'", StringGrammar, r"'"),
PatternRule(r'namespace', r'[a-zA-Z_]+:'),
PatternRule(r'attrname', r'[^ =>\n]+(?==)'),
PatternRule(r'name', r'[^\[\] =>\n]+'),
PatternRule('cheetah_placeholder', r'\${[a-zA-Z0-9_.\[\]]+}'),
PatternRule('cheetah_placeholder', r'\$[a-zA-Z0-9_.\[\]]+'),
RegionRule('string', '"', StringGrammar, '"'),
RegionRule('string', "'", StringGrammar, "'"),
PatternRule('namespace', '[a-zA-Z_]+:'),
PatternRule('attrname', r'[^ =>\n]+(?==)'),
PatternRule('name', r'[^\[\] =>\n]+'),
]
class TemplateGrammar(Grammar):
rules = [
RegionRule(r'comment', r'#\*', Grammar, r'\*#'),
PatternRule(r'comment', r'##.*\n'),
PatternRule(r'cheetah_placeholder', r'\${[a-zA-Z0-9_.\[\]]+}'),
PatternRule(r'cheetah_placeholder', r'\$[a-zA-Z0-9_.\[\]]+'),
PatternRule(r'cheetah_directive', r'#.*?[#\n]'),
RegionRule(r'cheetah_tag', r'</?', TagGrammar, r'/?>'),
RegionRule('comment', r'#\*', Grammar, r'\*#'),
PatternRule('comment', r'##.*\n'),
PatternRule('cheetah_placeholder', r'\${[a-zA-Z0-9_.\[\]]+}'),
PatternRule('cheetah_placeholder', r'\$[a-zA-Z0-9_.\[\]]+'),
PatternRule('cheetah_directive', r'#.*?[#\n]'),
RegionRule('cheetah_tag', '</?', TagGrammar, '/?>'),
]
class Template(mode.Fundamental):
class Template(Fundamental):
name = 'Cheetah'
extensions = ['.tmpl']
grammar = TemplateGrammar
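
The cheetah_placeholder patterns shared by the string, tag, and template grammars above accept both the braced and bare forms; a standalone check:

import re
braced = re.compile(r'\${[a-zA-Z0-9_.\[\]]+}')
bare   = re.compile(r'\$[a-zA-Z0-9_.\[\]]+')
print(braced.match('${user.name}').group())   # ${user.name}
print(bare.match('$items[0]').group())        # $items[0]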

View File

@ -1,6 +1,3 @@
import mode
class Colortext(mode.Fundamental):
name = 'Colortext'
from mode import Fundamental
class Colortext(Fundamental): name = 'Colortext'
install = Colortext.install

View File

@ -1,16 +1,16 @@
import color, mode
from mode import Fundamental
from lex import Grammar, PatternRule, RegionRule
from mode.python import StringGrammar1, StringGrammar2
class ConfGrammar(Grammar):
rules = [
PatternRule(r'comment', r'#.*$'),
PatternRule(r'comment', r'//.*$'),
RegionRule(r'string', r"'", StringGrammar1, r"'"),
RegionRule(r'string', r'"', StringGrammar2, r'"'),
PatternRule('comment', '#.*$'),
PatternRule('comment', '//.*$'),
RegionRule('string', "'", StringGrammar1, "'"),
RegionRule('string', '"', StringGrammar2, '"'),
]
class Conf(mode.Fundamental):
class Conf(Fundamental):
name = 'conf'
extensions = ['.conf', '.cfg', '.cnf', '.config']
grammar = ConfGrammar

View File

@ -1,8 +1,3 @@
import color, mode
from lex import Grammar, PatternRule, RegionRule
from mode.python import StringGrammar1, StringGrammar2, PythonGrammar
class Console(mode.Fundamental):
name = 'Console'
from mode import Fundamental
class Console(Fundamental): name = 'Console'
install = Console.install

View File

@ -1,12 +1,18 @@
import code, re, string, StringIO, sys, traceback
import color, completer, lex, method, mode
from lex import Grammar, PatternRule
import code
import re
import StringIO
import sys
import traceback
import completer
from method import Method
import method.move
import mode.mini
from lex import Lexer, Grammar, PatternRule
from mode.python import PythonGrammar
from point import Point
PAD = ' '
class ConsoleExec(method.Method):
class ConsoleExec(Method):
def _execute(self, w, **vargs):
if w.application.completion_window_is_open():
w.application.close_completion_buffer()
@ -80,12 +86,12 @@ class ConsoleExec(method.Method):
for w2 in b.windows:
w2.goto_end(force=True)
class ConsoleCancel(method.Method):
class ConsoleCancel(Method):
def execute(self, w, **vargs):
w.application.close_mini_buffer()
if w.application.completion_window_is_open():
w.application.close_completion_buffer()
class ConsoleClear(method.Method):
class ConsoleClear(Method):
def execute(self, w, **vargs):
a = w.application
if not a.has_buffer_name('*Console*'):
@ -93,7 +99,7 @@ class ConsoleClear(method.Method):
b = a.bufferlist.get_buffer_by_name('*Console*')
b.clear()
class ConsoleHistoryPrev(method.Method):
class ConsoleHistoryPrev(Method):
def execute(self, w, **vargs):
if w.mode.hindex <= 0:
w.mode.hindex = 0
@ -102,14 +108,14 @@ class ConsoleHistoryPrev(method.Method):
w.mode.history[-1] = w.buffer.make_string()
w.mode.hindex -= 1
w.buffer.set_data(w.mode.history[w.mode.hindex])
class ConsoleHistoryNext(method.Method):
class ConsoleHistoryNext(Method):
def execute(self, w, **vargs):
if w.mode.hindex == len(w.mode.history) - 1:
return
w.mode.hindex += 1
w.buffer.set_data(w.mode.history[w.mode.hindex])
class ConsoleTab(method.Method):
class ConsoleTab(Method):
def execute(self, w, **vargs):
a = w.application
s = w.buffer.make_string()
@ -119,7 +125,7 @@ class ConsoleTab(method.Method):
w.insert_string_at_cursor(' ' * w.mode.tabwidth)
return
l = lex.Lexer(w.mode, PythonGrammar)
l = Lexer(w.mode, PythonGrammar)
tokens = list(l.lex([s]))
curr_t = None
@ -191,11 +197,11 @@ class ConsoleTab(method.Method):
w.insert_string_at_cursor(s)
mode.mini.use_completion_window(a, name, candidates)
class ConsoleBaseMethod(method.Method):
subcls = method.Method
class ConsoleBase(Method):
subcls = Method
subbuf = '*Console*'
def __init__(self):
method.Method.__init__(self)
Method.__init__(self)
self.submethod = self.subcls()
def _execute(self, w, **vargs):
a = w.application
@ -204,21 +210,18 @@ class ConsoleBaseMethod(method.Method):
w2 = a.bufferlist.get_buffer_by_name(self.subbuf).windows[0]
self.submethod.execute(w2, **vargs)
class ConsolePageUp(ConsoleBaseMethod):
subcls = method.move.PageUp
class ConsolePageDown(ConsoleBaseMethod):
subcls = method.move.PageDown
class ConsoleGotoBeginning(ConsoleBaseMethod):
subcls = method.move.GotoBeginning
class ConsoleGotoEnd(ConsoleBaseMethod):
subcls = method.move.GotoEnd
class ConsolePageUp(ConsoleBase): subcls = method.move.PageUp
class ConsolePageDown(ConsoleBase): subcls = method.move.PageDown
class ConsoleGotoBeginning(ConsoleBase): subcls = method.move.GotoBeginning
class ConsoleGotoEnd(ConsoleBase): subcls = method.move.GotoEnd
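
ConsolePageUp, ConsolePageDown and the two Goto classes all reuse one pattern from ConsoleBase: a command that instantiates another command and runs it against the window of a fixed, named buffer instead of the current one. A standalone toy version of that pattern (these classes are illustrative, not the pmacs Method/application API):

class Window(object):
    def __init__(self, name): self.name = name

class PageUp(object):
    def execute(self, w, **vargs): print('page-up in %s' % w.name)

class Forwarder(object):
    subcls = PageUp          # command to delegate to
    subbuf = '*Console*'     # buffer whose window receives it
    def __init__(self): self.submethod = self.subcls()
    def execute(self, windows, **vargs):
        # pmacs resolves this via bufferlist.get_buffer_by_name(...).windows[0]
        self.submethod.execute(windows[self.subbuf], **vargs)

Forwarder().execute({'*Console*': Window('*Console*')})   # page-up in *Console*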
class ConsoleMini(mode.Fundamental):
name = 'ConsoleMini'
grammar = PythonGrammar
actions = [ConsoleExec, ConsoleClear, ConsoleCancel, ConsoleHistoryPrev,
ConsoleHistoryNext, ConsoleTab,
ConsolePageUp, ConsolePageDown, ConsoleGotoBeginning, ConsoleGotoEnd]
ConsoleHistoryNext, ConsoleTab,
ConsolePageUp, ConsolePageDown, ConsoleGotoBeginning,
ConsoleGotoEnd]
_bindings = {
'console-exec': ('RETURN',),
'console-clear': ('C-l',),

View File

@ -1,19 +1,6 @@
import color, mode
from mode import Fundamental
from lex import Grammar, PatternRule, NocasePatternRule, RegionRule, NocaseRegionRule
from point import Point
class StringGrammar1(Grammar):
rules = [
PatternRule(r'octal', r'\\[0-7]{3}'),
PatternRule(r'escaped', r'\\.'),
PatternRule(r'data', r"[^']+"),
]
class StringGrammar2(Grammar):
rules = [
PatternRule(r'octal', r'\\[0-7]{3}'),
PatternRule(r'escaped', r'\\.'),
PatternRule(r'data', r'[^"]+'),
]
from mode.python import StringGrammar1, StringGrammar2
class CommentGrammar1(Grammar):
rules = [PatternRule(r'data', r'(?:[^*]|\*(?!/))+')]
@ -28,31 +15,33 @@ class KeywordGrammar(Grammar):
RegionRule(r'string', '"', StringGrammar2, r'"'),
]
chr2 = '[-a-z0-9_]'
class CSSGrammar(Grammar):
rules = [
RegionRule(r'comment', '/\*', CommentGrammar1, '\*/'),
RegionRule(r'comment', '<!--', CommentGrammar2, '-->'),
NocasePatternRule(r'css_dimension', r'[+-]?(?:[0-9]+|[0-9]*\.[0-9]+)[-a-z_][-a-z0-9_]*'),
NocasePatternRule(r'css_percentage', r'[+-]?(?:[0-9]+|[0-9]*\.[0-9]+)%%'),
NocasePatternRule(r'css_length', r'[+-]?(?:[0-9]+|[0-9]*\.[0-9]+)(?:em|ex|px|in|cm|mm|pt|pc)'),
NocasePatternRule(r'css_hash', r'#[-a-z0-9_]+'),
NocasePatternRule(r'css_real', r'[+-]?[0-9]*\.[0-9]+'),
NocasePatternRule(r'css_int', r'[+-]?[0-9]+'),
NocasePatternRule(r'css_dimension', r'[+-]?(?:\d+|\d*\.\d+)[-a-z_][-a-z0-9_]*'),
NocasePatternRule(r'css_percentage', r'[+-]?(?:\d+|\d*\.\d+)%%'),
NocasePatternRule(r'css_length', r'[+-]?(?:\d+|\d*\.\d+)(?:em|ex|px|in|cm|mm|pt|pc)'),
NocasePatternRule(r'css_hash', r'#' + chr2 + '+'),
NocasePatternRule(r'css_real', r'[+-]?\d*\.\d+'),
NocasePatternRule(r'css_int', r'[+-]?\d+'),
NocasePatternRule(r'css_rule', r'@(?:page|media|import)'),
NocasePatternRule(r'css_color', r'(?:aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow|#[0-9]{6}|#[0-9]{3})'),
NocasePatternRule(r'css_color', r'(?:aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow|#\d{6}|#\d{3})(?!' + chr2 + ')'),
NocasePatternRule(r'css_keyword', r'(?:url|rgb|counter)'),
NocaseRegionRule(r'css_keyword', '(?:(?<=url)|(?<=rgb)|(?<=counter))\(', KeywordGrammar, '\)'),
NocasePatternRule(r'css_label', r"\.?[-a-zA-Z0-9_]+(?= *{)"),
NocasePatternRule(r'css_ident', r"-?[a-z_][-a-z0-9_]*"),
NocasePatternRule(r'css_name', r"[-a-z0-9_]+"),
NocasePatternRule(r'css_label', r"\.?" + chr2 + "+(?= *{)"),
NocasePatternRule(r'css_ident', r"-?[a-z_]" + chr2 + "*"),
NocasePatternRule(r'css_name', chr2 + "+"),
NocasePatternRule(r'delimiter', r'[:;,{}()\[\]]|~=|\|=|='),
RegionRule(r'string', "'", StringGrammar1, r"'"),
RegionRule(r'string', '"', StringGrammar2, r'"'),
]
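
The css_color rule gains a trailing lookahead so that colour keywords no longer match as prefixes of longer identifiers. A standalone check with a reduced colour list (the real rule names many more):

import re
chr2 = '[-a-z0-9_]'
color = re.compile(r'(?:red|blue|green)(?!' + chr2 + ')', re.IGNORECASE)
print(bool(color.match('red')))      # True
print(bool(color.match('redish')))   # False: 'ish' continues the identifier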
class CSS(mode.Fundamental):
class CSS(Fundamental):
name = 'CSS'
extensions = ['.css']
grammar = CSSGrammar
@ -71,8 +60,8 @@ class CSS(mode.Fundamental):
'css_rule': ('cyan', 'default', 'bold'),
'css_label': ('cyan', 'default', 'bold'),
'css_keyword': ('cyan', 'default', 'bold'),
'css_ident': ('default', 'default', 'bold'),
'css_name': ('default', 'default', 'bold'),
'css_ident': ('default', 'default'),
'css_name': ('default', 'default'),
'css_keyword': ('cyan', 'default', 'bold'),
'css_keyword.start': ('default', 'default', 'bold'),
'css_keyword.null': ('cyan', 'default', 'bold'),

View File

@ -1,5 +1,5 @@
import color, method, mode, re
from lex import Grammar, PatternRule, RegionRule
from mode import Fundamental
from lex import Grammar, PatternRule
class DiffGrammar(Grammar):
rules = [
@ -11,7 +11,7 @@ class DiffGrammar(Grammar):
PatternRule(name=r'common', pattern=r"^.*\n$"),
]
class Diff(mode.Fundamental):
class Diff(Fundamental):
name = 'diff'
extensions = ['.patch', '.diff']
grammar = DiffGrammar()

View File

@ -1,41 +1,63 @@
import commands, dirutil, grp, method, mode, os.path, pwd, re
import buffer, buffer.fs
import window
from window import Window
from lex import Grammar, PatternRule, RegionRule, PatternMatchRule
from point import Point
from method import Method, Argument
class PermGrammar(Grammar):
rules = [
PatternRule(r'perm_sticky', r'[tT]'),
PatternRule(r'perm_setid', r'[sS]'),
PatternRule(r'perm_read', r'r'),
PatternRule(r'perm_write', r'w'),
PatternRule(r'perm_exec', r'x'),
PatternRule('perm_sticky', '[tT]'),
PatternRule('perm_setid', '[sS]'),
PatternRule('perm_read', 'r'),
PatternRule('perm_write', 'w'),
PatternRule('perm_exec', 'x'),
]
ds = r'([^ ]+)( +)([^ ]+)( +)([^ ]+)( +)([A-Za-z]{3} [ 0-9]{2} [0-9]{2}:[0-9]{2})( +)([^\n]+)'
class PathGrammar(Grammar):
rules = [
RegionRule(r'dir_perm', r'(?<=^.)', PermGrammar, r' '),
PatternMatchRule(r'x', ds,
r'dir_fields', r'spaces', r'dir_owner', r'spaces',
r'dir_group', r'spaces', r'dir_size', r'spaces',
r'dir_mtime', r'spaces', r'dir_name', r'spaces'),
RegionRule('dir_perm', '(?<=^.)', PermGrammar, ' '),
PatternMatchRule('x', ds, 'dir_owner', 'spaces', 'dir_group', 'spaces',
'dir_size', 'spaces', 'dir_mtime', 'spaces',
'dir_name', 'spaces'),
]
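
The ds pattern packs the post-permission columns of a generated listing line into alternating field/space groups. A standalone check of what it captures (the sample line is made up):

import re
ds = r'([^ ]+)( +)([^ ]+)( +)([^ ]+)( +)([A-Za-z]{3} [ 0-9]{2} [0-9]{2}:[0-9]{2})( +)([^\n]+)'
m = re.match(ds, 'moculus users 4096 Mar 30 00:50 lex.py')
print(m.group(1), m.group(3), m.group(5), m.group(7), m.group(9))
# owner, group, size, mtime, name: moculus users 4096 Mar 30 00:50 lex.py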
class DirGrammar(Grammar):
rules = [
RegionRule(r'dir_file', r'^-', PathGrammar, r'\n'),
RegionRule(r'dir_blk', r'^b', PathGrammar, r'\n'),
RegionRule(r'dir_chr', r'^c', PathGrammar, r'\n'),
RegionRule(r'dir_dir', r'^d', PathGrammar, r'\n'),
RegionRule(r'dir_lnk', r'^l', PathGrammar, r'\n'),
RegionRule(r'dir_fifo', r'^p', PathGrammar, r'\n'),
RegionRule(r'dir_sock', r'^s', PathGrammar, r'\n'),
RegionRule(r'dir_unk', r'^\?', PathGrammar, r'\n'),
RegionRule('dir_file', '^-', PathGrammar, r'\n'),
RegionRule('dir_blk', '^b', PathGrammar, r'\n'),
RegionRule('dir_chr', '^c', PathGrammar, r'\n'),
RegionRule('dir_dir', '^d', PathGrammar, r'\n'),
RegionRule('dir_lnk', '^l', PathGrammar, r'\n'),
RegionRule('dir_fifo', '^p', PathGrammar, r'\n'),
RegionRule('dir_sock', '^s', PathGrammar, r'\n'),
RegionRule('dir_unk', '^\?', PathGrammar, r'\n'),
]
class FsSettingBase(Method):
msg = 'Settings have changed'
def _doit(self, w, **vargs): pass
def _execute(self, w, **vargs):
self._doit(w, **vargs)
w.buffer.reload()
w.goto_beginning()
w.set_error(self.msg)
class HideDotFiles(FsSettingBase):
msg = "Dotfiles are hidden"
def _doit(self, w, **vargs): w.buffer.settings['hide-dot'] = True
class ShowDotFiles(FsSettingBase):
msg = "Dotfiles are visible"
def _doit(self, w, **vargs): w.buffer.settings['hide-dot'] = False
class SortType(FsSettingBase):
msg = "Sorting files by type, name"
def _doit(self, w, **vargs): w.buffer.settings['type-sort'] = True
class SortName(FsSettingBase):
msg = "Sorting files by name, type"
def _doit(self, w, **vargs): w.buffer.settings['type-sort'] = False
class RefreshView(Method):
def _execute(self, w, **vargs):
t = dirutil.resolve_token(w)
@ -57,7 +79,7 @@ class DirGrep(Method):
b = buffer.fs.PathListBuffer(bufname, paths)
b.modename = 'dir'
b.open()
window.Window(b, w.application, height=0, width=0)
Window(b, w.application, height=0, width=0)
w.application.add_buffer(b)
w.application.switch_buffer(b)
w.set_error("grep exited with %d" % status)
@ -190,7 +212,7 @@ class Dir(mode.Fundamental):
'dir_mtime': ('green', 'default', 'bold'),
}
actions = [RefreshView, OpenPath, DirGrep, Chmod, Chown, Chgrp, TouchPath,
RemovePath]
RemovePath, HideDotFiles, ShowDotFiles, SortName, SortType]
def __init__(self, w):
mode.Fundamental.__init__(self, w)
self.add_bindings('refresh-view', ('C-c r',))

View File

@ -5,29 +5,33 @@ from mode.lisp import Lisp, LispTabber
class StringGrammar(Grammar):
rules = [
PatternRule(r'octal', r'\\[0-7]{3}'),
PatternRule(r'escaped', r'\\.'),
PatternRule(r'data', r'[^\\"]+'),
PatternRule('octal', r'\\[0-7]{3}'),
PatternRule('escaped', r'\\.'),
PatternRule('data', r'[^\\"]+'),
]
chr1 = '[a-zA-Z_]'
chr2 = '[a-zA-Z0-9-_]'
symb = chr1 + chr2 + '*'
class ELispGrammar(Grammar):
rules = [
PatternRule(r'comment', r';.*$'),
PatternRule(r'delimiter', r'[()]'),
PatternRule(r'spaces', r' +'),
PatternRule(r'eol', r'\n'),
PatternRule(r'elisp_reserved', r'(?:t|nil)(?![^\"\' \t()])'),
PatternRule(r'keyword', r'(?:while|when|unless|setq-default|setq|setcar|require|provide|or|not|mapcar|list|let\*|let|lambda|if|exists|equal|defvar|defun|defstruct|defface|defalias|count|cons|c[ad]+r|apply|and)(?![^\"\' \t()])'),
PatternRule(r'elisp_symbol', r"'[a-zA-Z_][a-zA-Z0-9-_]*"),
PatternRule(r'delimiter', r"'"),
PatternRule(r'elisp_type', r":[a-zA-Z_][a-zA-Z0-9-_]*"),
PatternRule(r'attribute', r"&[a-zA-Z_][a-zA-Z0-9-_]*"),
PatternRule(r"integer", r"(?<![\.0-9a-zA-Z_])(?:0|-?[1-9][0-9]*|0[0-7]+|0[xX][0-9a-fA-F]+)[lL]?(?![\.0-9a-zA-Z_])"),
PatternRule(r"float", r"(?<![\.0-9a-zA-Z_])(?:[0-9]+\.[0-9]*|\.[0-9]+|(?:[0-9]|[0-9]+\.[0-9]*|\.[0-9]+)[eE][\+-]?[0-9]+)(?![\.0-9a-zA-Z_])"),
PatternRule(r"imaginary", r"(?<![\.0-9a-zA-Z_])(?:[0-9]+|(?:[0-9]+\.[0-9]*|\.[0-9]+|(?:[0-9]|[0-9]+\.[0-9]*|\.[0-9]+)[eE][\+-]?[0-9]+)[jJ])(?![\.0-9a-zA-Z_])"),
PatternRule(r'elisp_word', r"[^\"' \t()]+"),
RegionRule(r'string', r'"', StringGrammar, r'"'),
PatternRule(r'eol', r'\n$'),
PatternRule('comment', ';.*$'),
PatternRule('delimiter', '[()]'),
PatternRule('spaces', ' +'),
PatternRule('eol', r'\n'),
PatternRule('elisp_reserved', r'(?:t|nil)(?![^\"\' \t()])'),
PatternRule('keyword', r'(?:while|when|unless|setq-default|setq|setcar|require|provide|or|not|mapcar|list|let\*|let|lambda|if|exists|equal|defvar|defun|defstruct|defface|defalias|count|cons|c[ad]+r|apply|and)(?![^\"\' \t()])'),
PatternRule('elisp_symbol', "'" + symb),
PatternRule('delimiter', r"'"),
PatternRule('elisp_type', ":" + symb),
PatternRule('attribute', "&" + symb),
PatternRule("integer", r"(?<![\.0-9a-zA-Z_])(?:0|-?[1-9][0-9]*|0[0-7]+|0[xX][0-9a-fA-F]+)[lL]?(?![\.0-9a-zA-Z_])"),
PatternRule("float", r"(?<![\.0-9a-zA-Z_])(?:[0-9]+\.[0-9]*|\.[0-9]+|(?:[0-9]|[0-9]+\.[0-9]*|\.[0-9]+)[eE][\+-]?[0-9]+)(?![\.0-9a-zA-Z_])"),
PatternRule("imaginary", r"(?<![\.0-9a-zA-Z_])(?:[0-9]+|(?:[0-9]+\.[0-9]*|\.[0-9]+|(?:[0-9]|[0-9]+\.[0-9]*|\.[0-9]+)[eE][\+-]?[0-9]+)[jJ])(?![\.0-9a-zA-Z_])"),
PatternRule('elisp_word', r"[^\"' \t()]+"),
RegionRule('string', r'"', StringGrammar, r'"'),
PatternRule('eol', r'\n$'),
]
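
Unlike the word pattern used by the awk and C grammars, the elisp chr2 class also admits '-', so hyphenated symbols are matched whole by the rules built from symb. A quick standalone check:

import re
chr1 = '[a-zA-Z_]'
chr2 = '[a-zA-Z0-9-_]'
symb = chr1 + chr2 + '*'
print(re.match("'" + symb, "'setq-default").group())   # matches the whole 'setq-default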
class ELisp(Lisp):

View File

@ -1,5 +1,5 @@
import string
import buffer, method, mode, window
from method import Method
from mode import Fundamental
def use_completion_window(app, s, candidates):
if app.completion_window_is_open():
@ -16,14 +16,14 @@ def use_completion_window(app, s, candidates):
elif len(candidates) > 1:
app.open_completion_buffer(s, sorted(candidates))
class MiniCallback(method.Method):
class MiniCallback(Method):
def execute(self, w, **vargs):
app = w.application
if app.completion_window_is_open():
app.close_completion_buffer()
w.buffer.do_callback()
class MiniTabComplete(method.Method):
class MiniTabComplete(Method):
def execute(self, w, **vargs):
app = w.application
b = w.buffer
@ -37,12 +37,12 @@ class MiniTabComplete(method.Method):
candidates = b.tabber.get_candidates(s1, w)
use_completion_window(app, s2, candidates)
class Mini(mode.Fundamental):
name = 'Mini'
actions = [MiniCallback, MiniTabComplete]
def __init__(self, w):
mode.Fundamental.__init__(self, w)
self.add_bindings('mini-callback', ('RETURN',))
self.add_bindings('mini-tab-complete', ('TAB',))
class Mini(Fundamental):
name = 'Mini'
actions = [MiniCallback, MiniTabComplete]
_bindings = {
'mini-callback': ('RETURN',),
'mini-tab-complete': ('TAB',),
}
install = Mini.install

View File

@ -1,14 +1,20 @@
import color, method, mode
from lex import Grammar, PatternRule, RegionRule, OverrideRegionRule
from method import Method
from mode import Fundamental
from method import Method, WrapParagraph
from mode.text import TextInsertSpace
from mode.python import PythonGrammar
class RSTString(Grammar):
rules = [
def make_string_rules(forbid):
return [
PatternRule('escaped', r'\\.'),
PatternRule('data', r'[^\\' + forbid + ']+'),
]
class LineGrammar(Grammar): rules = [PatternRule('data', r'^.*\n$')]
class RSTString1(Grammar): rules = make_string_rules('*')
class RSTString2(Grammar): rules = make_string_rules('`')
class RSTString3(Grammar): rules = make_string_rules('|')
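
make_string_rules parameterizes one grammar body by the character that ends the surrounding region, replacing three near-identical classes. A standalone sketch of how the generated data pattern stops at its own delimiter but not at others:

import re
def data_pattern(forbid):
    return re.compile(r'[^\\' + forbid + ']+')

print(data_pattern('*').match('bold `code` text*rest').group())      # bold `code` text
print(data_pattern('`').match('literal *stars* here`rest').group())  # literal *stars* here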
class RSTGrammar(Grammar):
rules = [
PatternRule(r'escape', r'\\.'),
@ -16,16 +22,16 @@ class RSTGrammar(Grammar):
PatternRule(r'bullet', r'^ *[-*+](?=$| )'),
PatternRule(r'enumeration', r'^ *(?:[0-9]+|#)\.(?=$| )'),
RegionRule(r'strong_emphasis', r'\*\*', RSTString, r'\*\*'),
RegionRule(r'emphasis', r'\*', RSTString, r'\*'),
RegionRule(r'inline_literal', r'``', RSTString, r'``'),
RegionRule(r'interpreted', r'`', RSTString, r'`_?'),
RegionRule(r'strong_emphasis', r'\*\*', RSTString1, r'\*\*'),
RegionRule(r'emphasis', r'\*', RSTString1, r'\*'),
RegionRule(r'inline_literal', r'``', RSTString2, r'``'),
RegionRule(r'interpreted', r'`', RSTString2, r'`_?'),
PatternRule(r'anonymous', r'[a-zA-Z]+__'),
PatternRule(r'reference', r'[a-zA-Z]+_'),
RegionRule(r'inline_internal', r'_`', RSTString, r'`'),
RegionRule(r'inline_internal', r'_`', RSTString2, r'`'),
RegionRule(r'substitution', r'\|(?! )', RSTString, r'\|'),
RegionRule(r'substitution', r'\|(?! )', RSTString3, r'\|'),
PatternRule(r'footnote', r'\[[0-9]+\]_'),
PatternRule(r'citation', r'\[.+?\]_'),
PatternRule(r'rst_url', r'http://[^ ]+'),
@ -34,17 +40,22 @@ class RSTGrammar(Grammar):
PatternRule(r'subtitle', r'^-{3,}\n$'),
PatternRule(r'option', r'^(?:--|-|/)[a-zA-Z]+(?:[ =][-a-zA-Z_]+)?'),
RegionRule(r'table', r'\+-+(\+-+)*\+\n$', Grammar, r'^\n$'),
RegionRule(r'table', r'\+-+(\+-+)*\+\n$', LineGrammar, r'^\n$'),
PatternRule(r'field', r'^:.+?:'),
PatternRule(r'lineblock', r'^\|'),
RegionRule(r'blocktest', r'>>>', PythonGrammar, '^\n'),
OverrideRegionRule(r'code', r'^\.\. code-block:: +(?P<grammar>.+)\n$', None, r'^[^\n ]'),
RegionRule(r'literal_block', r'::\n$', Grammar, r'^(?=[^\n ])'),
OverrideRegionRule(r'code', r'^\.\. code-block:: +(?P<grammar>.+)\n$', None, r'^(?=[^\n ])'),
RegionRule(r'literal_block', r'::\n$', LineGrammar, r'^(?=[^\n ])'),
PatternRule('rst_word', r'[a-zA-Z]+'),
PatternRule('spaces', ' +'),
PatternRule('eol', ' +'),
PatternRule('rst_null', r'.'),
]
class RstWrapParagraph(method.WrapParagraph):
class RstWrapParagraph(WrapParagraph):
limit = 75
class RstInsertSpace(TextInsertSpace):
limit = 75
@ -53,7 +64,7 @@ class RstInsertSpace(TextInsertSpace):
class RstBuild(Method):
pass
class RST(mode.Fundamental):
class RST(Fundamental):
name = 'RST'
extensions = ['.rst']
grammar = RSTGrammar
@ -69,6 +80,7 @@ class RST(mode.Fundamental):
'rst_url': ('blue', 'default', 'bold'),
'table.start': ('cyan', 'default'),
'table.data': ('cyan', 'default'),
'table.null': ('cyan', 'default'),
'bullet': ('magenta', 'default', 'bold'),
@ -78,29 +90,34 @@ class RST(mode.Fundamental):
'blocktest.start': ('cyan', 'default', 'bold'),
'emphasis.start': ('red', 'default'),
'emphasis.data': ('red', 'default'),
'emphasis.null': ('red', 'default'),
'emphasis.end': ('red', 'default'),
'strong_emphasis.start': ('red', 'default', 'bold'),
'strong_emphasis.data': ('red', 'default', 'bold'),
'strong_emphasis.null': ('red', 'default', 'bold'),
'strong_emphasis.end': ('red', 'default', 'bold'),
'interpreted.start': ('magenta', 'default'),
'interpreted.data': ('magenta', 'default'),
'interpreted.null': ('magenta', 'default'),
'interpreted.end': ('magenta', 'default'),
'inline_literal.start': ('magenta', 'default', 'bold'),
'inline_literal.data': ('magenta', 'default', 'bold'),
'inline_literal.null': ('magenta', 'default', 'bold'),
'inline_literal.end': ('magenta', 'default', 'bold'),
'inline_internal.start': ('yellow', 'default'),
'inline_internal.data': ('yellow', 'default'),
'inline_internal.null': ('yellow', 'default'),
'inline_internal.end': ('magenta', 'default'),
'substitution.start': ('cyan', 'default', 'bold'),
'substitution.data': ('cyan', 'default', 'bold'),
'substitution.null': ('cyan', 'default', 'bold'),
'substitution.end': ('cyan', 'default', 'bold'),
'code.start': ('yellow', 'default', 'bold'),
'literal_block.start': ('yellow', 'default', 'bold'),
'literal_block.null': ('green', 'default', 'bold'),
'literal_block.start': ('yellow', 'default'),
'literal_block.data': ('yellow', 'default'),
'literal_block.null': ('yellow', 'default'),
}
actions = [RstInsertSpace, RstWrapParagraph]
config = {