parent 04b672f318
commit db0479b68d

mode/mutt.py | 10

@@ -44,10 +44,10 @@ class Mutt(mode.Fundamental):
     config = {
         'mutt.margin': 72,
     }
-    def __init__(self, w):
-        mode.Fundamental.__init__(self, w)
-        self.add_bindings('learn-word', ('C-c l',))
-        self.add_bindings('mutt-wrap-paragraph', ('M-q',))
-        self.add_bindings('mutt-insert-space', ('SPACE',))
+    _bindings = {
+        'learn-word': ('C-c l',),
+        'mutt-wrap-paragraph': ('M-q',),
+        'mutt-insert-space': ('SPACE',),
+    }
 
 install = Mutt.install
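
Aside: the mutt.py and text.py hunks replace add_bindings() calls in __init__ with a declarative _bindings class attribute (php.py gains a similar table). How the real mode.Fundamental consumes that table is not shown in this diff; the sketch below assumes one plausible reading, with hypothetical names.

# Hypothetical sketch only: FundamentalSketch stands in for mode.Fundamental,
# whose actual handling of _bindings does not appear in this commit.
class FundamentalSketch(object):
    _bindings = {}

    def __init__(self):
        self.bindings = {}
        for action, keys in self._bindings.items():
            self.add_bindings(action, keys)

    def add_bindings(self, action, keys):
        for key in keys:
            self.bindings[key] = action

class MuttSketch(FundamentalSketch):
    _bindings = {
        'learn-word': ('C-c l',),
        'mutt-wrap-paragraph': ('M-q',),
        'mutt-insert-space': ('SPACE',),
    }

assert MuttSketch().bindings['M-q'] == 'mutt-wrap-paragraph'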

mode/nasm.py | 41

@@ -1,28 +1,29 @@
 import color, mode
-from lex import Grammar, PatternRule, RegionRule
+from lex import Grammar, PatternRule, RegionRule, PatternMatchRule
+from mode.python import StringGrammar1, StringGrammar2, StringGrammar3, StringGrammar4
 
-class StringGrammar(Grammar):
-    rules = [
-        PatternRule(r'octal', r'\\[0-7]{3}'),
-        PatternRule(r'escaped', r'\\.'),
-    ]
+chr1 = '[a-zA-Z_.?]'
+chr2 = '[a-zA-Z0-9_.?$#@~]'
+word = chr1 + chr2 + '*'
 
 class NasmGrammar(Grammar):
     rules = [
-        PatternRule(r'nasm_keyword', r"(?:section|global|extern)(?![a-zA-Z_])"),
-        PatternRule(r'macros', r"%(?:define|undef|assign|strlen|macro|endmacro|if|elif|else|endif|ifdef|ifndef|include|push|pop|stacksize)(?![a-zA-Z_])"),
-        PatternRule(r'instructions', r"(?:jeq|jne|ja|jmp|push|pushad|pushfd|call|ret|sub|add|pop|popa|popad|popfd|call|and|cwd|cdq|cmp|cmpxchg|cpuid|div|divpd|enter|leave|fadd|fld|fmul|fsqrt|fsub|hlt|imul|inc|int|int3|lea|mov|movd|mul|neg|not|nop|or|sal|sar|shl|shr|shld|shrd|syscall|sysenter|sysexit|test|xchg|xadd|xor)(?![a-zA-Z_])"),
-        PatternRule(r'registers', r"(?:eax|ax|ah|al|ebx|bx|bh|bl|ecx|cx|ch|cl|esi|edi|esp|ebp)(?![a-zA-Z_])"),
-        PatternRule(r'prefix', r"(?:dword|word|lock)(?![a-zA-Z_])"),
-        PatternRule(r'nasm_label', r"[a-zA-Z_.][a-zA-Z0-9_.]*:"),
-        PatternRule(r"identifier", r"[a-zA-Z_][a-zA-Z0-9_]*"),
-        PatternRule(r"integer", r"(0|[1-9][0-9]*|0[0-7]+|0[xX][0-9a-fA-F]+)[lL]?"),
-        PatternRule(r"float", r"[0-9]+\.[0-9]*|\.[0-9]+|([0-9]|[0-9]+\.[0-9]*|\.[0-9]+)[eE][\+-]?[0-9]+"),
-        RegionRule(r'string', r'"""', StringGrammar, r'"""'),
-        RegionRule(r'string', r"'''", StringGrammar, r"'''"),
-        RegionRule(r'string', r'"', StringGrammar, r'"'),
-        RegionRule(r'string', r"'", StringGrammar, r"'"),
-        PatternRule(r'comment', r';.*$'),
+        PatternRule('continuation', r'\\\n$'),
+        PatternRule('nasm_keyword', "(?:section|global|extern)(?!" + chr2 + ")"),
+        PatternRule('macros', "%(?:define|undef|assign|strlen|macro|endmacro|if|elif|else|endif|ifdef|ifndef|include|push|pop|stacksize)(?!" + chr2 + ")"),
+        PatternRule('instructions', "(?:jeq|jne|ja|jmp|push|pushad|pushfd|call|ret|sub|add|pop|popa|popad|popfd|call|and|cwd|cdq|cmp|cmpxchg|cpuid|div|divpd|enter|leave|fadd|fld|fmul|fsqrt|fsub|hlt|imul|inc|int|int3|lea|mov|movd|mul|neg|not|nop|or|sal|sar|shl|shr|shld|shrd|syscall|sysenter|sysexit|test|xchg|xadd|xor)(?!" + chr2 + ")"),
+        PatternRule('registers', "(?:eax|ax|ah|al|ebx|bx|bh|bl|ecx|cx|ch|cl|esi|edi|esp|ebp)(?!" + chr2 + ")"),
+        PatternRule('prefix', "(?:dword|word|lock)(?!" + chr2 + ")"),
+        PatternMatchRule('x', '(' + word + ')(:)', 'nasm_label', 'delimiter'),
+        #PatternRule('nasm_label', word + ":"),
+        PatternRule("identifier", r'\$?' + word),
+        PatternRule("integer", "(0|[1-9][0-9]*|0[0-7]+|0[xX][0-9a-fA-F]+)[lL]?"),
+        PatternRule("float", r"[0-9]+\.[0-9]*|\.[0-9]+|([0-9]|[0-9]+\.[0-9]*|\.[0-9]+)[eE][\+-]?[0-9]+"),
+        RegionRule('string', "'''", StringGrammar3, "'''"),
+        RegionRule('string', '"""', StringGrammar4, '"""'),
+        RegionRule('string', "'", StringGrammar1, "'"),
+        RegionRule('string', '"', StringGrammar2, '"'),
+        PatternRule('comment', ';.*$'),
     ]
 
 class Nasm(mode.Fundamental):
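
Aside: the rewritten NasmGrammar composes its regexes from the shared fragments chr1, chr2, and word instead of repeating character classes, and splits a label into a name plus a ':' delimiter. The snippet below checks that composition with plain re; PatternMatchRule itself is this editor's API and its behavior is only assumed here.

import re

# Same fragments the new NasmGrammar is built from.
chr1 = '[a-zA-Z_.?]'
chr2 = '[a-zA-Z0-9_.?$#@~]'
word = chr1 + chr2 + '*'

# A label is a word followed by ':'; two groups let a tokenizer emit the
# name and the delimiter separately, as the PatternMatchRule line suggests.
label_re = re.compile('(' + word + ')(:)')
m = label_re.match('.loop$1:')
assert m.group(1) == '.loop$1' and m.group(2) == ':'

# The keyword rules use a negative lookahead built from chr2, so 'extern'
# does not match inside a longer identifier such as 'externals'.
kw_re = re.compile("(?:section|global|extern)(?!" + chr2 + ")")
assert kw_re.match('extern') and not kw_re.match('externals')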

mode/ocaml.py

@@ -11,9 +11,12 @@ class StringGrammar(Grammar):
         PatternRule(r'hex', r'\\x[0-9A-Fa-f]{2}'),
     ]
 
+#class CommentGrammar(Grammar):
+#    rules = []
+#CommentGrammar.rules.append(RegionRule(r'comment', r'\(\*', CommentGrammar, r'\*\)'))
+
 class CommentGrammar(Grammar):
-    rules = []
-CommentGrammar.rules.append(RegionRule(r'comment', r'\(\*', CommentGrammar, r'\*\)'))
+    rules = [RegionRule(r'comment', r'\(\*', None, r'\*\)')]
 
 class OcamlGrammar(Grammar):
     rules = [

mode/php.py | 147

@@ -1,7 +1,6 @@
-import color, mode, tab
+from mode import Fundamental
 from lex import Grammar, PatternRule, RegionRule
 from mode.python import StringGrammar
-#from mode.c import CTabber
 
 class PHPGrammar(Grammar):
     rules = [
@@ -35,132 +34,7 @@ class PHPGrammar(Grammar):
         PatternRule(r"eol", r"\n$"),
     ]
 
-#class JavaTabber(CTabber):
-#    def is_base(self, y):
-#        if y == 0:
-#            return True
-#
-#        highlighter = self.mode.window.buffer.highlights[self.mode.name]
-#        if not highlighter.tokens[y]:
-#            return False
-#
-#        for t in highlighter.tokens[y]:
-#            if t.name == 'null':
-#                pass
-#            elif t.name == 'keyword':
-#                if t.string in ('class', 'interface'):
-#                    return True
-#                elif t.string in ('public', 'private', 'protected', 'static',
-#                                  'final', 'native', 'synchronized', 'abstract',
-#                                  'threadsafe', 'transient'):
-#                    pass
-#                else:
-#                    return False
-#            else:
-#                return False
-#
-#        return False
-#
-#        # detecting function declarations is annoying; this assumes that people
-#        # won't put a variable type and name on different lines, but that they
-#        # might do that for function return type and name.
-#        #
-#        # unfortunately, valid function return types might include any of the
-#        # four types of tokens below
-#        decl = False
-#        for t in highlighter.tokens[y]:
-#            if t.name in ('keyword', 'identifier', 'structname', 'enumname'):
-#                decl = True
-#                continue
-#            if decl and t.name == 'function':
-#                break
-#            else:
-#                decl = False
-#                break
-#        if decl:
-#            return True
-#
-#        return False
-#
-#    def _handle_open_token(self, currlvl, y, i):
-#        self._opt_pop('cont')
-#        token = self.get_token(y, i)
-#        if token.string == '{':
-#            self._opt_pop('cond')
-#        currlvl = tab.StackTabber._handle_open_token(self, currlvl, y, i)
-#        return currlvl
-#    def _handle_close_token(self, currlvl, y, i):
-#        w = self.mode.tabwidth
-#        self._opt_pop('cont')
-#        currlvl = tab.StackTabber._handle_close_token(self, currlvl, y, i)
-#        token = self.get_token(y, i)
-#        if self.is_rightmost_token(y, i):
-#            if token.string == '}':
-#                self._opt_pop('cond')
-#                self._opt_pop('cont')
-#            elif self._peek_name() == 'cond':
-#                pass
-#            else:
-#                self._opt_append('cont', currlvl + w)
-#        return currlvl
-#    def _handle_other_token(self, currlvl, y, i):
-#        w = self.mode.tabwidth
-#        token = self.get_token(y, i)
-#        fqname = token.fqname()
-#        if fqname == 'delimiter' and token.string == ';':
-#            self._opt_pop('cond')
-#            self._opt_pop('cont')
-#            self._opt_pop('cond')
-#
-#        elif fqname == 'keyword':
-#            if token.string in ('do', 'else', 'for', 'if', 'while'):
-#                self._append('cond', currlvl + w)
-#            elif token.string == 'break':
-#                self._opt_pop('case', 'while', 'for')
-#            elif token.string == 'continue':
-#                self._opt_pop('while', 'for')
-#            elif token.string == 'case':
-#                self._opt_pop('case')
-#                currlvl = self.get_curr_level()
-#                self._opt_append('case', currlvl + w)
-#
-#        elif fqname == 'string.start':
-#            self._opt_append('string', None)
-#        elif fqname == 'string.end':
-#            self._opt_pop('string')
-#            if self.is_rightmost_token(y, i):
-#                self._opt_append('cont', currlvl + w)
-#
-#        # TODO: this could be a lot better
-#        elif fqname == 'macro':
-#            currlvl = 0
-#        elif fqname.startswith('macro.start'):
-#            self._opt_append('macro', None)
-#            currlvl = 0
-#        elif fqname.startswith('macro.end'):
-#            self._opt_pop('macro', None)
-#
-#        elif fqname.startswith('macroblock.start'):
-#            self._opt_append('macroblock', None)
-#            currlvl = 0
-#        elif fqname.startswith('macroblock.end'):
-#            self._opt_pop('macroblock', None)
-#
-#        if self.is_rightmost_token(y, i):
-#            if self._has_markers() and self._peek_name() == 'cond':
-#                pass
-#            elif(not fqname.startswith('string') and
-#                 not fqname.startswith('comment') and
-#                 not fqname.startswith('macro') and
-#                 not fqname == 'delimiter' and
-#                 not fqname == 'header' and
-#                 not fqname == 'null' and
-#                 not fqname == 'eol' and
-#                 token.string not in ('}', ';', '(', '{', '[', ',')):
-#                self._opt_append('cont', currlvl + w)
-#        return currlvl
-
-class PHP(mode.Fundamental):
+class PHP(Fundamental):
     name = 'PHP'
     extensions = ['.php']
     #tabbercls = JavaTabber
@@ -175,14 +49,19 @@ class PHP(mode.Fundamental):
         'doccomment.end': ('red', 'default', 'bold'),
         'doccomment.null': ('red', 'default', 'bold'),
         'import': ('blue', 'default', 'bold'),
         'php_label': ('magenta', 'default', 'bold'),
         'php_builtin': ('magenta', 'default', 'bold'),
         'php_char': ('green', 'default', 'bold'),
         'php_integer': ('green', 'default', 'bold'),
         'php_float': ('green', 'default', 'bold'),
+    }
+    _bindings = {
+        'close-paren': (')',),
+        'close-brace': ('}',),
+        'close-bracket': (']',),
     }
     def __init__(self, w):
-        mode.Fundamental.__init__(self, w)
+        Fundamental.__init__(self, w)
         self.add_bindings('close-paren', (')',))
         self.add_bindings('close-brace', ('}',))
         self.add_bindings('close-bracket', (']',))

mode/replace.py

@@ -1,11 +1,11 @@
-import re, string
-import color, method, minibuffer, mode, searchutil
-from point import Point
+import re
+from method import Method
+from mode import Fundamental
+import minibuffer, searchutil
 
 subgroup_re = re.compile(r'((?:\\\\)*)\\(0|[1-9][0-9]*)')
 
-class ReplaceOne(method.Method):
+class ReplaceOne(Method):
     'In a replace command, replace the next occurance'
     def execute(self, w, **vargs):
         m = w.buffer.method
@@ -13,7 +13,7 @@ class ReplaceOne(method.Method):
         _find_next(m, False)
         _finish(m, w)
 
-class ReplaceDone(method.Method):
+class ReplaceDone(Method):
     'In a replace command, replace the next occurance and exit'
     def execute(self, w, **vargs):
         m = w.buffer.method
@@ -21,14 +21,14 @@ class ReplaceDone(method.Method):
         _end(w)
         w.set_error("Replace done")
 
-class SkipReplace(method.Method):
+class SkipReplace(Method):
     'In a replace command, skip the next occurance'
     def execute(self, w, **vargs):
         m = w.buffer.method
         _find_next(m, True)
         _finish(m, w)
 
-class ReplaceAll(method.Method):
+class ReplaceAll(Method):
     'In a replace command, replace all remaining occurances'
     def execute(self, w, **vargs):
         m = w.buffer.method
@@ -38,7 +38,7 @@ class ReplaceAll(method.Method):
         _end(w)
         w.set_error("Replace ended")
 
-class CancelReplace(method.Method):
+class CancelReplace(Method):
     'Cancel a currently running replace command'
     def execute(self, w, **vargs):
         _end(w)
@@ -137,11 +137,11 @@ def _end(w):
     w.buffer.method.old_window = None
     assert not w.application.mini_active
 
-class Replace(mode.Fundamental):
+class Replace(Fundamental):
     name = 'Replace'
     actions = [ReplaceAll, ReplaceDone, ReplaceOne, SkipReplace, CancelReplace]
     def __init__(self, w):
-        mode.Fundamental.__init__(self, w)
+        Fundamental.__init__(self, w)
 
         self.actions = {}
         self.bindings = {}

mode/search.py

@@ -1,7 +1,7 @@
 import re, string
-import color, method, minibuffer, mode, searchutil
-from point import Point
+import method, minibuffer, searchutil
+from method import Method
+from mode import Fundamental
 
 selected_color = 'magenta'
 unselected_color = 'yellow'
@@ -19,7 +19,7 @@ def _make_regex(w, s):
     except:
         raise searchutil.IllegalPatternError("failed to compile: %r" % s)
 
-class SearchNext(method.Method):
+class SearchNext(Method):
     'In a search command, move to the next occurance'
     def execute(self, w, **vargs):
         w.buffer.method.direction = 'next'
@@ -34,7 +34,7 @@ class SearchNext(method.Method):
             action = InsertSearchString(w.application.last_search)
             action.execute(w)
 
-class SearchPrevious(method.Method):
+class SearchPrevious(Method):
     'In a search command, move to the previous occurance'
     def execute(self, w, **vargs):
         w.buffer.method.direction = 'previous'
@@ -49,7 +49,7 @@ class SearchPrevious(method.Method):
         except searchutil.IllegalPatternError:
             w.application.clear_highlighted_ranges('search')
 
-class EndSearch(method.Method):
+class EndSearch(Method):
     'End the current search command, leaving the cursor in place'
     def execute(self, w, **vargs):
         old_w = w.buffer.method.old_window
@@ -58,18 +58,18 @@ class EndSearch(method.Method):
         old_w.set_mark_point(old_c)
         w.set_error("Mark set to search start")
 
-class CancelSearch(method.Method):
+class CancelSearch(Method):
     'End the current search command, restoring the cursor to the search start'
     def execute(self, w, **vargs):
         w.buffer.method.old_window.goto(w.buffer.method.old_cursor)
         _end(w)
         w.set_error("Search cancelled")
 
-class SearchDeleteLeft(method.Method):
+class SearchDeleteLeft(Method):
     def execute(self, w, **vargs):
         w.left_delete()
         _post_delete(w)
-class SearchDeleteLeftWord(method.Method):
+class SearchDeleteLeftWord(Method):
     def execute(self, w, **vargs):
         w.delete_left_word()
         _post_delete(w)
@@ -91,7 +91,7 @@ def _post_delete(w):
     except searchutil.IllegalPatternError:
         w.application.clear_highlighted_ranges('search')
 
-class InsertSearchString(method.Method):
+class InsertSearchString(Method):
     def __init__(self, s):
         self.name = 'insert-search-string-%s' % (s)
         self.string = s
@@ -119,12 +119,12 @@ def _end(w):
     w.application.clear_highlighted_ranges('search')
     w.application.last_search = w.buffer.make_string()
 
-class Search(mode.Fundamental):
+class Search(Fundamental):
     name = 'Search'
     actions = [SearchNext, SearchPrevious, EndSearch, CancelSearch,
                SearchDeleteLeft, SearchDeleteLeftWord]
     def __init__(self, w):
-        mode.Fundamental.__init__(self, w)
+        Fundamental.__init__(self, w)
 
         # clear out all the defaults that we don't want/need, and add ours
         self.actions = {}

mode/sh.py

@@ -1,5 +1,6 @@
 import commands
-import color, mode, tab
+from tab import StackTabber
+from mode import Fundamental
 from lex import Grammar, PatternRule, RegionRule, PatternMatchRule, OverridePatternRule
 from method import Method
 
@@ -129,7 +130,7 @@ ShGrammar.rules = [
     PatternRule(r'eol', r'\n$'),
 ]
 
-class ShTabber(tab.StackTabber):
+class ShTabber(StackTabber):
     def is_base(self, y):
         if y == 0:
             return True
@@ -144,7 +145,7 @@ class ShTabber(tab.StackTabber):
             # we have to ignore ) when used in "case" statements.
            return currlvl
        else:
-            return tab.StackTabber._handle_close_token(self, currlvl, y, i)
+            return StackTabber._handle_close_token(self, currlvl, y, i)
     def _handle_other_token(self, currlvl, y, i):
         w = self.mode.tabwidth
         token = self.get_token(y, i)
@@ -169,7 +170,7 @@ class ShCheckSyntax(Method):
         else:
             app.data_buffer("*Sh-Check-Syntax*", output)
 
-class Sh(mode.Fundamental):
+class Sh(Fundamental):
     name = 'sh'
     paths = ['/etc/profile']
     basenames = ['.bashrc', '.bash_profile', '.profile']
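
Aside: ShTabber and SqlTabber subclass the editor's StackTabber, and the hunks here only change how that base class is imported. The sketch below illustrates the general stack-based indentation idea in isolation; it does not reproduce tab.StackTabber's real interface, which works on highlighter tokens and handles many more cases.

# Generic illustration only: push a deeper level on each opening bracket,
# pop on each closing one, and report the top of the stack per line.
def indent_levels(lines, width=4):
    stack = [0]
    levels = []
    for line in lines:
        stripped = line.strip()
        if stripped[:1] in (')', ']', '}') and len(stack) > 1:
            stack.pop()          # a leading closer lines up with its opener
        levels.append(stack[-1])
        rest = stripped[1:] if stripped[:1] in (')', ']', '}') else stripped
        for ch in rest:
            if ch in '([{':
                stack.append(stack[-1] + width)
            elif ch in ')]}' and len(stack) > 1:
                stack.pop()
    return levels

assert indent_levels(['rules = [', 'PatternRule(', "'eol',", '),', ']']) == [0, 4, 8, 4, 0]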

mode/shell.py

@@ -1,4 +1,4 @@
-import color, mode
+from mode import Fundamental
 from lex import Grammar, PatternRule, RegionRule
 from mode.sh import ShGrammar
 
@@ -7,7 +7,7 @@ class ShellGrammar(Grammar):
         PatternRule(r'shell_input', r'^>>>.*$'),
         PatternRule(r'shell_mesg', r'^===.*$'),
     ]
-class Shell(mode.Fundamental):
+class Shell(Fundamental):
     name = 'Shell'
     grammar = ShellGrammar()
     colors = {

mode/sql.py

@@ -1,4 +1,5 @@
-import mode, tab
+from tab import StackTabber
+from mode import Fundamental
 from lex import Grammar, PatternRule, NocasePatternRule, RegionRule, NocaseRegionRule, OverridePatternRule
 from mode.python import StringGrammar1, StringGrammar2
 
@@ -81,7 +82,7 @@ PlPgSqlGrammar2.rules = [
 class SqlGrammar(Grammar):
     rules = base_rules + function_rules + sql_rules + string_rules + end_rules
 
-class SqlTabber(tab.StackTabber):
+class SqlTabber(StackTabber):
     wst = ('spaces', 'null', 'eol', 'function.body.spaces', 'function.body.eol')
     st = ('spaces', 'null', 'function.body.spaces')
     def is_base(self, y):
@@ -144,7 +145,7 @@ class SqlTabber(tab.StackTabber):
 
         return currlvl
 
-class Sql(mode.Fundamental):
+class Sql(Fundamental):
     name = 'Sql'
     extensions = ['.sql']
     grammar = SqlGrammar

mode/text.py | 15

@@ -1,4 +1,5 @@
-import color, mode, method, ispell
+import mode, method, ispell
+from mode import Fundamental
 from lex import Token, Rule, PatternRule, RegionRule, Grammar
 
 class WordRule(PatternRule):
@@ -71,7 +72,7 @@ class LearnWord(method.Method):
         w.insert_string_at_cursor(' ')
         w.left_delete()
 
-class Text(mode.Fundamental):
+class Text(Fundamental):
     name = 'Text'
     extensions = ['.txt']
     grammar = TextGrammar
@@ -79,10 +80,10 @@ class Text(mode.Fundamental):
     config = {
         'text.margin': 78,
     }
-    def __init__(self, w):
-        mode.Fundamental.__init__(self, w)
-        self.add_bindings('learn-word', ('C-c l',))
-        self.add_bindings('text-insert-space', ('SPACE',))
-        self.add_bindings('text-wrap-paragraph', ('M-q',))
+    _bindings = {
+        'learn-word': ('C-c l',),
+        'text-insert-space': ('SPACE',),
+        'text-wrap-paragraph': ('M-q',),
+    }
 
 install = Text.install