diff --git a/mode/lisp.py b/mode/lisp.py
index 026da79..9b47dfe 100644
--- a/mode/lisp.py
+++ b/mode/lisp.py
@@ -14,9 +14,20 @@ class LispGrammar(Grammar):
     ]
 
 class LispTabber(tab2.StackTabber):
+    wsre = regex.whitespace
+    wst = ('spaces', 'null', 'eol',)
+    sre = regex.space
+    st = ('spaces', 'null',)
     def _handle_open_token(self, currlvl, y, i):
-        level = self.get_curr_level() + 4
-        self._append(self.get_token(y, i).string, level)
+        token = self.get_token(y, i)
+        rtoken = self.get_next_right_token(y, i)
+        if rtoken is not None and rtoken.string != '(':
+            rtoken = self.get_next_right_token(y, i + 1)
+        if rtoken is None:
+            level = self.get_curr_level() + 4
+        else:
+            level = rtoken.x
+        self._append(token.string, level)
         return currlvl
 
 class Lisp(mode2.Fundamental):
diff --git a/mode/scheme.py b/mode/scheme.py
index 49453b3..e007ecf 100644
--- a/mode/scheme.py
+++ b/mode/scheme.py
@@ -14,7 +14,7 @@ class SchemeGrammar(Grammar):
         PatternRule(r'abbrev', r"'|`|,\@|,"),
 
         # from r5rs
-        PatternRule(r'keyword', r'(?:=>|unquote-splicing|unquote|syntax-rules|set!|quote|quasiquote|or|map|loop|letrec-syntax|letrec|let-syntax|let\*|let|lambda|if|for-each|else|dynamic-wind|do|delay|define-syntax|define-macro|define|cond|case|call-with-output-file|call-with-input-file|call-with-current-continuation|begin|and)(?![^ )])'),
+        PatternRule(r'keyword', r'(?:=>|unquote-splicing|unquote|syntax-rules|set!|quote|quasiquote|or|map|loop|letrec-syntax|letrec|let-syntax|let\*|let|lambda|if|for-each|else|dynamic-wind|do|delay|define-syntax|define-macro|define|cond|case|call-with-output-file|call-with-input-file|call-with-current-continuation|begin|and)(?![^\n )])'),
         PatternRule(r'boolean', r'#[tf]'),
         PatternRule(r'char', r'#\\space|#\\newline|#\\.'),
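
For context, a standalone sketch of the indentation rule the revised _handle_open_token appears to implement. The helper below is hypothetical and not part of the editor's tabber API; it only mirrors the diff's logic with plain string scanning: align continuation lines with the column of the first argument after the operator, with the inner paren's column when the operator is itself an open paren, and fall back to the current level plus four when nothing usable follows the paren on that line.

# Hypothetical, self-contained illustration -- not code from this repository.
# It mirrors the rule the new LispTabber._handle_open_token encodes, using
# plain string scanning instead of the editor's token objects.

def open_paren_indent(line, paren_col, curr_level):
    """Column that lines continuing the form opened at paren_col align to."""
    rest = line[paren_col + 1:]
    words = rest.split()
    if not words:
        # Nothing follows the '(' on this line: fall back to curr_level + 4.
        return curr_level + 4
    if words[0].startswith('('):
        # The operator is itself a form: align with that inner '('.
        return line.index('(', paren_col + 1)
    if len(words) < 2:
        # Operator but no argument on this line: same fallback.
        return curr_level + 4
    # Align with the column of the first argument after the operator.
    op_col = line.find(words[0], paren_col + 1)
    return line.find(words[1], op_col + len(words[0]))

print(open_paren_indent('(foo bar', 0, 0))  # 5: continuation aligns under "bar"
print(open_paren_indent('(foo', 0, 0))      # 4: no argument, so curr_level + 4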