import re, sets, string, sys
import color, commands, completer, default, method, mode, regex, tab
from point import Point
from lex import Grammar, PatternRule, ContextPatternRule, RegionRule, OverridePatternRule, PatternGroupRule
from method import Argument, Method, WrapParagraph

class PodGrammar(Grammar):
    rules = [
        RegionRule(r'entry', r'(?<=^=head[1-4]) +.*$', Grammar, '^\n$'),
        RegionRule(r'entry', r'(?<=^=over) +.*$', Grammar, '^\n$'),
        RegionRule(r'entry', r'(?<=^=item) +.*$', Grammar, '^\n$'),
        RegionRule(r'entry', r'(?:(?<=^=begin)|(?<=^=end)) +.*$', Grammar, '^\n$'),
        RegionRule(r'entry', r'(?<=^=encoding) +.*$', Grammar, '^\n$'),
    ]

def _make_string_rules(forbidden=None):
    if forbidden:
        rule = PatternRule(r'scalar', r"\$[^A-Za-z0-9 \\%s](?![A-Za-z0-9_])" % forbidden)
    else:
        rule = ContextPatternRule(r'scalar', r"\$[^A-Za-z0-9 %(delim)s](?![A-Za-z0-9_])", r"\$[^A-Za-z0-9 ](?![A-Za-z0-9_])")

    rules = [
        PatternRule(r'octal', r'\\[0-7]{3}'),
        PatternRule(r'escaped', r'\\.'),
        PatternRule(r'deref', r"\$+[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*(?:(?:->)?{\$?(?:[a-zA-Z_][a-zA-Z_0-9]*|'(?:\\.|[^'\\])*'|\"(\\.|[^\\\"])*\")}|(?:->)?\[\$?[0-9a-zA-Z_]+\])+"),
        PatternRule(r'length', r"\$#[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"),
        rule,
        PatternRule(r'scalar', r"\$\$*[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"),
        PatternRule(r'cast', r"[\$\@\%\&]{.*?}"),
        PatternRule(r'array', r"@\$*[A-Za-z_](?:[A-Za-z0-9_]|::)*"),
    ]
    return rules

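# Illustrative note (added commentary, not in the original source): the rules
# built above are used to sub-lex interpolating string regions, picking out
# pieces of Perl such as:
#
#     \101            octal escape        -> 'octal'
#     \"              backslash escape    -> 'escaped'
#     $hash->{key}    nested lookup       -> 'deref'
#     $#lines         last-index form     -> 'length'
#     $name           scalar variable     -> 'scalar'
#     @items          array variable      -> 'array'
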
class StringGrammar(Grammar):
    rules = _make_string_rules()

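# Added commentary: each QuotedGrammarN below is StringGrammar with one extra
# character excluded from the punctuation-variable rule -- presumably the
# closing delimiter of the construct it is used for (')', '}', '>', ']'), so
# that a punctuation variable such as "$)" cannot swallow the delimiter that
# closes qq(...), qq{...}, etc.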
class QuotedGrammar1(Grammar):
    rules = _make_string_rules(')')
class QuotedGrammar2(Grammar):
    rules = _make_string_rules('}')
class QuotedGrammar3(Grammar):
    rules = _make_string_rules('>')
class QuotedGrammar4(Grammar):
    rules = _make_string_rules(']')

class PerlGrammar(Grammar):
    rules = [
        RegionRule(r'heredoc', r"<<(?P<heredoc>[a-zA-Z_][a-zA-Z0-9_]+)", None, ';\n', StringGrammar, r'^%(heredoc)s$'),
        RegionRule(r'heredoc', r'<< *"(?P<heredoc>[a-zA-Z0-9_]+)" *;', StringGrammar, r'^%(heredoc)s$'),
        RegionRule(r'heredoc', r"<< *'(?P<heredoc>[a-zA-Z0-9_]+)' *;", Grammar, r'^%(heredoc)s$'),
        RegionRule(r'evaldoc', r"<< *`(?P<heredoc>[a-zA-Z0-9_]+)` *;", StringGrammar, r'^%(heredoc)s$'),

        RegionRule(r'endblock', r"^__END__|__DATA__ *$", Grammar, r''),
        RegionRule(r'pod', r'^=[a-zA-Z0-9_]+', PodGrammar, r'^=cut'),

        OverridePatternRule(r'comment', r'#@@:(?P<token>[.a-zA-Z0-9_]+):(?P<mode>[.a-zA-Z0-9_]+) *$'),

        #PatternRule(r'prototype', r'\([\\@$%&*;]+\)'),
        PatternGroupRule(r'prototype', r'delimiter', r'\(', r'prototype', r'[\[\]\\@$%&*;]+', r'delimiter', '\)'),

        PatternRule(r'comment', r'#.*$'),
        RegionRule(r'perl_string', r'"', StringGrammar, r'"'),
        RegionRule(r'perl_string', r"'", Grammar, r"'"),
        RegionRule(r'evalstring', r"`", StringGrammar, r"`"),
        PatternRule(r'number', r'0?\.[0-9]+|[0-9]+(?:\.[0-9]+)?'),
        PatternRule(r'perl_keyword', r"(?<!->)(?:STDIN|STDERR|STDOUT|continue|do|else|elsif|eval|foreach|for|if|last|my|next|our|package|require|return|sub|undef|unless|until|use|while)(?![a-zA-Z0-9_])"),
        PatternRule(r'hash_key', r'(?<={)[A-Za-z0-9_]+(?=})'),
        PatternRule(r'hash_key', r'[A-Za-z0-9_]+(?= *=>)'),
        PatternRule(r'length', r"\$#[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"),
        PatternRule(r'cast', r'[\$\@\%\^\&](?= *{)'),
        PatternRule(r'scalar', r"\$[\[\]<>ab/'\"_@\?#\$!%^|&*()](?![A-Za-z0-9_])"),
        PatternRule(r'array', r"@_"),
        PatternRule(r'perl_function', r"\$\$*[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*(?=-> *\()"),
        PatternRule(r'scalar', r"\$\$*[A-Za-z0-9_](?:[A-Za-z0-9_]|::)*"),
        PatternRule(r'array', r"@\$*[A-Za-z_](?:[A-Za-z0-9_]|::)*"),
        PatternRule(r'perl_hash', r"%\$*[A-Za-z_](?:[A-Za-z0-9_]|::)*"),
        PatternRule(r'deref', r"[@%\$&\*](?={)"),

        # match regexes
        RegionRule(r'match', r'(?:(?<==~)|(?<=!~)|(?<=\()|(?<=split)) *(?P<delim>/)', StringGrammar, r'/[a-z]*'),
        RegionRule(r'match', r'm *(?P<delim>[^ #a-zA-Z0-9_])', StringGrammar, r'%(delim)s[a-z]*'),
        RegionRule(r'match', r'm(?P<delim>#)', StringGrammar, r'#[a-z]*'),

        # replace regexes
        RegionRule(r'replace', r's *(?P<delim>[^ a-zA-Z0-9_])', StringGrammar, r'%(delim)s', StringGrammar, r'%(delim)s[a-z]*'),
        RegionRule(r'replace', r's(?P<delim>#)', StringGrammar, r'#', StringGrammar, r'#[a-z]*'),

        # translate operator
        RegionRule(r'translate', r'(?:y|tr) *(?P<delim>[^ a-zA-Z0-9_])', Grammar, r'%(delim)s', Grammar, r'%(delim)s[a-z]*'),
        RegionRule(r'translate', r'(?:y|tr)#', Grammar, r'#', Grammar, r'#[a-z]*'),

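        # Illustrative examples (added commentary): the match, replace and
        # translate rules above are intended to catch constructs such as
        #     $str =~ /foo+/i        (match after =~)
        #     m!^\d+$!               (match, alternate delimiter)
        #     s/foo/bar/g            (replace)
        #     tr/a-z/A-Z/            (translate)
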
        # some more basic stuff
        PatternRule(r'package', r"(?<=package )(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"),
        PatternRule(r'sub', r"(?<=sub )[a-zA-Z_][a-zA-Z_0-9]*"),
        PatternRule(r'use', r"(?<=use )(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"),
        PatternRule(r'require', r"(?<=require )(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"),
        PatternRule(r'perl_label', r'[a-zA-Z_][a-zA-Z0-9_]*:(?!:)'),
        PatternRule(r'method', r"(?<=->)[a-zA-Z_][a-zA-Z_0-9]*"),
        PatternRule(r'perl_function', r"&\$*(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*"),
        PatternRule(r'perl_builtin', r"(?<!->)&?(?:write|warn|wantarray|waitpid|wait|vec|values|utime|use|untie|unshift|unpack|unlink|undef|umask|ucfirst|uc|truncate|times|time|tied|tie|telldir|tell|syswrite|system|sysseek|sysread|sysopen|syscall|symlink|substr|sub|study|stat|srand|sqrt|sprintf|split|splice|sort|socketpair|socket|sleep|sin|shutdown|shmwrite|shmread|shmget|shmctl|shift|setsockopt|setservent|setpwent|setprotoent|setpriority|setpgrp|setnetent|sethostent|setgrent|send|semop|semget|semctl|select|seekdir|seek|scalar|rmdir|rindex|rewinddir|reverse|return|reset|require|rename|ref|redo|recv|readpipe|readlink|readline|readdir|read|rand|quotemeta|push|prototype|printf|print|pos|pop|pipe|package|pack|our|ord|opendir|open|oct|no|next|my|msgsnd|msgrcv|msgget|msgctl|mkdir|map|lstat|log|lock|localtime|local|listen|link|length|lcfirst|lc|last|kill|keys|join|ioctl|int|index|import|hex|grep|goto|gmtime|glob|getsockopt|getsockname|getservent|getservbyport|getservbyname|getpwuid|getpwnam|getpwent|getprotoent|getprotobynumber|getprotobyname|getpriority|getppid|getpgrp|getpeername|getnetent|getnetbyname|getnetbyaddr|getlogin|gethostent|gethostbyname|gethostbyaddr|getgrnam|getgrgid|getgrent|getc|formline|format|fork|flock|fileno|fcntl|exp|exit|exists|exec|eval|eof|endservent|endpwent|endprotoent|endnetent|endhostent|endgrent|each|dump|do|die|delete|defined|dbmopen|dbmclose|crypt|cos|continue|connect|closedir|close|chroot|chr|chown|chop|chomp|chmod|chdir|caller|bless|binmode|bind|atan2|alarm|accept|abs)(?![a-zA-Z0-9_])"),

        # quote operator: qq(), qx() and qr() usually interpolate
        RegionRule(r'quoted', r'q[rqx] *(?P<delim>\()', QuotedGrammar1, r'\)'),
        RegionRule(r'quoted', r'q[rqx] *(?P<delim>{)', QuotedGrammar2, r'}'),
        RegionRule(r'quoted', r'q[rqx] *(?P<delim><)', QuotedGrammar3, r'>'),
        RegionRule(r'quoted', r'q[rqx] *(?P<delim>\[)', QuotedGrammar4, r'\]'),
        RegionRule(r'quoted', r"q[rqx] *(?P<delim>')", Grammar, r"'"),
        RegionRule(r'quoted', r'q[rqx] *(?P<delim>[^ #])', StringGrammar, r'%(delim)s'),
        RegionRule(r'quoted', r'q[rqx](?P<delim>#)', StringGrammar, r'#'),

        # quote operator: q() and qw() do not interpolate
        RegionRule(r'quoted', r'qw? *\(', Grammar, r'\)'),
        RegionRule(r'quoted', r'qw? *{', Grammar, r'}'),
        RegionRule(r'quoted', r'qw? *<', Grammar, r'>'),
        RegionRule(r'quoted', r'qw? *\[', Grammar, r'\]'),
        RegionRule(r'quoted', r'qw?#', Grammar, r'#'),
        RegionRule(r'quoted', r'qw? *(?P<delim>[^ #])', Grammar, r'%(delim)s'),

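        # Illustrative examples (added commentary) of the quote operators the
        # rules above are intended to match:
        #     qq(hello $name)   qx{ls $dir}   qr/\d+/    (interpolating)
        #     q(literal text)   qw(foo bar baz)          (non-interpolating)
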
        PatternRule(r'perl_function', r"(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*(?= *\()"),
        PatternRule(r'perl_class', r"(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*(?=->)"),

        # some basic stuff
        PatternRule(r'delimiter', r"->|=>|(?<!:):(?!=:)|[,;=\?(){}\[\]\(\)]"),
        PatternRule(r'operator', r"\+=|-=|\*=|/=|//=|%=|&=|\|=|\^=|>>=|<<=|\*\*="),
        PatternRule(r'operator', r"\+\+|\+|<=>|<>|<<|<=|<|-|>>|>=|>|\*\*|\*|&&|&|\|\||\||/|\^|==|//|~|=~|!~|!=|%|!|\.|x(?![a-zA-Z_])"),
        PatternRule(r'noperator', r"(?:xor|or|not|ne|lt|le|gt|ge|eq|cmp|and)(?![a-zA-Z_])"),
        PatternRule(r'bareword', r'(?:[a-zA-Z_][a-zA-Z_0-9]*::)*[a-zA-Z_][a-zA-Z_0-9]*'),

        PatternRule(r"eol", r"\n$"),
    ]

class PerlTabber(tab.StackTabber):
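    # Added commentary: this tabber extends the generic StackTabber by
    # treating a line whose first token is the 'sub' keyword as an indentation
    # base, and by pushing/popping a 'cont' marker so that statements which
    # continue across lines get one extra tabwidth of indentation until a
    # closing ';' or '}' is seen.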
    def is_base(self, y):
        if y == 0:
            return True
        highlighter = self.mode.window.buffer.highlights[self.mode.name()]
        if not highlighter.tokens[y]:
            return False
        t = highlighter.tokens[y][0]
        return t.name == 'perl_keyword' and t.string == 'sub'

    def _handle_open_token(self, currlvl, y, i):
        currlvl = tab.StackTabber._handle_open_token(self, currlvl, y, i)
        return currlvl

    def _handle_close_token(self, currlvl, y, i):
        w = self.mode.tabwidth
        self._opt_pop('cont')
        currlvl = tab.StackTabber._handle_close_token(self, currlvl, y, i)
        token = self.get_token(y, i)
        if self.is_rightmost_token(y, i):
            if token.string == '}':
                self._opt_pop('cont')
            else:
                self._opt_append('cont', currlvl + w)
        return currlvl

    def _handle_other_token(self, currlvl, y, i):
        w = self.mode.tabwidth
        token = self.get_token(y, i)
        fqname = token.fqname()
        if fqname == 'delimiter' and token.string == ';':
            self._opt_pop('cont')
        elif fqname == 'heredoc.start':
            self._opt_append('heredoc', None)
        elif fqname == 'heredoc.end':
            self._opt_pop('heredoc')
            self._opt_pop('cont')
        elif fqname == 'quoted.start':
            self._opt_append('quoted', currlvl + w)
        elif fqname == 'quoted.end':
            self._opt_pop('cont')
            self._opt_pop('quoted')
        elif fqname == 'evaldoc.start':
            self._opt_append('evaldoc', None)
        elif fqname == 'evaldoc.end':
            self._opt_pop('evaldoc')
            self._opt_pop('cont')
        elif fqname == 'pod.start':
            self._opt_append('pod', None)
        elif fqname == 'pod.end':
            self._opt_pop('pod')
            currlvl = 0
        elif fqname == 'perl_string.start':
            self._opt_append('string', None)
        elif fqname == 'perl_string.end':
            self._opt_pop('string')
            if self.is_rightmost_token(y, i):
                self._opt_append('cont', currlvl + w)
        if self.is_rightmost_token(y, i):
            if(not fqname.startswith('pod') and
               not fqname.startswith('heredoc') and
               not fqname.startswith('perl_string') and
               not fqname.startswith('endblock') and
               not fqname == 'eol' and
               not fqname == 'comment' and
               not fqname == 'null' and
               token.string not in ('}', ';', '(', '{', '[', ',')):
                self._opt_append('cont', currlvl + w)
        return currlvl

class PerlSetLib(Method):
    '''Set the path(s) to find perl modules'''
    args = [Argument("lib", type=type(""), prompt="Location of lib: ",
                     default=default.build_constant("."))]
    def _execute(self, w, **vargs):
        w.application.config['perl.lib'] = vargs['lib']

class PerlCheckSyntax(Method):
    '''Check the syntax of a perl file'''
    def _execute(self, w, **vargs):
        app = w.application
        perllib = w.application.config.get('perl.lib')
        if perllib:
            cmd = "perl -c -I '%s' '%s'" % (perllib, w.buffer.path)
        else:
            cmd = "perl -c '%s'" % (w.buffer.path)
        (status, output) = commands.getstatusoutput(cmd)
        if status == 0:
            app.set_error("Syntax OK")
            app.data_buffer("*Perl-Check-Syntax*", output, switch_to=False)
        else:
            app.data_buffer("*Perl-Check-Syntax*", output)

class PerlViewModulePerldoc(Method):
    '''View documentation about this file using perldoc'''
    def _execute(self, w, **vargs):
        cmd = "perldoc -t -T '%s'" % w.buffer.path
        (status, output) = commands.getstatusoutput(cmd)
        w.application.data_buffer("*Perldoc*", output, switch_to=True)

class PerlViewWordPerldoc(Method):
    '''View documentation about a package or function using perldoc'''
    def _try(self, w, word, asfunc=False):
        if asfunc:
            cmd = "perldoc -t -T -f '%s'" % (word,)
        else:
            cmd = "perldoc -t -T '%s'" % (word,)
        perllib = w.application.config.get('perl.lib')
        if perllib:
            cmd = 'PERL5LIB=%r %s' % (perllib, cmd)
        (status, data) = commands.getstatusoutput(cmd)
        if status == 0:
            return data
        else:
            return None

    def _show(self, w, data, word):
        w.application.data_buffer("*Perldoc*", data, switch_to=True)
        w.application.set_error('displaying documentation for %r' % word)

    def _execute(self, w, **vargs):
        token = w.get_token()
        #word = w.get_word(wl=string.letters + string.digits + '_:')
        word = token.string

        # make sure that the name is (mostly) valid
        if word is None:
            w.application.set_error('no word selected')
            return
        elif ':' in word and '::' not in word:
            w.application.set_error('invalid word: %r' % word)
            return

        # first try it as a package, unless it's a builtin
        if not token.name == "perl_builtin":
            parts = word.split('::')
            while len(parts) > 0:
                newword = '::'.join(parts)
                data = self._try(w, newword, asfunc=False)
                if data:
                    self._show(w, data, newword)
                    return
                parts.pop(-1)

        # then try it as a function
        data = self._try(w, word, asfunc=True)
        if data:
            self._show(w, data, word)
        else:
            w.application.set_error('nothing found for %r' % word)

class PerlInitFunctions(Method):
    '''Build the map of functions defined in this module'''
    def _execute(self, w, **vargs):
        w.mode.build_function_map()
        w.application.set_error("Initialized function map")

class PerlGotoFunction(Method):
    '''Jump to a function defined in this module'''
    args = [Argument("name", type(""), "perlfunction", "Goto Function: ")]
    def _execute(self, w, **vargs):
        name = vargs['name']
        functions = w.mode.get_functions()
        if name in functions:
            w.goto(Point(0, functions[name]))
        else:
            w.application.set_error("Function %r was not found" % name)

class PerlListFunctions(Method):
    '''Show the user all functions defined in this module'''
    def _execute(self, w, **vargs):
        names = w.mode.get_function_names()
        output = "\n".join(names) + "\n"
        w.application.data_buffer("*Perl-List-Functions*", output, switch_to=True)

class PerlWhichFunction(Method):
    '''Show the user what function they are in'''
    def _execute(self, w, **vargs):
        cursor = w.logical_cursor()
        name = w.mode.get_line_function(cursor.y)
        if name is None:
            w.application.set_error("None")
        else:
            functions = w.mode.get_functions()
            i = functions[name] + 1
            w.application.set_error("line %d: %s" % (i, name))

class PerlHashCleanup(Method):
    '''Correctly align assignment blocks and literal hashes'''
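    # Example (added for illustration) of the realignment this method is meant
    # to perform on the hash/assignment block around the cursor:
    #
    #     before:                    after:
    #       foo => 1,                  foo    => 1,
    #       foobar  => 2,              foobar => 2,
    #       x =>  3,                   x      => 3,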
    def _execute(self, w, **vargs):
        cursor = w.logical_cursor()
        b = w.buffer

        # so this is where we will store the groups that we find
        groups_by_line = {}

        # the regexes we will try
        regexes = [regex.perl_hash_cleanup,
                   regex.perl_assign_cleanup]

        # if we aren't in a hash, inform the user and exit
        line = b.lines[cursor.y]
        myregex = None
        for r in regexes:
            if r.match(line):
                myregex = r

        if myregex is None:
            raise Exception, "Not a perl hash line"

        groups_by_line[cursor.y] = myregex.match(line).groups()

        # find the beginning of this hash block
        start = 0
        i = cursor.y - 1
        while i >= 0:
            line = b.lines[i]
            m = myregex.match(line)
            if not m:
                start = i + 1
                break
            else:
                groups_by_line[i] = m.groups()
            i -= 1

        # find the end of this hash block
        end = len(b.lines) - 1
        i = cursor.y + 1
        while i < len(b.lines):
            line = b.lines[i]
            m = myregex.match(line)
            if not m:
                end = i - 1
                break
            else:
                groups_by_line[i] = m.groups()
            i += 1

        # assume that the least indented line is correct
        indent_w = min([len(groups_by_line[k][0]) for k in groups_by_line])

        # find the longest hash key to base all the other padding on
        key_w = max([len(groups_by_line[k][1]) for k in groups_by_line])

        # for each line, format it correctly
        keys = groups_by_line.keys()
        keys.sort()
        data = ''
        for i in keys:
            indent_pad = ' ' * indent_w
            key = groups_by_line[i][1]
            sep = groups_by_line[i][3]
            value = groups_by_line[i][5]
            key_pad = ' ' * (key_w - len(key))
            data += indent_pad + key + key_pad + ' ' + sep + ' ' + value + '\n'

        # remove the old text and add the new
        start_p = Point(0, start)
        if end < len(w.buffer.lines) - 1:
            end_p = Point(0, end + 1)
        else:
            end_p = Point(len(w.buffer.lines[end]), end)
        w.kill(start_p, end_p)
        w.insert_string(start_p, data)

class PerlHashCleanup2(Method):
    #_hash_parts = (
    #    (TokenMatch('null', None),),
    #    (TokenMatch('hash_key', None), TokenMatch('string.start', None)),
    #    (TokenMatch('null', None),),
    #    (TokenMatch('delimiter', '=>'),),
    #    (TokenMatch('null', None),),
    #)
    def _hash_matchXXX(self, group, line):
        i = 0
        j = 0
        stages = []
        while i < len(group):
            token = group[i]
            name = token.fqname()
            data = token.string
            k = len(stages)
            if k < len(self._hash_parts):
                for (name2, data2) in self._hash_parts[k]:
                    if ((name2 is None or name == name2) and
                        (data2 is None or data == data2)):
                        stages.append(line[j:token.x])
                        j = token.x
            else:
                stages.append(line[j:])
                return stages
            i += 1
        return None

    def _assign_match(self, group):
        return None

    def _execute(self, w, **vargs):
        cursor = w.logical_cursor()
        tokens = w.buffer.highlights[w.mode.name()].tokens
        if self._hash_match(tokens[cursor.y]):
            token_groups = self._parse_hash(w, **vargs)
        elif self._assign_match(tokens[cursor.y]):
            token_groups = self._parse_assign(w, **vargs)
        else:
            w.set_error("Not a hash line")
            return

        ys = token_groups.keys()
        ys.sort()

        segment_groups = []
        for y in ys:
            line = w.buffer.lines[y]
            segments = []
            i = 0
            for token in token_groups[y]:
                segments.append(line[i:token.x])
                i = token.x
            segments.append(line[i:])
            segment_groups.append(segments)

        output = "Lines %d through %d\n%r" % (ys[0] + 1, ys[-1] + 1, segment_groups)
        w.application.data_buffer("hash-dump", output, switch_to=True)

    def _parse_hash(self, w, **vargs):
        cursor = w.logical_cursor()
        tokens = w.buffer.highlights[w.mode.name()].tokens
        lines = {cursor.y: self._hash_match(tokens[cursor.y])}

        y1 = cursor.y
        while y1 > 0:
            match = self._hash_match(tokens[y1 - 1])
            if not match:
                break
            lines[y1 - 1] = match
            y1 -= 1

        y2 = cursor.y
        while y2 < len(tokens) - 1:
            match = self._hash_match(tokens[y2 + 1])
            if not match:
                break
            lines[y2 + 1] = match
            y2 += 1

        return lines

    def _parse_assign(self, w, **vargs):
        pass

class PerlWrapParagraph(method.WrapParagraph):
    '''Wrap Comments and POD'''
    # enumerations for line types
    LT_COMMENT = 1
    LT_POD = 2

    margin = 80
    comment_re = re.compile('( *)(#+)( *)(.*)')
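    # Added note: comment_re splits a comment line into (indent, hashes,
    # padding, text); e.g. '  ## some text' -> ('  ', '##', ' ', 'some text').
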
    def _is_newline(self, t):
        return t.name == 'eol'
    def _is_space(self, t):
        return t.name == 'null' and regex.space.match(t.string)

    def _detect_line_type(self, w, y):
        h = w.buffer.highlights[w.mode.name()]
        ltype = None
        for t in h.tokens[y]:
            fqname = t.fqname()
            if fqname == 'null' or fqname == 'eol':
                pass
            elif fqname.startswith('comment'):
                if ltype and ltype != self.LT_COMMENT:
                    ltype = None
                    break
                ltype = self.LT_COMMENT
            elif fqname.startswith('pod'):
                if ltype and ltype != self.LT_POD:
                    ltype = None
                    break
                ltype = self.LT_POD
            else:
                ltype = None
                break
        return ltype

    def _fix_comments(self, c, w):
        h = w.buffer.highlights[w.mode.name()]
        y1 = c.y
        y2 = c.y
        while y2 < len(w.buffer.lines) - 1:
            if self._detect_line_type(w, y2 + 1):
                y2 += 1
            else:
                break

        lines = w.buffer.lines[y1:y2 + 1]
        m = self.comment_re.match(lines[0])
        assert m
        prepend = m.group(1) + m.group(2)
        rmargin = self.margin - len(prepend)
        dpad = m.group(3)

        segments = []
        for line in lines:
            m = self.comment_re.match(line)
            assert m
            pad, data = m.group(3), m.group(4)
            if segments and pad == dpad and segments[-1][0] == dpad and segments[-1][1]:
                data = segments.pop(-1)[1] + ' ' + data
            i = 0
            while len(pad) + len(data[i:]) > rmargin:
                while data[i] == ' ':
                    i += 1
                j = rmargin - len(pad)
                while j >= 0 and data[i + j] != ' ':
                    j -= 1
                if j < 0:
                    j = rmargin - len(pad)
                segments.append([pad, data[i:i + j]])
                i += j
            if data:
                while data[i] == ' ':
                    i += 1
                segments.append([pad, data[i:]])
            else:
                segments.append(['', ''])

        lines2 = [prepend + x[0] + x[1] for x in segments]
        p1 = Point(0, y1)
        p2 = Point(len(w.buffer.lines[y2]), y2)
        w.buffer.delete(p1, p2)
        w.buffer.insert_lines(p1, lines2)
        w.set_error("wrapped comment lines %d-%d" % (y1 + 1, y2 + 1))

    def _fix_pod(self, c, w):
        w.set_error("pod wrapping not yet supported")

    def _execute(self, w, **vargs):
        c = w.logical_cursor()
        ltype = self._detect_line_type(w, c.y)
        if ltype == self.LT_COMMENT:
            self._fix_comments(c, w)
        elif ltype == self.LT_POD:
            WrapParagraph._execute(self, w, **vargs)
        else:
            w.set_error("did not detect comment or pod lines")

class PerlFunctionCompleter(completer.Completer):
    def get_candidates(self, s, w=None):
        old_window = w.buffer.method.old_window
        functions = old_window.mode.get_functions()
        return [n for n in functions if n.startswith(s)]

class Perl(mode.Fundamental):
    modename = 'Perl'
    extensions = ['.pl', '.pm']
    detection = ['perl']
    tabbercls = PerlTabber
    grammar = PerlGrammar
    opentokens = ('delimiter',)
    opentags = {'(': ')', '[': ']', '{': '}'}
    closetokens = ('delimiter',)
    closetags = {')': '(', ']': '[', '}': '{'}
    colors = {
        # comments
        'endblock.start': ('red', 'default', 'bold'),
        'endblock.null': ('red', 'default', 'bold'),
        'endblock.end': ('red', 'default', 'bold'),

        # pod
        'pod.start': ('red', 'default', 'bold'),
        'pod.null': ('red', 'default', 'bold'),
        'pod.entry.start': ('magenta', 'default', 'bold'),
        'pod.entry.null': ('magenta', 'default', 'bold'),
        'pod.entry.end': ('magenta', 'default', 'bold'),
        'pod.end': ('red', 'default', 'bold'),

        # basic stuff
        'escaped': ('magenta', 'default', 'bold'),
        'null': ('default', 'default', 'bold'),
        'sub': ('cyan', 'default', 'bold'),
        'prototype': ('magenta', 'default', 'bold'),
        'operator': ('default', 'default', 'bold'),
        'noperator': ('magenta', 'default', 'bold'),
        'endblock': ('red', 'default', 'bold'),
        'perl_keyword': ('magenta', 'default', 'bold'),
        'cast': ('yellow', 'default', 'bold'),
        'scalar': ('yellow', 'default', 'bold'),
        'array': ('yellow', 'default', 'bold'),
        'deref': ('yellow', 'default', 'bold'),
        'perl_hash': ('yellow', 'default', 'bold'),
        'hash_key': ('green', 'default', 'bold'),
        'perl_function': ('cyan', 'default', 'bold'),
        'perl_builtin': ('magenta', 'default', 'bold'),
        'perl_label': ('cyan', 'default', 'bold'),
        'package': ('cyan', 'default', 'bold'),
        'perl_class': ('cyan', 'default', 'bold'),
        'use': ('cyan', 'default', 'bold'),
        'require': ('cyan', 'default', 'bold'),

        # heredoc/evaldoc
        'heredoc.start': ('green', 'default', 'bold'),
        'heredoc.null': ('green', 'default', 'bold'),
        'heredoc.end': ('green', 'default', 'bold'),
        'evaldoc.start': ('cyan', 'default', 'bold'),
        'evaldoc.null': ('cyan', 'default', 'bold'),
        'evaldoc.end': ('cyan', 'default', 'bold'),

        # strings
        'perl_string.start': ('green', 'default', 'bold'),
        'perl_string.null': ('green', 'default', 'bold'),
        'perl_string.escaped': ('magenta', 'default', 'bold'),
        'perl_string.deref': ('yellow', 'default', 'bold'),
        'perl_string.end': ('green', 'default', 'bold'),

        # `` strings
        'evalstring.start': ('cyan', 'default', 'bold'),
        'evalstring.null': ('cyan', 'default', 'bold'),
        'evalstring.escaped': ('magenta', 'default', 'bold'),
        'evalstring.deref': ('yellow', 'default', 'bold'),
        'evalstring.end': ('cyan', 'default', 'bold'),

        # quoted region
        'quoted': ('cyan', 'default', 'bold'),
        'quoted.start': ('cyan', 'default', 'bold'),
        'quoted.null': ('cyan', 'default', 'bold'),
        'quoted.escaped': ('magenta', 'default', 'bold'),
        'quoted.deref': ('yellow', 'default', 'bold'),
        'quoted.end': ('cyan', 'default', 'bold'),

        # match regex
        'match.start': ('cyan', 'default', 'bold'),
        'match.end': ('cyan', 'default', 'bold'),
        'match.null': ('cyan', 'default', 'bold'),

        # replace regex
        'replace.start': ('cyan', 'default', 'bold'),
        'replace.middle0': ('cyan', 'default', 'bold'),
        'replace.end': ('cyan', 'default', 'bold'),
        'replace.null': ('cyan', 'default', 'bold'),
        'replace.escaped': ('magenta', 'default', 'bold'),
        'replace.deref': ('yellow', 'default', 'bold'),
        'replace.length': ('yellow', 'default', 'bold'),
        'replace.scalar': ('yellow', 'default', 'bold'),
        'replace.perl_hash': ('yellow', 'default', 'bold'),
        'replace.cast': ('yellow', 'default', 'bold'),

        # translate regex
        'translate.start': ('magenta', 'default', 'bold'),
        'translate.middle0': ('magenta', 'default', 'bold'),
        'translate.end': ('magenta', 'default', 'bold'),
        'translate.null': ('magenta', 'default', 'bold'),
    }
    config = {
        'perl.lib': 'lib',
    }
    actions = [PerlSetLib, PerlCheckSyntax, PerlHashCleanup,
               PerlViewModulePerldoc, PerlViewWordPerldoc, PerlWrapParagraph,
               PerlInitFunctions, PerlGotoFunction, PerlWhichFunction,
               PerlListFunctions]
    completers = {
        'perlfunction': PerlFunctionCompleter(),
    }

    def __init__(self, w):
        mode.Fundamental.__init__(self, w)
        self.add_bindings('perl-set-lib', ('C-c l',))
        self.add_bindings('perl-check-syntax', ('C-c s',))
        self.add_bindings('perl-hash-cleanup', ('C-c h',))
        self.add_bindings('perl-view-module-perldoc', ('C-c v',))
        self.add_bindings('perl-view-word-perldoc', ('C-c p',))
        self.add_bindings('perl-wrap-paragraph', ('M-q',))
        self.add_bindings('perl-init-functions', ('C-c M-g',))
        self.add_bindings('perl-goto-function', ('C-c M-g',))
        self.add_bindings('perl-which-function', ('C-c w',))
        self.add_bindings('perl-list-functions', ('C-c W',))
        self.add_bindings('close-paren', (')',))
        self.add_bindings('close-bracket', (']',))
        self.add_bindings('close-brace', ('}',))
        self.functions = None
        self.funclines = None

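    # Added commentary: build_function_map() walks the highlighter's tokens
    # line by line, records the line number of each 'sub' name it sees in
    # self.functions, and tracks '{'/'}' depth so that self.funclines maps
    # every line to the name of its enclosing sub (or None outside any sub).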
    def build_function_map(self):
        self.functions = {}
        self.funclines = []
        highlights = self.window.get_highlighter()
        curr, stack = None, []
        for group in highlights.tokens:
            self.funclines.append(curr)
            for t in group:
                if not curr and t.name == 'sub':
                    curr = t.string
                    stack = []
                    self.functions[curr] = t.y
                elif t.name == 'delimiter':
                    if t.string == '{':
                        stack.append(None)
                    elif t.string == '}':
                        stack.pop()
                        if not stack:
                            curr = None
            if curr:
                self.funclines[-1] = curr

    def get_functions(self):
        if self.functions is None:
            self.build_function_map()
        return self.functions

    def get_function_names(self):
        functions = self.get_functions()
        pairs = [[functions[key], key] for key in functions]
        pairs.sort()
        names = [x[1] for x in pairs]
        return names

    def get_line_function(self, y):
        if self.funclines is None:
            self.build_function_map()
        return self.funclines[y]

install = Perl.install