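"""Shell-script token grammar for the local lex module.

Each entry in ShGrammar.GRAMMAR_LIST names a token type, gives the regular
expression that recognizes it, and the action (from the lex module) applied
when it matches.
"""
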
# 2.3 imports
from optparse import OptionParser

# our imports
import lex

class ShGrammar(lex.Grammar):
    GRAMMAR_LIST = [
        # an identifier immediately followed by '()': a shell function definition
        {'name': "method",
         'expr': r"""[a-zA-Z_][a-zA-Z0-9_]*(?=\(\))""",
         'action': lex.make_token},

        # shell reserved words; the lookahead stops them matching inside longer words or before '='
        {'name': 'reserved',
         'expr': r"""(?:case|done|do|elif|else|esac|fi|for|function|if|in|select|then|until|while|time)(?![a-zA-Z0-9_=])""",
         'action': lex.make_token},

        # bash builtins; not matched when followed by an identifier character, '=' or '/'
        {'name': 'builtin',
         'expr': r"""(?:source|alias|bg|bind|break|builtin|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|readonly|read|return|set|shift|shopt|suspend|test|times|trap|type|ulimit|umask|unalias|unset|wait)(?![a-zA-Z0-9_=/])""",
         'action': lex.make_token},

        # test operators; '=' and '!=' only match with surrounding spaces
        {'name': 'operator',
         'expr': r"""(?:-eq|-ne|-gt|-lt|-ge|-le| = | != )""",
         'action': lex.make_token},

        ## {'name': 'redirection',
        ##  'expr': r"(?:[1-6] *)?> *(?:&[1-6]|(?:\\.|[^\\\"';| ])+)",
        ##  'action': lex.make_token},

        {'name': 'delimiter',
         'expr': r"""[][\(\);\{\}|&><]""",
         'action': lex.make_token},

        ## {'name': 'variable0',
        ##  'expr': r"""(?:(?<=\n)|^) *[a-zA-Z_][a-zA-Z0-9_]*(?=\=)""",
        ##  'action': lex.make_token},
        # a variable name directly followed by '=' (an assignment)
        {'name': 'variable0',
         'expr': r"""(?:(?<=\n) *|^ *| +)[a-zA-Z_][a-zA-Z0-9_]*(?=\=)""",
         'action': lex.make_token},

        # ${NAME}-style parameter expansion
        {'name': "variable1",
         'expr': r"\${(?:[a-zA-Z0-9_]+|\?\$)}",
         'action': lex.make_token},

        # $NAME, $?, $1 and similar ('$' not followed by '(' or '{')
        {'name': "variable2",
         'expr': r"\$[^({][a-zA-Z0-9_]*",
         'action': lex.make_token},

        # just the '$' introducing $( ... ) command substitution
        {'name': "variable3",
         'expr': r"\$(?=\()",
         'action': lex.make_token},

        # backquoted command substitution; tolerates a missing closing backquote
        {'name': "eval",
         'expr': r'`(?:\\.|[^\\`])*(?:`|.?$)',
         'action': lex.make_token},

        # double-quoted string; tolerates a missing closing quote
        {'name': "string1",
         'expr': r'"(?:\\.|[^\\"])*(?:"|.?$)',
         'action': lex.make_token},

        # single-quoted string; tolerates a missing closing quote
        {'name': "string2",
         'expr': r"'(?:\\.|[^\\'])*(?:'|.?$)",
         'action': lex.make_token},

        # a backslash at the end of a line (line continuation)
        {'name': 'continuation',
         'expr': r"""\\(?= *(\n|$))""",
         'action': lex.make_token},

        {'name': "comment",
         'expr': r'[#].*(?:\n|$)',
         'action': lex.make_token},

        {'name': 'bareword',
         'expr': r"""[a-zA-Z0-9_-]+""",
         'action': lex.make_token},

        # fallback: any remaining character (or escape sequence), handled by lex.silent
        {'name': "default",
         'expr': r'\\.|.|\n',
         'action': lex.silent}
        ]

    def _default_rules(self):
        """subclasses can override this to define defaults for a grammar"""
        for rdir in ShGrammar.GRAMMAR_LIST:
            self.add_rule(**rdir)
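
# A minimal, illustrative sanity check of a few of the patterns above.  It
# deliberately bypasses the lex module (whose driver API is not shown here)
# and exercises the raw regular expressions with the standard re module, so
# it assumes nothing beyond the 'name'/'expr' keys used in GRAMMAR_LIST.
if __name__ == '__main__':
    import re
    exprs = {}
    for rule in ShGrammar.GRAMMAR_LIST:
        exprs[rule['name']] = re.compile(rule['expr'])

    # the left-hand side of an assignment is picked up by 'variable0'
    assert exprs['variable0'].match('FOO=bar').group(0) == 'FOO'
    # an unterminated double-quoted string still matches through to the end
    assert exprs['string1'].match('"oops').group(0) == '"oops'
    # 'done' is reserved, but 'donee' is not (thanks to the lookahead)
    assert exprs['reserved'].match('done ') is not None
    assert exprs['reserved'].match('donee') is None
    print('pattern sanity checks passed')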