parent 92cd22884a
commit 11412d350b

@@ -1,12 +1,59 @@
Control Flow Integrity
======================
A study in stupid
-----------------

:Fields: are crazy

Options suck
-a foo ugh
-b blow me
--blarg sux
/V i fucking hate DOS

| crazy junk
| more crazy
| junk
| dammit
huh?

*emphasis*
\*escaped\*
**double trouble**
`interpreted`
``inline literal``
reference_
`phrase reference`_
anonymous__
_`inline internal target`
|substitution reference|
footnote reference [1]_
citation reference [CIT2002]_
http://docutils.sf.net/

Control-Flow Integrity is a technique used to ensure a security
property, namely that the targets of all instructions that alter
control flow (i.e., branch instructions) are legitimate. To do this,
they use a combination of static analysis and dynamic checks.
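
Not from the paper, just a toy sketch of the idea (all names here are
made up): the "static" half computes the set of targets an indirect
transfer is allowed to reach, and the "dynamic" half checks that set
before every transfer.

.. code-block:: Python

    # Hypothetical illustration of a CFI-style check, not the paper's
    # actual mechanism or code.
    ALLOWED = {'on_open', 'on_close'}   # target set computed "statically"

    def dispatch(table, name, *args):
        # dynamic check: refuse any target outside the allowed set
        if name not in ALLOWED:
            raise RuntimeError('CFI violation: %r' % name)
        return table[name](*args)

So ``dispatch({'on_open': f}, 'on_open')`` calls ``f``, while a corrupted
``name`` outside ``ALLOWED`` raises instead of transferring control.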

.. code-block:: Python
    def foo(a, b):
        def bar(a, b, c):
            "cat".join(1, 2, 3, 4, 5)
            t = ""
        return a + b

This *is* awesome

**REALLY AWESOME**

*italics*

**bold**

star\*
another``*``

They then give a number of examples of how one could use CFI to
improve other existing control-flow-based tools, including SFI and
shadow stacks. Finally, they give a brief look at the formal theory
@@ -20,7 +67,24 @@ program. Particularly their threat model is that the adversary has
total control over the data memory. This covers a number of practical
attacks, including any that use a "stack smashing" technique to gain
control of the program. This includes many (all?) code injection
attacks, as well as arc-injection attacks.
attacks, as well as arc-injection attacks. Here's some junk::
    foogy boogy
    oog egjwg we
    gew jgewwegje
    gewjigew jgiewgjew

Ok we're done.

More python

>>> print "hi there"
hi there
>>> print "fuck you"
fuck you
>>> 3 + 5
8

Ok we're done "funny".

Contributions
-------------
@@ -54,3 +118,12 @@ binary instrumentation could address a number of the difficulties,
notably performance. I also think a better job could be done grouping
functions, using the control-flow information to partition the targets
of function pointers more carefully.

Table:

+----+-----+
|foo | bar |
+----+-----+
|hate life |
+----------+

lex.py
@@ -121,7 +121,6 @@ class OverridePatternRule(PatternRule):
        modecls = a.modes[d['mode']]
        mode = modecls(FakeWindow(lexer.mode.window.application))
        if hasattr(mode, 'grammar') and hasattr(mode, 'colors'):
            #lexer.mode.gstack['%s.start' % d['token']] = mode
            lexer.mode.gstack['%s' % d['token']] = mode
        else:
            raise OverrideError("argh: %r" % mode)
@@ -266,9 +265,8 @@ class RegionRule(Rule):
            del lexer.mode.gstack[rname]
        else:
            mode = lexer.mode
            if self.pairs[i][0] is not None:
                grammar = self.pairs[i][0]
            else:
                if grammar is None:
                    grammar = lexer.grammar
        lexer.mstack.append(mode)
@@ -387,6 +385,24 @@ class RegionRule(Rule):
class NocaseRegionRule(RegionRule):
    reflags = re.IGNORECASE

class OverrideRegionRule(RegionRule):
    def lex(self, lexer, parent, m):
        assert m
        d = m.groupdict()
        if 'grammar' in d:
            a = lexer.mode.window.application
            modecls = a.modes[d['grammar'].lower()]
            mode = modecls(FakeWindow(lexer.mode.window.application))
            assert hasattr(mode, 'grammar') and hasattr(mode, 'colors')

            if parent is None:
                path = self.name
            else:
                path = parent.domain() + '.' + self.name
            lexer.mode.gstack[path] = mode

        return RegionRule.lex(self, lexer, parent, m)

class Grammar(object):
    rules = []
grammar = Grammar()
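
For context, and not part of this commit: an OverrideRegionRule is
presumably meant to be instantiated in a mode's grammar, so that whatever
name the start pattern captures in its 'grammar' group selects the
sub-mode used to highlight the region. A sketch mirroring the rule added
to mode/rst.py below (the variable name is illustrative):

    from lex import OverrideRegionRule

    # usage sketch only; the real instantiation appears in mode/rst.py
    code_rule = OverrideRegionRule(
        r'code',                                     # token name ('code.start', ...)
        r'^\.\. code-block:: +(?P<grammar>.+)\n$',   # start: captures the mode name
        None,                                        # no fixed grammar; resolved at lex time
        r'^[^\n ]',                                  # end: first unindented, non-blank line
    )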
|
||||
|
|
98
mode/rst.py
98
mode/rst.py
|
@@ -1,6 +1,7 @@
import color, method, mode
from lex import Grammar, PatternRule, RegionRule, PatternGroupRule
from lex import Grammar, PatternRule, RegionRule, PatternGroupRule, OverrideRegionRule
from mode.text import TextInsertSpace
from mode.python import PythonGrammar

class RSTString(Grammar):
    rules = [
@@ -9,35 +10,38 @@ class RSTString(Grammar):

class RSTGrammar(Grammar):
    rules = [
        PatternRule(r'title', r'^=+\n$'),
        PatternRule(r'subtitle', r'^-+\n$'),
        PatternRule(r'escape', r'\\.'),

        # this is kind of a hack, ugh
        PatternGroupRule(r'definition', r'term', r'^[^ ]+\n', r'definition', r'^ +'),
        PatternRule(r'fieldname', r'^:.+?:'),
        PatternRule(r'optionname', r'^(?:--|-|/)[a-zA-Z](?: ?[-a-zA-Z_]+)'),

        PatternRule(r'lineblock', r'^\|'),
        RegionRule(r'blocktest', r'>>>', Grammar, '^\n'),
        PatternRule(r'transition', r'^[-=.,;]{4,}\n'),

        #OverridePatternRule(r'comment', r'#@@:(?P<token>[.a-zA-Z0-9_]+):(?P<mode>[.a-zA-Z0-9_]+) *$'),
        #OverridePatternRule(r'code', r'^\.\. code-block:: (?P<mode>:.+)\n$'),

        RegionRule(r'emphasis', r'\*', RSTString, r'\*'),
        RegionRule(r'strong_emphasis', r'\*\*', RSTString, r'\*\*'),
        RegionRule(r'interpreted', r'`', RSTString, r'`'),
        RegionRule(r'emphasis', r'\*', RSTString, r'\*'),
        RegionRule(r'inline_literal', r'``', RSTString, r'``'),
        RegionRule(r'substitution', r'\|', RSTString, r'\|'),
        RegionRule(r'interpreted', r'`', RSTString, r'`_?'),

        PatternRule(r'anonymous', r'[a-zA-Z]+__'),
        PatternRule(r'reference', r'[a-zA-Z]+_'),
        RegionRule(r'inline_internal', r'_`', RSTString, r'`'),

        RegionRule(r'substitution', r'\|(?! )', RSTString, r'\|'),
        PatternRule(r'footnote', r'\[[0-9]+\]_'),
        PatternRule(r'citation', r'\[.+?\]_'),
        PatternRule(r'rst_url', r'http://[^ ]+'),

        PatternRule(r'title', r'^={3,}\n$'),
        PatternRule(r'subtitle', r'^-{3,}\n$'),

        PatternRule(r'option', r'^(?:--|-|/)[a-zA-Z]+(?:[ =][-a-zA-Z_]+)?'),
        RegionRule(r'table', r'\+-+(\+-+)*\+\n$', Grammar, r'^\n$'),

        PatternRule(r'bullet', r'^ *[-*+]'),
        PatternRule(r'enumeration', r'^ *(?:[0-9]+|#)\.'),

        PatternRule(r'field', r'^:.+?:'),

        PatternRule(r'lineblock', r'^\|'),
        RegionRule(r'blocktest', r'>>>', PythonGrammar, '^\n'),
        OverrideRegionRule(r'code', r'^\.\. code-block:: +(?P<grammar>.+)\n$', None, r'^[^\n ]'),
        RegionRule(r'literal_block', r'::\n$', Grammar, r'^[^\n ]'),

    ]

class RstInsertSpace(TextInsertSpace):
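
Not part of the commit, but a quick sanity check of the tightened
patterns in this hunk against the sample document in the first file;
both regexes and both input lines below are taken verbatim from this
diff:

    import re

    # 'title' now requires at least three '=' characters, and the new
    # 'table' rule starts on a full grid-table border row; both sample
    # lines appear in the RST test document earlier in this commit.
    assert re.match(r'^={3,}\n$', '======================\n')
    assert re.match(r'\+-+(\+-+)*\+\n$', '+----+-----+\n')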

@@ -51,38 +55,48 @@ class RST(mode.Fundamental):
    grammar = RSTGrammar
    colors = {
        'title': ('blue', 'default', 'bold'),
        #'title_over': ('blue', 'default', 'bold'),
        #'title_under': ('blue', 'default', 'bold'),
        'subtitle': ('cyan', 'default', 'bold'),
        #'subtitle_over': ('cyan', 'default', 'bold'),
        #'subtitle_under': ('cyan', 'default', 'bold'),

        'definition': ('green', 'default', 'bold'),
        'fieldname': ('green', 'default', 'bold'),
        'optionname': ('green', 'default', 'bold'),

        'lineblock': ('cyan', 'default', 'bold'),
        'blocktest': ('cyan', 'default', 'bold'),
        'transition': ('cyan', 'default', 'bold'),

        'emphasis.start': ('red', 'default', 'bold'),
        'emphasis.null': ('red', 'default', 'bold'),
        'emphasis.end': ('red', 'default', 'bold'),
        'strong_emphasis.start': ('red', 'default', 'bold'),
        'strong_emphasis.null': ('red', 'default', 'bold'),
        'strong_emphasis.end': ('red', 'default', 'bold'),
        'interpreted': ('magenta', 'default', 'bold'),
        'inline_literal': ('magenta', 'default', 'bold'),
        'substitution': ('magenta', 'default', 'bold'),

        'subtitle': ('blue', 'default'),
        'field': ('green', 'default', 'bold'),
        'option': ('green', 'default', 'bold'),
        'anonymous': ('blue', 'default', 'bold'),
        'reference': ('blue', 'default', 'bold'),
        'footnote': ('blue', 'default', 'bold'),
        'citation': ('blue', 'default', 'bold'),
        'rst_url': ('blue', 'default', 'bold'),

        'table.start': ('cyan', 'default'),
        'table.null': ('cyan', 'default'),

        'bullet': ('magenta', 'default', 'bold'),
        'enumeration': ('magenta', 'default', 'bold'),

        'lineblock': ('cyan', 'default', 'bold'),
        'blocktest.start': ('cyan', 'default', 'bold'),

        'emphasis.start': ('red', 'default'),
        'emphasis.null': ('red', 'default'),
        'emphasis.end': ('red', 'default'),
        'strong_emphasis.start': ('red', 'default', 'bold'),
        'strong_emphasis.null': ('red', 'default', 'bold'),
        'strong_emphasis.end': ('red', 'default', 'bold'),
        'interpreted.start': ('magenta', 'default'),
        'interpreted.null': ('magenta', 'default'),
        'interpreted.end': ('magenta', 'default'),
        'inline_literal.start': ('magenta', 'default', 'bold'),
        'inline_literal.null': ('magenta', 'default', 'bold'),
        'inline_literal.end': ('magenta', 'default', 'bold'),
        'inline_internal.start': ('yellow', 'default'),
        'inline_internal.null': ('yellow', 'default'),
        'inline_internal.end': ('magenta', 'default'),
        'substitution.start': ('cyan', 'default', 'bold'),
        'substitution.null': ('cyan', 'default', 'bold'),
        'substitution.end': ('cyan', 'default', 'bold'),

        'code.start': ('yellow', 'default', 'bold'),
        'literal_block.start': ('yellow', 'default', 'bold'),
        'literal_block.null': ('green', 'default', 'bold'),

    }
    actions = [RstInsertSpace, RstWrapParagraph]
    def __init__(self, w):
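
One last reading aid, inferred from the entries above rather than stated
anywhere in this commit: each region rule appears to expose three tokens,
'<name>.start', '<name>.null', and '<name>.end', and the colors dictionary
styles each one separately. For example (tuples copied from this hunk):

    # inferred naming convention for region tokens; values copied from the diff
    emphasis_colors = {
        'emphasis.start': ('red', 'default'),  # the opening delimiter
        'emphasis.null':  ('red', 'default'),  # the text between the delimiters
        'emphasis.end':   ('red', 'default'),  # the closing delimiter
    }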