This repository has been archived by the owner on Mar 29, 2018. It is now read-only.
/
nbfparser.py
75 lines (69 loc) · 2.85 KB
/
nbfparser.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
# Begin -- grammar generated by Yapps
import sys, re
from yapps import runtime
class NoBrainFuckScanner(runtime.Scanner):
    """Tokenizer for the NoBrainFuck language (generated by Yapps).

    Each keyword token is case-insensitive and letter-repeatable, e.g.
    'incrementp' matches "ho", "Hoo", "HHHooo", etc.
    NOTE(review): this table is generated by Yapps and the runtime scans
    patterns in list order -- do not reorder entries by hand.
    """
    patterns = [
        ('"$"', re.compile('$')),                    # end-of-input marker
        ('\\s+', re.compile('\\s+')),                # whitespace (skipped, see __init__)
        ('\\#[^\n]*', re.compile('\\#[^\n]*')),      # '#' line comment (skipped, see __init__)
        ('incrementp', re.compile('[hH]+[oO]+')),    # "ho"      -> Brainfuck '>'
        ('decrementp', re.compile('[hH]+[aA]+')),    # "ha"      -> Brainfuck '<'
        ('increment', re.compile('[Yy]+[eE]+[sS]+')),  # "yes"   -> Brainfuck '+'
        ('decrement', re.compile('[Nn]+[oO]+')),       # "no"    -> Brainfuck '-'
        ('prints', re.compile('[Mm]+[Yy]+\\s+[Gg]+[Oo]+[dD]+')),        # "my god"  -> '.'
        ('accepts', re.compile('[hH]+[aA]+[rR]+[dD]+[eE]+[rR]+')),      # "harder"  -> ','
        ('begins', re.compile('[Nn]+[Oo]+[tT]+\\s+[Yy]+[eE]+[Tt]+')),   # "not yet" -> '['
        ('ends', re.compile("[Ii]+'+[Mm]+\\s+[Cc]+[Oo]+[Mm]+[Ii]+[Nn]+[Gg]+")),  # "i'm coming" -> ']'
    ]

    def __init__(self, str, *args, **kw):
        # Parameter is named `str` by the Yapps generator (shadows the builtin;
        # kept for interface compatibility). The dict maps the whitespace and
        # comment patterns to None so the runtime discards them silently.
        runtime.Scanner.__init__(self, None, {'\\#[^\n]*': None, '\\s+': None, }, str, *args, **kw)
class NoBrainFuck(runtime.Parser):
    """Parser that translates NoBrainFuck keyword tokens into Brainfuck opcodes."""
    Context = runtime.Context

    # One Brainfuck opcode per NoBrainFuck token name.
    _OPCODES = {
        'incrementp': ">",
        'decrementp': "<",
        'increment': "+",
        'decrement': "-",
        'prints': ".",
        'accepts': ",",
        'begins': "[",
        'ends': "]",
    }

    def program(self, _parent=None):
        """Consume expressions until end of input; return the Brainfuck program string."""
        _context = self.Context(_parent, self._scanner, 'program', [])
        pieces = []
        while self._peek(context=_context) != '"$"':
            pieces.append(self.expression(_context))
        self._scan('"$"', context=_context)
        return "".join(pieces)

    def expression(self, _parent=None):
        """Scan exactly one keyword token and return its Brainfuck opcode."""
        _context = self.Context(_parent, self._scanner, 'expression', [])
        # _peek restricted to the full token set raises a syntax error on
        # anything else, so the dict lookup below cannot miss.
        token = self._peek('incrementp', 'decrementp', 'increment', 'decrement',
                           'prints', 'accepts', 'begins', 'ends', context=_context)
        self._scan(token, context=_context)
        return self._OPCODES[token]
def parse(rule, text):
    """Parse *text* starting at grammar rule *rule*, reporting errors via Yapps."""
    scanner = NoBrainFuckScanner(text)
    return runtime.wrap_error_reporter(NoBrainFuck(scanner), rule)
if __name__ == '__main__':
    # CLI: nbfparser.py <rule> [<filename>]
    # Reads the NoBrainFuck source from <filename> (or stdin) and prints
    # the translated output of grammar rule <rule>.
    from sys import argv, stdin
    if len(argv) >= 2:
        if len(argv) >= 3:
            # Fix: the original leaked the file handle; `with` closes it.
            with open(argv[2], 'r') as f:
                text = f.read()
        else:
            # No filename given: read the whole program from stdin
            # (stdin is deliberately left open).
            text = stdin.read()
        # Single-argument print() is valid in both Python 2 and 3,
        # unlike the original `print parse(...)` statement.
        print(parse(argv[1], text))
    else:
        # Same usage message and stream as the original `print >>sys.stderr`.
        sys.stderr.write('Args: <rule> [<filename>]\n')
# End -- grammar generated by Yapps