#!/usr/bin/python3
import ply.yacc as yacc
from .lexer import operators, tokens
from .util import Token
from monkey.graph import Node
# PARSER
# PLY operator-precedence table, listed from lowest to highest binding
# strength. Associativity appears to mirror conventional Prolog operator
# priorities — TODO confirm against the intended operator table.
precedence = (
    ('nonassoc', 'FROM'),       # clause neck (head FROM body)
    ('right', 'IMPLIES'),
    ('right', 'NOT'),
    # comparison/unification operators: nonassoc, so they cannot chain
    ('nonassoc', 'EQU', 'NEQU', 'EQ', 'NEQ', 'UNIV', 'IS', 'EQA', 'NEQA', 'LT', 'LE', 'GT', 'GE', 'LTL', 'LEL', 'GTL', 'GEL'),
    ('left', 'PLUS', 'MINUS'),
    ('left', 'STAR', 'DIV', 'IDIV', 'MOD'),
    ('nonassoc', 'POW'),
    ('right', 'UMINUS', 'UPLUS'),   # unary sign (see %prec in p_term_unary)
    ('nonassoc', 'UINTEGER', 'UREAL'),
    ('nonassoc', 'NAME', 'VARIABLE', 'STRING'),
    ('nonassoc', 'PERIOD'),
    ('nonassoc', 'LBRACKET', 'RBRACKET', 'LPAREN', 'RPAREN', 'COMMA', 'SEMI', 'PIPE', 'LBRACE', 'RBRACE')
)
def make_token(p, n):
    """Wrap the lexer token at slot *n* of production *p* in a graph Node.

    Carries the token's type, text and lexer position into the AST so
    later passes can report source locations.
    """
    tok = p.slice[n]
    return Node(data=Token(tok.type, tok.value, tok.lexpos))
def p_text_empty(p):
    'text : '
    # Base case of the grammar: start with a text node carrying no clauses.
    root = Node('text', [])
    p[0] = root
def p_text_clause(p):
    'text : text clause'
    # Accumulate clauses onto the single running text node (left recursion).
    text_node = p[1]
    text_node.eout.append(p[2])
    p[0] = text_node
def p_clause_head(p):
    'clause : head PERIOD'
    # A fact: a bare head terminated by a period.
    children = [p[1], make_token(p, 2)]
    p[0] = Node('clause', children)
def p_clause_rule(p):
    'clause : head FROM or PERIOD'
    # A rule: head, the FROM (neck) token, body, terminating period.
    children = [p[1], make_token(p, 2), p[3], make_token(p, 4)]
    p[0] = Node('clause', children)
def p_clause_error(p):
    'clause : error PERIOD'
    # Error recovery: resynchronize at the next period so parsing continues.
    err = Node('error')
    p[0] = Node('clause', [err, make_token(p, 2)])
def p_head(p):
    'head : term'
    # Mark the clause head with its own node kind for later passes.
    head_node = Node('head', [p[1]])
    p[0] = head_node
def p_or_single(p):
    'or : if'
    # A lone conjunct is itself a valid disjunction; pass the node through
    # without wrapping (wrapping happens lazily in p_or_if).
    p[0] = p[1]
def p_or_if(p):
    'or : or SEMI if'
    # Keep disjunctions flat: reuse an existing 'or' node instead of
    # nesting one per SEMI; only wrap when the left side is not yet an 'or'.
    left = p[1]
    disj = left if left.data == 'or' else Node('or', [left])
    disj.eout.extend([make_token(p, 2), p[3]])
    p[0] = disj
def p_if_single(p):
    'if : and'
    # A conjunction with no IMPLIES is passed through unchanged.
    p[0] = p[1]
def p_if_and(p):
    'if : and IMPLIES if'
    # Right-recursive production, so chained IMPLIES associate rightward.
    children = [p[1], make_token(p, 2), p[3]]
    p[0] = Node('if', children)
def p_and_single(p):
    'and : term'
    # A single term is itself a valid conjunction; pass the node through
    # without wrapping (wrapping happens lazily in p_and_term).
    p[0] = p[1]
def p_and_term(p):
    'and : and COMMA term'
    # Keep conjunctions flat, mirroring p_or_if: reuse an existing 'and'
    # node and only wrap when the left side is not yet an 'and'.
    left = p[1]
    conj = left if left.data == 'and' else Node('and', [left])
    conj.eout.extend([make_token(p, 2), p[3]])
    p[0] = conj
def p_term_functor(p):
    'term : functor LPAREN args RPAREN'
    # A compound term requires the opening paren to touch the functor
    # name: 'foo(' is a call, 'foo (' is rejected here via lexer positions.
    lparen = make_token(p, 2)
    name_tok = p[1].eout[0].data
    if name_tok.pos + len(name_tok.val) < lparen.data.pos:
        raise SyntaxError('whitespace before ' + str(lparen))
    p[0] = Node('term', [p[1], lparen, p[3], make_token(p, 4)])
def p_term_or(p):
    'term : LPAREN or RPAREN'
    # A parenthesized disjunction used where a term is expected.
    lparen = make_token(p, 1)
    rparen = make_token(p, 3)
    p[0] = Node('term', [lparen, p[2], rparen])
def p_term_binary(p):
    '''term : term PLUS term
            | term MINUS term
            | term STAR term
            | term POW term
            | term DIV term
            | term IDIV term
            | term MOD term
            | term EQU term
            | term NEQU term
            | term EQ term
            | term NEQ term
            | term UNIV term
            | term IS term
            | term EQA term
            | term NEQA term
            | term LT term
            | term LE term
            | term GT term
            | term GE term
            | term LTL term
            | term LEL term
            | term GTL term
            | term GEL term'''
    # Every binary operator builds the same three-child term node;
    # grouping is decided entirely by the precedence table.
    lhs, op, rhs = p[1], make_token(p, 2), p[3]
    p[0] = Node('term', [lhs, op, rhs])
def p_term_unary(p):
    '''term : NOT term
            | MINUS term %prec UMINUS
            | PLUS term %prec UPLUS'''
    # Prefix operator on a term; %prec gives unary sign its own (tighter)
    # precedence than the binary PLUS/MINUS tokens.
    op = make_token(p, 1)
    p[0] = Node('term', [op, p[2]])
def p_term_list(p):
    'term : list'
    # Promote a list node to a term node.
    term_node = Node('term', [p[1]])
    p[0] = term_node
def p_term_simple(p):
    '''term : STRING
            | NAME
            | UINTEGER
            | UREAL
            | VARIABLE'''
    # Atomic terms: a single token wrapped in a term node.
    leaf = make_token(p, 1)
    p[0] = Node('term', [leaf])
def p_args_empty(p):
    'args : '
    # Empty argument list (also used for the empty list literal []).
    arg_list = Node('args', [])
    p[0] = arg_list
def p_args_single(p):
    'args : term'
    # A one-element argument list.
    arg_list = Node('args', [p[1]])
    p[0] = arg_list
def p_args_term(p):
    'args : args COMMA term'
    # Grow the existing args node in place, keeping the commas as children.
    arg_list = p[1]
    arg_list.eout.extend([make_token(p, 2), p[3]])
    p[0] = arg_list
def p_list(p):
    'list : LBRACKET args RBRACKET'
    # Splice the args node's children directly into the list node,
    # discarding the intermediate 'args' wrapper.
    children = [make_token(p, 1)]
    children += p[2].eout
    children.append(make_token(p, 3))
    p[0] = Node('list', children)
def p_list_tail(p):
    'list : LBRACKET args PIPE term RBRACKET'
    # List with an explicit tail after the pipe (e.g. [H|T]); the args
    # wrapper is discarded here too, mirroring p_list.
    children = [make_token(p, 1)]
    children += p[2].eout
    children += [make_token(p, 3), p[4], make_token(p, 5)]
    p[0] = Node('list', children)
def p_functor(p):
    'functor : NAME'
    # Wrap the functor name token; p_term_functor reads its position
    # via functor.eout[0].data.
    name_tok = make_token(p, 1)
    p[0] = Node('functor', [name_tok])
def p_error(t):
    """PLY error callback: *t* is the offending token, or None at EOF."""
    if t is None:
        print('unexpected end of file')
        return
    print('{}: unexpected {}'.format(t.lexpos, t.value))
# Build the parser table from all p_* rule functions defined in this module.
parser = yacc.yacc()
if __name__ == '__main__':
    # Minimal REPL: read one line at a time, skip blank input,
    # parse and print the resulting tree; Ctrl-D exits.
    while True:
        try:
            line = input('> ')
        except EOFError:
            break
        if not line:
            continue
        print(parser.parse(line))