-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathparser.py
More file actions
113 lines (90 loc) · 3.69 KB
/
parser.py
File metadata and controls
113 lines (90 loc) · 3.69 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
import sys
from lexer import *
# Parser object keeps track of the current token and checks if the code matches the grammar.
class Parser:
def __init__(self, lexer, emitter):
self.lexer = lexer
self.emitter = emitter
self.symbols = set() # All variables declared so far
self.labels_declared = set() # All labels declared so far
self.labels_gotod = set() # All labels goto'ed
self.cur_token = None
self.peek_token = None
self.next_token()
self.next_token() # Call twice to init current and peek
# Return true if the current token matches
def check_token(self, kind):
return kind == self.cur_token.kind
# Return true if the next token matches
def check_peek(self, kind):
return kind == self.peek_token.kind
# Try to match current token. If not, error. Advances the current token
def match_token(self, kind):
if not self.check_token(kind):
self.abort("Expected " + kind.name + ", got " + self.cur_token.kind.name)
self.next_token()
# Advances the current token
def next_token(self):
self.cur_token = self.peek_token
self.peek_token = self.lexer.get_token()
# Lexer handles EOF
# Return true if current token is of comparison type
def is_comparison_operator(self):
return self.check_token(TokenType.GT) or self.check_token(TokenType.GTEQ) or \
self.check_token(TokenType.LT) or self.check_token(TokenType.LTEQ) or \
self.check_token(TokenType.EQEQ) or self.check_token(TokenType.NOTEQ)
def abort(self, messages):
sys.exit("Error: " + message)
# Production rules
# program ::= {statement}
def program(self):
self.emitter.header_line("#include <stdio.h>")
self.emitter.header_line("int main(void){")
# Skip excess newlines
while self.check_token(TokenType.NEWLINE):
self.next_token()
# Parse all statements in the program
while not self.check_token(TokenType.EOF):
self.statement()
self.emitter.emit_line("return 0;")
self.emitter.emit_line("}")
# Verify each label referenced in a GOTO is declared
for label in self.labels_gotod:
if label not in self.labels_declared:
self.abort("Attempting to GOTO undeclared label: " + label)
# Define grammar rules
def statement(self):
"""
- Check first token to see what kind of statement it is
"""
#### PRINT (expression | string) ####
if self.check_token(TokenType.PRINT):
self.next_token()
if self.check_token(TokenType.STRING):
# Simple string
self.emitter.emit_line("printf(\"" + self.cur_token.text + "\\n\");")
self.next_token()
else:
# Expect an expression
self.emitter.emit("printf(\"%" + ".2f\\n\", (float)(")
self.expression()
self.emitter.emit_line("));")
#### IF comparison "THEN" block "ENDIF"
elif self.check_token(TokenType.IF):
self.next_token()
self.emitter.emit("if(")
self.comparison()
self.match_token(TokenType.THEN)
self.nl()
self.emitter.emit_line("){")
# Zero or more statements in body
while not self.check_token(TokenType.ENDIF):
# Newline
self.nl()
def nl(self):
print("NEWLINE")
# Require at least one newline
self.match_token(TokenType.NEWLINE)
# Allow extra newlines too
while self.check_token(TokenType.NEWLINE):
self.next_token()