# compiler.py — 131 lines (94 loc), 3.32 KB
# (GitHub page-scrape residue removed: navigation text and line-number gutter.)
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys, os, getopt
sys.path.insert(0, 'analisador_lexico/')
import automata
sys.path.insert(0, 'analisador_sintatico/')
from gramatica import Gramatica
from slr import Slr
sys.path.insert(0, 'verificador_tipos/')
from veritypes import VerifTipos
def import_gram(gfile):
    """Read a grammar file and return its list of productions.

    Each meaningful line has the form ``HEAD -> SYM SYM ...``; blank
    lines and lines starting with '#' are ignored.

    Parameters:
        gfile: path of the grammar file.

    Returns:
        List of ``(head, body)`` pairs where ``body`` is a tuple of
        right-hand-side symbols (empty tuple for an epsilon production).

    Raises:
        Exception: for a malformed line, reporting file name and line number.
    """
    prods = list()
    conta = 0
    # open() instead of the Python-2-only file() builtin; the context
    # manager guarantees the handle is closed on every exit path.
    with open(gfile) as f:
        for linha in f:
            conta += 1
            linha = linha.rstrip(os.linesep)
            # skip blank lines and '#' comments
            if linha == '' or linha[0] == '#':
                continue
            regra = tuple(linha.split())
            if len(regra) < 2 or regra[1] != '->':
                # BUG FIX: the original referenced an undefined name
                # 'arquivo', turning every grammar error into a NameError.
                raise Exception('Erro: %s:%d' % (gfile, conta))
            prods.append((regra[0], regra[2:]))
    return prods
def symbols_by_line(tokens_list, token_ln):
    """Group token strings by source line, mutating *tokens_list* in place.

    Parameters:
        tokens_list: list of ``[line_number_str, token, token, ...]``
            entries, one entry per source line, in the order the lines
            were first seen.
        token_ln: pair of (token dict carrying a 'token' key, 0-based
            line number).

    Returns:
        None; the grouping is accumulated into *tokens_list*.
    """
    token = token_ln[0].get('token')
    # lines are stored 1-based, as strings
    line_n = str(token_ln[1] + 1)
    for entry in tokens_list:
        # BUG FIX: the original used a substring test (line_n in entry[0],
        # so '1' matched '21'); it only worked because tokens arrive in
        # increasing line order. Exact equality is what is meant.
        if entry[0] == line_n:
            entry.append(token)
            return
    tokens_list.append([line_n, token])
def main(argv):
    """Drive the miniC compiler front end.

    Runs the lexical analyser, the SLR parser and the type checker over
    the source file named in ``argv[1]``, writing the token stream to
    'output.tokens' and reporting each phase's status on stdout.

    Parameters:
        argv: command-line argument list; ``argv[1]`` is the source path.
    """
    # importa gramatica
    producoes = import_gram('minic.gram')
    # arquivo fonte de entrada
    # BUG FIX: use the argv parameter instead of reaching back into
    # sys.argv, so main() is callable with any argument list.
    source = argv[1]
    # tabela de simbolos
    symbol_table = list()
    tokens = list()        # token attributes fed to the parser
    lines = list()         # source line of each token, for error reports
    symbols_list = list()  # tokens grouped per line, for the type checker
    runaway = True
    separator = "---------------------------------------------------------"
    print ("%s\n%s" % (separator, separator))
    print ("Lexical analysis status:")
    # 'with' guarantees both files are closed (the original leaked the
    # 'output.tokens' handle, and sys.exit() paths never closed it).
    with open('output.tokens', 'w') as dest, open(source) as fp:
        while True:
            token_line = automata.get_token(fp)
            token = token_line[0]
            line = token_line[1]
            # comments are skipped entirely
            if 'comment' in token:
                continue
            if 'error' in token:
                print (token.get('error'))
                sys.exit()
            if 'token' in token:
                # '<id;attr>' -> keep only the attribute part for the parser
                token_attr = token.get('token').split(';', 1)[1][:-1]
                dest.write(token.get('token') + '\n')
                tokens.append(token_attr)
                lines.append(line)
                symbols_by_line(symbols_list, token_line)
            if 'eof' in token:
                print (" %d lines scanned, everything is fine so far..." % (token.get('eof')))
                runaway = False
                break
    # NOTE(review): unreachable in practice — the loop only exits via
    # break (runaway=False) or sys.exit(); kept for parity.
    if runaway:
        print ("%s\n%s" % (separator, separator))
        sys.exit()
    print ("%s\n%s" % (separator, separator))
    print ("Syntactic analysis status:")
    grammar = Gramatica(producoes, producoes[0][0])
    slr = Slr(grammar)
    result = slr.parse(tokens)
    if result.get('result'):
        print (' No syntactic errors found, everything remains fine.')
    else:
        print (' Syntactic error near "%s" at line %s' % (result.get('token'), lines[int(result.get('line'))]))
        print ("%s\n%s" % (separator, separator))
        sys.exit()
    print ("%s\n%s" % (separator, separator))
    print ("Type checking analysis status:")
    vt = VerifTipos(symbols_list)
    print (vt.matching_result())
    print ("%s\n%s" % (separator, separator))
# Script entry point: forward the full argument vector to main().
if __name__ == '__main__':
    main(sys.argv)