148
|
1 import collections, re
|
152
|
2
|
191
|
3 from ppci import CompilerError, SourceLocation, Token
|
148
|
4
|
|
5 """
|
|
6 Lexical analyzer part. Splits the input character stream into tokens.
|
|
7 """
|
|
8
|
|
# Reserved words of the language.  The tokenizer re-tags any ID token
# whose text appears in this list, using the keyword itself as the
# token type.
keywords = [
    'and', 'or', 'not', 'true', 'false',
    'else', 'if', 'while', 'return',
    'function', 'var', 'type', 'const',
    'import', 'package',
]
|
|
13
|
|
def tokenize(s):
    """
    Tokenizer: generate an iterator that yields Token objects.

    Splits the input character stream *s* into tokens, skipping
    whitespace, ``//`` line comments and ``/* ... */`` block comments.
    ID tokens matching an entry in ``keywords`` are re-tagged with the
    keyword as their type; numeric literals are converted to int/float.
    A final ``'END'`` token is yielded when the input is exhausted.

    Raises CompilerError when a character cannot be matched by any
    token pattern.

    This GREAT example was taken from python re doc page!
    """
    tok_spec = [
        ('REAL', r'\d+\.\d+'),
        ('HEXNUMBER', r'0x[\da-fA-F]+'),
        ('NUMBER', r'\d+'),
        ('ID', r'[A-Za-z][A-Za-z\d_]*'),
        ('NEWLINE', r'\n'),
        ('SKIP', r'[ \t]'),
        ('COMMENTS', r'//.*'),
        ('LONGCOMMENTBEGIN', r'\/\*'),
        ('LONGCOMMENTEND', r'\*\/'),
        ('LEESTEKEN', r'==|[\.,=:;\-+*\[\]/\(\)]|>=|<=|<>|>|<|{|}'),
        ('STRING', r"'.*?'")
    ]
    # Combine all patterns into one regex with named groups; alternation
    # order above gives longer/more specific patterns priority.
    tok_re = '|'.join('(?P<%s>%s)' % pair for pair in tok_spec)
    gettok = re.compile(tok_re).match
    line = 1
    pos = line_start = 0
    mo = gettok(s)
    incomment = False
    while mo is not None:
        typ = mo.lastgroup
        val = mo.group(typ)
        if typ == 'NEWLINE':
            line_start = pos
            line += 1
        elif typ == 'COMMENTS':
            # BUG FIX: the spec group is named 'COMMENTS'; the original
            # compared against 'COMMENT', so // line comments were never
            # skipped and leaked through as tokens.
            pass
        elif typ == 'LONGCOMMENTBEGIN':
            incomment = True
        elif typ == 'LONGCOMMENTEND':
            incomment = False
        elif typ == 'SKIP':
            pass
        elif incomment:
            pass  # Wait until we are not in a comment section
        else:
            if typ == 'ID':
                if val in keywords:
                    typ = val
            elif typ == 'LEESTEKEN':
                # Punctuation tokens use their own text as the type.
                typ = val
            elif typ == 'NUMBER':
                val = int(val)
            elif typ == 'HEXNUMBER':
                val = int(val[2:], 16)
                typ = 'NUMBER'
            elif typ == 'REAL':
                val = float(val)
            elif typ == 'STRING':
                # Strip the surrounding single quotes.
                val = val[1:-1]
            loc = SourceLocation(line, mo.start() - line_start,
                                 mo.end() - mo.start())
            yield Token(typ, val, loc)
        pos = mo.end()
        mo = gettok(s, pos)
    if pos != len(s):
        col = pos - line_start
        # BUG FIX: the original clobbered pos with the line number before
        # indexing s, so the wrong character and position were reported.
        # NOTE(review): a SourceLocation is passed here for consistency
        # with the yields above — confirm against CompilerError's signature.
        loc = SourceLocation(line, col, 1)
        raise CompilerError('Unexpected character {0}'.format(s[pos]), loc)
    loc = SourceLocation(line, 0, 0)
    yield Token('END', '', loc)
|
148
|
81
|