comparison python/ks/lexer.py @ 146:91af0e40f868

Moved several files
author Windel Bouwman
date Fri, 22 Feb 2013 10:31:58 +0100
parents python/ppci/frontends/ks/lexer.py@af0d7913677a
children 4fd075e8259c
comparison
equal deleted inserted replaced
145:c101826ffe2b 146:91af0e40f868
1 import collections, re
2 from ...core.errors import CompilerException
3
"""
Lexical analyzer part. Splits the input character stream into tokens.
"""

# Token record produced by the tokenizer: token type, converted value,
# and the source position (row, col) where it was found.
Token = collections.namedtuple('Token', 'typ val row col')

# Reserved words of the language.  An identifier that equals one of these
# is re-tagged so its token type is the keyword itself.
keywords = ('and array begin by case const div do '
            'else elsif end false for if import in is '
            'mod module nil not of or pointer procedure '
            'record repeat return then to true type until var '
            'while asm').split()
16
def tokenize(s):
    """
    Generator that splits the source string *s* into Token tuples.

    Yields a Token(typ, val, row, col) for every lexeme, silently
    skipping whitespace and { ... } comments, and finishes with a
    sentinel Token('END', '', line, 0).  Raises CompilerException when
    an unrecognized character is encountered.

    (Scanner structure based on the tokenizer example in the Python
    `re` module documentation.)
    """
    tok_spec = [
        ('REAL', r'\d+\.\d+'),            # must precede NUMBER
        ('HEXNUMBER', r'0x[\da-fA-F]+'),  # must precede NUMBER
        ('NUMBER', r'\d+'),
        ('ID', r'[A-Za-z][A-Za-z\d_]*'),
        ('NEWLINE', r'\n'),
        ('SKIP', r'[ \t]+'),              # consume a whole run of blanks per match
        # BUGFIX: non-greedy, so "{a} x {b}" is two comments instead of one
        # greedy match that swallowed the code between them.  Note '.' does
        # not match '\n', so comments are still single-line only.
        ('COMMENTS', r'{.*?}'),
        ('LEESTEKEN', r':=|[\.,=:;\-+*\[\]/\(\)]|>=|<=|<>|>|<'),
        ('STRING', r"'.*?'")              # NOTE: no escape handling inside strings
    ]
    tok_re = '|'.join('(?P<%s>%s)' % pair for pair in tok_spec)
    gettok = re.compile(tok_re).match
    line = 1
    pos = line_start = 0
    mo = gettok(s)
    while mo is not None:
        typ = mo.lastgroup
        val = mo.group(typ)
        if typ == 'NEWLINE':
            # BUGFIX: point line_start at the first character AFTER the
            # newline so columns are 0-based on every line.  The original
            # stored the newline's own position, which made columns 1-based
            # on all lines after the first but 0-based on the first line.
            line_start = mo.end()
            line += 1
        elif typ == 'COMMENTS':
            pass  # comments are discarded
        elif typ != 'SKIP':
            if typ == 'ID':
                if val in keywords:
                    typ = val    # keywords use the lexeme as token type
            elif typ == 'LEESTEKEN':
                typ = val        # punctuation tokens use the lexeme as token type
            elif typ == 'NUMBER':
                val = int(val)
            elif typ == 'HEXNUMBER':
                val = int(val[2:], 16)  # strip '0x', normalize to NUMBER
                typ = 'NUMBER'
            elif typ == 'REAL':
                val = float(val)
            elif typ == 'STRING':
                val = val[1:-1]  # strip the surrounding quotes
            yield Token(typ, val, line, mo.start() - line_start)
        pos = mo.end()
        mo = gettok(s, pos)
    if pos != len(s):
        # The scanner stopped before the end of input: the next character
        # matched no token pattern.
        col = pos - line_start
        raise CompilerException('Unexpected character {0}'.format(s[pos]), line, col)
    yield Token('END', '', line, 0)
70