view python/c3/lexer.py @ 177:460db5669efa

Added clean pass for IR
author Windel Bouwman
date Mon, 22 Apr 2013 23:54:54 +0200
parents 8104fc8b5e90
children 6b2bec5653f1
line wrap: on
line source

import collections, re

from ppci import CompilerError, SourceLocation

"""
 Lexical analyzer part. Splits the input character stream into tokens.
"""

# A single lexical token: its type string, its (possibly converted)
# value, and its location in the source text.
Token = collections.namedtuple('Token', ['typ', 'val', 'loc'])

# Reserved words of the language; the tokenizer re-types an ID token
# to the word itself when its text appears in this list.
keywords = ['and', 'or', 'not', 'true', 'false',
            'else', 'if', 'while', 'return',
            'function', 'var', 'type', 'const',
            'import', 'package']

def tokenize(s):
    """
    Tokenizer: generate an iterator over the tokens in source string s.

    Yields Token(typ, val, loc) tuples, one per lexical token, followed
    by a single 'END' token.  Raises CompilerError on the first character
    that matches no token pattern.

    (Pattern-dispatch technique adapted from the python re doc page.)
    """
    tok_spec = [
        ('REAL', r'\d+\.\d+'),
        ('HEXNUMBER', r'0x[\da-fA-F]+'),
        ('NUMBER', r'\d+'),
        ('ID', r'[A-Za-z][A-Za-z\d_]*'),
        ('NEWLINE', r'\n'),
        ('SKIP', r'[ \t]'),
        ('COMMENTS', r'//.*'),
        ('LEESTEKEN', r'==|[\.,=:;\-+*\[\]/\(\)]|>=|<=|<>|>|<|{|}'),
        ('STRING', r"'.*?'")
    ]
    # One big alternation of named groups; mo.lastgroup tells us which
    # token kind matched.
    tok_re = '|'.join('(?P<%s>%s)' % pair for pair in tok_spec)
    gettok = re.compile(tok_re).match
    line = 1
    pos = line_start = 0
    mo = gettok(s)
    while mo is not None:
        typ = mo.lastgroup
        val = mo.group(typ)
        if typ == 'NEWLINE':
            # The next line starts at the character *after* the newline.
            # (The previous 'line_start = pos' anchored it at the newline
            # itself, making columns off by one from line 2 onwards.)
            line_start = mo.end()
            line += 1
        elif typ == 'COMMENTS' or typ == 'SKIP':
            pass  # comments and whitespace produce no token
        else:
            if typ == 'ID':
                if val in keywords:
                    typ = val  # keywords get their own token type
            elif typ == 'LEESTEKEN':
                typ = val  # punctuation: the token type is the text itself
            elif typ == 'NUMBER':
                val = int(val)
            elif typ == 'HEXNUMBER':
                val = int(val[2:], 16)
                typ = 'NUMBER'  # hex literals unify with decimal numbers
            elif typ == 'REAL':
                val = float(val)
            elif typ == 'STRING':
                val = val[1:-1]  # strip the surrounding quotes
            loc = SourceLocation(line, mo.start() - line_start,
                                 mo.end() - mo.start())
            yield Token(typ, val, loc)
        pos = mo.end()
        mo = gettok(s, pos)
    if pos != len(s):
        # Bug fix: the old code overwrote pos with the line number before
        # indexing s, so the message reported the wrong character (and the
        # computed column was discarded, unused).
        raise CompilerError('Unexpected character {0}'.format(s[pos]), line)
    # NOTE(review): the END token carries a bare line number, not a
    # SourceLocation, as in the original — confirm callers expect this.
    yield Token('END', '', line)