import itertools


def generate_tokens(tokens):
    """Yield (token_name, token_number) pairs for each token definition.

    ``tokens`` is an iterable of lines (such as the contents of
    Grammar/Tokens).  Blank lines and comment lines are skipped, and the
    pseudo entries N_TOKENS and NT_OFFSET are yielded at the end.
    """
    numbers = itertools.count(0)
    for line in tokens:
        line = line.strip()

        # Skip blank lines and comments.
        if not line or line.startswith("#"):
            continue

        # The first field on the line is the token name.
        name = line.split()[0]
        yield (name, next(numbers))

    # N_TOKENS is the total number of tokens defined above; NT_OFFSET is
    # the fixed boundary (256) between token numbers and non-terminal
    # symbol numbers.
    yield ("N_TOKENS", next(numbers))
    yield ("NT_OFFSET", 256)


def generate_opmap(tokens):
    """Yield (operator_string, token_name) pairs for operator tokens.

    Only lines with exactly two fields (a token name followed by a quoted
    operator) produce an entry; blank lines, comments, and bare token
    names are skipped.
    """
    for line in tokens:
        line = line.strip()

        # Skip blank lines and comments.
        if not line or line.startswith("#"):
            continue

        pieces = line.split()

        # Only "NAME 'op'" lines define operators; skip everything else.
        if len(pieces) != 2:
            continue

        name, op = pieces
        # Strip the surrounding quotes from the operator string.
        yield (op.strip("'"), name)
    # Yield <> on its own rather than listing it in Grammar/Tokens:
    # because it has the same token name as != (NOTEQUAL), including it
    # there would make it collide with != in generate_tokens.
    yield ("<>", "NOTEQUAL")
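

# A minimal usage sketch: the sample lines below are illustrative only,
# not taken from the real Grammar/Tokens file.
if __name__ == "__main__":
    sample = [
        "# comment lines and blank lines are ignored",
        "",
        "ENDMARKER",
        "NAME",
        "NOTEQUAL                '!='",
    ]
    # [('ENDMARKER', 0), ('NAME', 1), ('NOTEQUAL', 2),
    #  ('N_TOKENS', 3), ('NT_OFFSET', 256)]
    print(list(generate_tokens(sample)))
    # [('!=', 'NOTEQUAL'), ('<>', 'NOTEQUAL')]
    print(list(generate_opmap(sample)))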