Commit c4ce270

Stop passing parser instance to Symbol instances

There's no real reason why `Symbol` types need to take a `Parser` instance and resolve and retain their `SymbolSpec`. This is a breaking change, but only if your `Symbol` subclasses overload `__init__`, which they normally shouldn't need to do.

1 parent: acb3a70

5 files changed, 56 insertions(+), 88 deletions(-)
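
For subclasses that did overload `__init__`, the migration is mechanical. A hedged before/after sketch with a hypothetical `MyToken` class that attaches a scanned value (only the old `Token.__init__` signature comes from the diff below; the subclass itself is illustrative):

# Before this commit (hypothetical subclass under the old API):
class MyToken(Token):
    def __init__(self, parser: Parser, val: str) -> None:
        Token.__init__(self, parser)
        self.val = val

# After this commit, Symbol carries no state and defines no __init__,
# so the parser argument is simply dropped:
class MyToken(Token):
    def __init__(self, val: str) -> None:
        self.val = val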

parsing/ast.py (1 addition, 25 deletions)

@@ -6,28 +6,10 @@
 """
 
 from __future__ import annotations
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from parsing.grammar import SymbolSpec
-    from parsing.interfaces import Parser
 
 
 class Symbol:
-    def __init__(self, symSpec: SymbolSpec, parser: Parser):
-        self.__symSpec = symSpec
-        self.__parser = parser
-
-    def __repr__(self) -> str:
-        return repr(self.symSpec)
-
-    @property
-    def symSpec(self) -> SymbolSpec:
-        return self.__symSpec
-
-    @property
-    def parser(self) -> Parser:
-        return self.__parser
+    pass
 
 
 class Nonterm(Symbol):
@@ -74,9 +56,6 @@ def reduceB(self, id):
            "%reduce id"
    """
 
-    def __init__(self, parser: Parser) -> None:
-        Symbol.__init__(self, parser.sym_spec(self), parser)
-
    def merge(self, other: Nonterm) -> Nonterm:
        """
        Merging happens when there is an ambiguity in the input that allows
@@ -133,9 +112,6 @@ class rparen(Token):
    class id(Token):
        "%token" """
 
-    def __init__(self, parser: Parser) -> None:
-        Symbol.__init__(self, parser.sym_spec(self), parser)
-
 
 class Precedence:
    """

parsing/glrparser.py (1 addition, 1 deletion)

@@ -132,7 +132,7 @@ def token(self, token: Token) -> None:
     def eoi(self) -> None:
         """
         Signal end-of-input to the parser."""
-        token = EndOfInput(self)
+        token = EndOfInput()
         self.token(token)
 
         # Gather the start symbols from the stacks.

parsing/interfaces.py (0 additions, 4 deletions)

@@ -62,10 +62,6 @@ class Parser(abc.ABC):
     def __init__(self, spec: Spec) -> None:
         raise NotImplementedError
 
-    @abc.abstractmethod
-    def sym_spec(self, sym: Symbol) -> SymbolSpec:
-        ...
-
     @abc.abstractmethod
     def token(self, token: Token) -> None:
         raise NotImplementedError

parsing/lrparser.py (3 additions, 7 deletions)

@@ -9,7 +9,6 @@
     Epsilon,
     ShiftAction,
     ReduceAction,
-    SymbolSpec,
 )
 from parsing.interfaces import Parser, Spec
 
@@ -37,9 +36,6 @@ def __init__(self, spec: Spec) -> None:
         self.reset()
         self.verbose = False
 
-    def sym_spec(self, sym: Symbol) -> SymbolSpec:
-        return self._spec.sym_spec(sym)
-
     @property
     def spec(self) -> Spec:
         return self._spec
@@ -53,7 +49,7 @@ def start(self) -> list[Symbol] | None:
 
     def reset(self) -> None:
         self._start = None
-        self._stack = [(Epsilon(self), 0)]
+        self._stack = [(Epsilon(), 0)]
 
     def token(self, token: Token) -> None:
         """Feed a token to the parser."""
@@ -62,7 +58,7 @@ def token(self, token: Token) -> None:
 
     def eoi(self) -> None:
         """Signal end-of-input to the parser."""
-        token = EndOfInput(self)
+        token = EndOfInput()
         self.token(token)
 
         assert self._stack[-1][0] == token  # <$>.
@@ -136,7 +132,7 @@ def _reduce(self, production: Production) -> None:
     def _production(
         self, production: Production, rhs: list[Symbol]
     ) -> Nonterm:
-        sym = production.lhs.nontermType(self)
+        sym = production.lhs.nontermType()
         nRhs = len(rhs)
         assert nRhs == len(production.rhs)
         r = production.method(sym, *rhs)
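
The `_production` hunk is why the commit message warns against overloading `__init__`: during every reduction the parser instantiates `production.lhs.nontermType()` with no arguments, so a `Nonterm` subclass whose constructor requires extra parameters would now fail at reduce time. A hypothetical sketch of the constraint (production and attribute names illustrative):

class Sum(Nonterm):
    # Fine under the new API: no custom __init__. The reduce machinery
    # calls Sum() bare, then invokes the matching %reduce method, which
    # populates the instance.
    def reduce(self, left, plus, right) -> None:
        "%reduce Expr plus Expr"
        self.val = left.val + right.val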

parsing/tests/test_basic.py (51 additions, 51 deletions)

@@ -13,33 +13,33 @@ def __init__(self, spec):
         spec = parsing.Spec(a)
 
         parser = TestParser(spec)
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenStar(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenPlus(parser))
-        parser.token(a.TokenId(parser))
+        parser.token(a.TokenId())
+        parser.token(a.TokenStar())
+        parser.token(a.TokenId())
+        parser.token(a.TokenPlus())
+        parser.token(a.TokenId())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
 
         parser = TestParser(spec)
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenPlus(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenStar(parser))
-        parser.token(a.TokenId(parser))
+        parser.token(a.TokenId())
+        parser.token(a.TokenPlus())
+        parser.token(a.TokenId())
+        parser.token(a.TokenStar())
+        parser.token(a.TokenId())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID + [ID * ID]]")
 
         parser = TestParser(spec)
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenStar(parser))
-        parser.token(a.TokenLparen(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenPlus(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenRparen(parser))
+        parser.token(a.TokenId())
+        parser.token(a.TokenStar())
+        parser.token(a.TokenLparen())
+        parser.token(a.TokenId())
+        parser.token(a.TokenPlus())
+        parser.token(a.TokenId())
+        parser.token(a.TokenRparen())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID * ([ID + ID])]")
@@ -54,33 +54,33 @@ def __init__(self, spec):
         spec = parsing.Spec(b, skinny=False)
 
         parser = TestParser(spec)
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
 
         parser = TestParser(spec)
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.id(parser))
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.id())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID + [ID * ID]]")
 
         parser = TestParser(spec)
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.lparen(parser))
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
-        parser.token(b.rparen(parser))
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.lparen())
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
+        parser.token(b.rparen())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID * ([ID + ID])]")
@@ -95,13 +95,13 @@ def __init__(self, spec):
         spec = parsing.Spec(d, skinny=False)
 
         parser = TestParser(spec)
-        parser.token(d.id(parser))
-        parser.token(d.star(parser))
-        parser.token(d.id(parser))
-        parser.token(d.plus(parser))
-        parser.token(d.id(parser))
-        parser.token(d.star(parser))
-        parser.token(d.id(parser))
+        parser.token(d.id())
+        parser.token(d.star())
+        parser.token(d.id())
+        parser.token(d.plus())
+        parser.token(d.id())
+        parser.token(d.star())
+        parser.token(d.id())
         parser.eoi()
 
         self.assertEqual(len(parser.start), 1)
@@ -117,11 +117,11 @@ def __init__(self, spec):
         spec = parsing.Spec(h, skinny=False)
 
         parser = TestGlrParser(spec)
-        parser.token(h.TokenI(parser))
-        parser.token(h.TokenPlus(parser))
-        parser.token(h.TokenI(parser))
-        parser.token(h.TokenStar(parser))
-        parser.token(h.TokenI(parser))
+        parser.token(h.TokenI())
+        parser.token(h.TokenPlus())
+        parser.token(h.TokenI())
+        parser.token(h.TokenStar())
+        parser.token(h.TokenI())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(repr(parser.start[0]), "(i + (i * i))")
@@ -149,11 +149,11 @@ def __init__(self, spec):
         spec2 = pickle.loads(specPickle)
 
         parser = TestGlrParser(spec2)
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
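
The test updates show the full extent of caller-side churn: token construction loses its argument and nothing else changes. A minimal driving loop in the same style (grammar module `a` and `TestParser` as defined in this test file):

parser = TestParser(spec)
# Feed "id * id"; token classes are now instantiated bare.
for tok_cls in (a.TokenId, a.TokenStar, a.TokenId):
    parser.token(tok_cls())  # previously tok_cls(parser)
parser.eoi()
assert len(parser.start) == 1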
