Skip to content

Commit 6cad3e9

Browse files
committed
debt: Format generated Python parser module
- Remove the configuration that excluded generated Python modules from formatting
- Ensure the entire library is formatted correctly
- Drop redundant `or False` logic from conditionals
- Drop redundant `else` branches after `return` within an `if` clause
- Skip formatting on the `state_comment` string — generating a correctly formatted Python multiline string from C# is challenging
1 parent 9526c8a commit 6cad3e9

File tree

3 files changed

+2911
-2173
lines changed

3 files changed

+2911
-2173
lines changed

python/gherkin-python.razor

Lines changed: 70 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -4,21 +4,31 @@
44
switch(production.Type)
55
{
66
case ProductionRuleType.Start:
7-
@: self.start_rule(context, '@production.RuleName')
7+
@: self.start_rule(context, "@production.RuleName")
88
break;
99
case ProductionRuleType.End:
10-
@: self.end_rule(context, '@production.RuleName')
10+
@: self.end_rule(context, "@production.RuleName")
1111
break;
1212
case ProductionRuleType.Process:
13-
@: self.build(context, token)
13+
@: self.build(context, token)
1414
break;
1515
}
1616
}
1717
@helper HandleParserError(IEnumerable<string> expectedTokens, State state)
18-
{<text> state_comment = "State: @state.Id - @Raw(state.Comment)"
18+
{<text> state_comment = "State: @state.Id - @Raw(state.Comment)" # fmt: skip
1919
token.detach
20-
expected_tokens = ["@Raw(string.Join("\", \"", expectedTokens))"]
21-
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
20+
expected_tokens = [
21+
</text>
22+
@foreach (var token in expectedTokens)
23+
{<text> "@token",
24+
</text>
25+
}
26+
<text> ]
27+
error = (
28+
UnexpectedEOFException(token, expected_tokens, state_comment)
29+
if token.eof()
30+
else UnexpectedTokenException(token, expected_tokens, state_comment)
31+
)
2232
if self.stop_at_first_error:
2333
raise error
2434
self.add_error(context, error)
@@ -30,23 +40,28 @@ from __future__ import annotations
3040

3141
from collections import deque
3242
from collections.abc import Callable
33-
from typing import cast, TypeVar
43+
from typing import TypeVar, cast
3444

3545
from .ast_builder import AstBuilder
46+
from .errors import (
47+
CompositeParserException,
48+
ParserException,
49+
UnexpectedEOFException,
50+
UnexpectedTokenException,
51+
)
52+
from .parser_types import GherkinDocument
3653
from .token import Token
3754
from .token_matcher import TokenMatcher
3855
from .token_scanner import TokenScanner
39-
from .parser_types import GherkinDocument
40-
from .errors import UnexpectedEOFException, UnexpectedTokenException, ParserException, CompositeParserException
4156

42-
_T = TypeVar('_T')
43-
_U = TypeVar('_U')
44-
_V = TypeVar('_V')
57+
_T = TypeVar("_T")
58+
_U = TypeVar("_U")
59+
_V = TypeVar("_V")
4560

4661
RULE_TYPE = [
47-
'None',
62+
"None",
4863
@foreach(var rule in Model.RuleSet.Where(r => !r.TempRule))
49-
{<text> '@rule.Name.Replace("#", "_")', # @rule.ToString(true)
64+
{<text> "@rule.Name.Replace('#', '_')", # @rule.ToString(true)
5065
</text>}
5166
]
5267

@@ -75,18 +90,18 @@ class @(Model.ParserClassName):
7590
token_scanner_or_str: TokenScanner | str,
7691
token_matcher: TokenMatcher | None = None,
7792
) -> GherkinDocument:
78-
token_scanner = TokenScanner(token_scanner_or_str) if isinstance(token_scanner_or_str, str) else token_scanner_or_str
93+
token_scanner = (
94+
TokenScanner(token_scanner_or_str)
95+
if isinstance(token_scanner_or_str, str)
96+
else token_scanner_or_str
97+
)
7998
self.ast_builder.reset()
8099
if token_matcher is None:
81100
token_matcher = TokenMatcher()
82101
token_matcher.reset()
83-
context = ParserContext(
84-
token_scanner,
85-
token_matcher,
86-
deque(),
87-
[])
102+
context = ParserContext(token_scanner, token_matcher, deque(), [])
88103

89-
self.start_rule(context, '@Model.RuleSet.StartRule.Name')
104+
self.start_rule(context, "@Model.RuleSet.StartRule.Name")
90105
state = 0
91106
token = None
92107
while True:
@@ -95,7 +110,7 @@ class @(Model.ParserClassName):
95110
if token.eof():
96111
break
97112

98-
self.end_rule(context, '@Model.RuleSet.StartRule.Name')
113+
self.end_rule(context, "@Model.RuleSet.StartRule.Name")
99114

100115
if context.errors:
101116
raise CompositeParserException(context.errors)
@@ -123,8 +138,7 @@ class @(Model.ParserClassName):
123138
def read_token(self, context: ParserContext) -> Token:
124139
if context.token_queue:
125140
return context.token_queue.popleft()
126-
else:
127-
return context.token_scanner.read()
141+
return context.token_scanner.read()
128142
@foreach(var rule in Model.RuleSet.TokenRules)
129143
{<text>
130144
def match_@(rule.Name.Replace("#", ""))(self, context: ParserContext, token: Token) -> bool:
@@ -133,20 +147,24 @@ class @(Model.ParserClassName):
133147
@:if token.eof():
134148
@: return False
135149
}
136-
return self.handle_external_error(context, False, token, context.token_matcher.match_@(rule.Name.Replace("#", "")))</text>
150+
return self.handle_external_error(
151+
context, False, token, context.token_matcher.match_@(rule.Name.Replace("#", ""))
152+
)
153+
</text>
137154
}
138155

139156
def match_token(self, state: int, token: Token, context: ParserContext) -> int:
140-
state_map: dict[int, Callable[[Token, ParserContext], int]]= {
157+
state_map: dict[int, Callable[[Token, ParserContext], int]] = {
141158
@foreach(var state in Model.States.Values.Where(s => !s.IsEndState))
142159
{
143160
@: @state.Id: self.match_token_at_@(state.Id),
144161
}
145162
}
146-
if state in state_map:
147-
return state_map[state](token, context)
148-
else:
163+
164+
if state not in state_map:
149165
raise RuntimeError("Unknown state: " + str(state))
166+
167+
return state_map[state](token, context)
150168
@foreach(var state in Model.States.Values.Where(s => !s.IsEndState))
151169
{<text>
152170
# @Raw(state.Comment)
@@ -157,15 +175,24 @@ class @(Model.ParserClassName):
157175
if (transition.LookAheadHint != null)
158176
{
159177
@:if self.lookahead_@(transition.LookAheadHint.Id)(context, token):
160-
}
161178
foreach(var production in transition.Productions)
162179
{
163-
@CallProduction(production)
180+
<text> </text>@CallProduction(production)
164181
}
165182
@:return @transition.TargetState
183+
} else
184+
{
185+
foreach(var production in transition.Productions)
186+
{
187+
<text> </text>@CallProduction(production)
188+
}
189+
@:return @transition.TargetState
190+
}
166191
}
167192

168-
@HandleParserError(state.Transitions.Select(t => "#" + t.TokenType.ToString()).Distinct(), state)</text>
193+
@HandleParserError(state.Transitions.Select(t => "#" + t.TokenType.ToString()).Distinct(), state)
194+
195+
</text>
169196
}
170197
@foreach(var lookAheadHint in Model.RuleSet.LookAheadHints)
171198
{
@@ -180,16 +207,24 @@ class @(Model.ParserClassName):
180207
token.detach
181208
queue.append(token)
182209

183-
if (@foreach(var tokenType in lookAheadHint.ExpectedTokens) {<text>self.@MatchToken(tokenType) or </text>}False):
210+
if @foreach(var tokenType in lookAheadHint.ExpectedTokens) {<text>self.@MatchToken(tokenType)</text>}:
184211
match = True
185212
break
186213

187-
if not (@foreach(var tokenType in lookAheadHint.Skip) {<text>self.@MatchToken(tokenType) or </text>}False):
214+
if not any(
215+
(@foreach(var tokenType in lookAheadHint.Skip) {
216+
<text>
217+
self.@MatchToken(tokenType),</text>}
218+
219+
)
220+
):
188221
break
189222

190223
context.token_queue.extend(queue)
191224

192-
return match</text>
225+
return match
226+
</text>
227+
193228
}
194229

195230
# private

0 commit comments

Comments (0)