@@ -6,7 +6,7 @@
 import sysconfig
 import tempfile
 import tokenize
-from typing import IO, Any, Dict, List, Optional, Set, Tuple
+from typing import IO, Any
 
 from pegen.c_generator import CParserGenerator
 from pegen.grammar import Grammar
@@ -18,11 +18,11 @@
 
 MOD_DIR = pathlib.Path(__file__).resolve().parent
 
-TokenDefinitions = Tuple[Dict[int, str], Dict[str, int], Set[str]]
+TokenDefinitions = tuple[dict[int, str], dict[str, int], set[str]]
 Incomplete = Any  # TODO: install `types-setuptools` and remove this alias
 
 
-def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> List[str]:
+def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> list[str]:
     flags = sysconfig.get_config_var(compiler_flags)
     py_flags_nodist = sysconfig.get_config_var(compiler_py_flags_nodist)
     if flags is None or py_flags_nodist is None:
@@ -71,11 +71,11 @@ def fixup_build_ext(cmd: Incomplete) -> None:
 
 def compile_c_extension(
     generated_source_path: str,
-    build_dir: Optional[str] = None,
+    build_dir: str | None = None,
     verbose: bool = False,
     keep_asserts: bool = True,
     disable_optimization: bool = False,
-    library_dir: Optional[str] = None,
+    library_dir: str | None = None,
 ) -> pathlib.Path:
     """Compile the generated source for a parser generator into an extension module.
 
@@ -93,8 +93,7 @@ def compile_c_extension(
     """
     import setuptools.command.build_ext
     import setuptools.logging
-
-    from setuptools import Extension, Distribution
+    from setuptools import Distribution, Extension
     try:
         from setuptools.modified import newer_group
     except ImportError:
@@ -242,7 +241,7 @@ def compile_c_extension(
 
 def build_parser(
     grammar_file: str, verbose_tokenizer: bool = False, verbose_parser: bool = False
-) -> Tuple[Grammar, Parser, Tokenizer]:
+) -> tuple[Grammar, Parser, Tokenizer]:
     with open(grammar_file) as file:
         tokenizer = Tokenizer(tokenize.generate_tokens(file.readline), verbose=verbose_tokenizer)
         parser = GrammarParser(tokenizer, verbose=verbose_parser)
@@ -293,7 +292,7 @@ def build_c_generator(
     keep_asserts_in_extension: bool = True,
     skip_actions: bool = False,
 ) -> ParserGenerator:
-    with open(tokens_file, "r") as tok_file:
+    with open(tokens_file) as tok_file:
         all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file)
     with open(output_file, "w") as file:
         gen: ParserGenerator = CParserGenerator(
@@ -334,7 +333,7 @@ def build_c_parser_and_generator(
     verbose_c_extension: bool = False,
     keep_asserts_in_extension: bool = True,
     skip_actions: bool = False,
-) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
+) -> tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
     """Generate rules, C parser, tokenizer, parser generator for a given grammar
 
     Args:
@@ -374,7 +373,7 @@ def build_python_parser_and_generator(
     verbose_tokenizer: bool = False,
     verbose_parser: bool = False,
     skip_actions: bool = False,
-) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
+) -> tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
     """Generate rules, python parser, tokenizer, parser generator for a given grammar
 
     Args:
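
Note on the typing changes: the patch adopts PEP 585 built-in generics (tuple, dict, set, list; usable at runtime from Python 3.9) and PEP 604 union syntax (str | None; evaluated at runtime only from Python 3.10). A minimal sketch of the two spellings side by side; the stub function name is illustrative, not part of the patch:

    # PEP 585: parameterize the built-in containers directly. This
    # module-level alias is evaluated eagerly, so it needs Python >= 3.9.
    TokenDefinitions = tuple[dict[int, str], dict[str, int], set[str]]

    # PEP 604: `str | None` replaces Optional[str]. Evaluated in a
    # signature this needs Python >= 3.10; with
    # `from __future__ import annotations` the annotation stays a string
    # and also parses on earlier versions.
    def compile_c_extension_stub(build_dir: str | None = None) -> str | None:
        return build_dir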
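The try/except ImportError block in the compile_c_extension hunk guards the import of newer_group: setuptools.modified is its current home, while older setuptools releases shipped the same helper under another name. The line following `except ImportError:` falls outside the hunk, so the fallback below is an assumption, not part of the patch:

    try:
        # Current location in recent setuptools releases.
        from setuptools.modified import newer_group
    except ImportError:
        # Assumed fallback for older setuptools; not shown in the diff.
        from setuptools.dep_util import newer_group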