From fd1a3aefe66eeef415f043255c0d5b5a843dc163 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Oct 2025 20:20:49 +0000 Subject: [PATCH 1/2] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.5.0 → v6.0.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.5.0...v6.0.0) - [github.com/asottile/reorder-python-imports: v3.12.0 → v3.16.0](https://github.com/asottile/reorder-python-imports/compare/v3.12.0...v3.16.0) - https://github.com/psf/black → https://github.com/psf/black-pre-commit-mirror - [github.com/psf/black-pre-commit-mirror: 23.10.1 → 25.9.0](https://github.com/psf/black-pre-commit-mirror/compare/23.10.1...25.9.0) - [github.com/asottile/pyupgrade: v3.15.0 → v3.21.0](https://github.com/asottile/pyupgrade/compare/v3.15.0...v3.21.0) - [github.com/pre-commit/mirrors-mypy: v1.6.1 → v1.18.2](https://github.com/pre-commit/mirrors-mypy/compare/v1.6.1...v1.18.2) - [github.com/pycqa/flake8: 6.1.0 → 7.3.0](https://github.com/pycqa/flake8/compare/6.1.0...7.3.0) --- .pre-commit-config.yaml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 27fd867..db3ed98 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,18 +1,18 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v6.0.0 hooks: - id: check-yaml - id: end-of-file-fixer - id: trailing-whitespace exclude: ^(tests/.*) - repo: https://github.com/asottile/reorder-python-imports - rev: v3.12.0 + rev: v3.16.0 hooks: - id: reorder-python-imports -- repo: https://github.com/psf/black - rev: '23.10.1' +- repo: https://github.com/psf/black-pre-commit-mirror + rev: '25.9.0' hooks: - id: black args: @@ -21,13 +21,13 @@ repos: - "120" - repo: https://github.com/asottile/pyupgrade - rev: v3.15.0 + rev: v3.21.0 hooks: - id: pyupgrade args: ["--py38-plus"] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.6.1' + rev: 'v1.18.2' hooks: - id: mypy args: @@ -39,7 +39,7 @@ repos: - types-regex - repo: https://github.com/pycqa/flake8 - rev: '6.1.0' # pick a git hash / tag to point to + rev: '7.3.0' # pick a git hash / tag to point to hooks: - id: flake8 args: From f0b6fd4589b91f6483f367d371c85e3849a7fb2e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Oct 2025 20:21:54 +0000 Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- json5/loader.py | 9 ++++---- json5/model.py | 24 ++++++++------------ json5/parser.py | 49 +++++++++++++++------------------------- json5/tokenizer.py | 8 +++---- tests/test_errors.py | 15 ++++-------- tests/test_json5_load.py | 2 +- 6 files changed, 41 insertions(+), 66 deletions(-) diff --git a/json5/loader.py b/json5/loader.py index a5e7f04..b7cac63 100644 --- a/json5/loader.py +++ b/json5/loader.py @@ -46,14 +46,13 @@ def __init__( self.parse_int: Callable[[str], typing.Any] | None = parse_int self.parse_constant: Callable[[Literal['-Infinity', 'Infinity', 'NaN']], typing.Any] | None = parse_constant self.strict: bool = strict - self.object_pairs_hook: None | ( - Callable[[list[tuple[str | JsonIdentifier, typing.Any]]], typing.Any] - ) = object_pairs_hook + self.object_pairs_hook: None | (Callable[[list[tuple[str | JsonIdentifier, 
typing.Any]]], typing.Any]) = ( + object_pairs_hook + ) self.parse_json5_identifiers: Callable[[JsonIdentifier], typing.Any] | None = parse_json5_identifiers -class JsonIdentifier(str): - ... +class JsonIdentifier(str): ... def load( diff --git a/json5/model.py b/json5/model.py index 17f2aae..9bd77b3 100644 --- a/json5/model.py +++ b/json5/model.py @@ -41,7 +41,7 @@ class KeyValuePair(NamedTuple): value: Value -def walk(root: Node) -> typing.Generator[Node, None, None]: +def walk(root: Node) -> typing.Generator[Node]: todo = deque([root]) while todo: node: Node = todo.popleft() @@ -49,7 +49,7 @@ def walk(root: Node) -> typing.Generator[Node, None, None]: yield node -def iter_child_nodes(node: Node) -> typing.Generator[Node, None, None]: +def iter_child_nodes(node: Node) -> typing.Generator[Node]: for attr, value in iter_fields(node): if isinstance(value, Node): yield value @@ -59,7 +59,7 @@ def iter_child_nodes(node: Node) -> typing.Generator[Node, None, None]: yield item -def iter_fields(node: Node) -> typing.Generator[tuple[str, Any], None, None]: +def iter_fields(node: Node) -> typing.Generator[tuple[str, Any]]: for field_name in node._fields: try: value = getattr(node, field_name) @@ -133,8 +133,7 @@ class Value(Node): pass -class Key(Node): - ... +class Key(Node): ... class JSONObject(Value): @@ -208,8 +207,7 @@ def __eq__(self, other: Any) -> bool: return hash(self) == hash(other) -class Number(Value): - ... +class Number(Value): ... class Integer(Number): @@ -301,12 +299,10 @@ def __init__( super().__init__(tok=tok, end_tok=tok) -class DoubleQuotedString(String): - ... +class DoubleQuotedString(String): ... -class SingleQuotedString(String): - ... +class SingleQuotedString(String): ... class BooleanLiteral(Value): @@ -348,9 +344,7 @@ def __init__(self, value: str, tok: JSON5Token | None = None, end_tok: JSON5Toke super().__init__(tok=tok, end_tok=tok) # Comments are always a single token -class LineComment(Comment): - ... +class LineComment(Comment): ... -class BlockComment(Comment): - ... +class BlockComment(Comment): ... diff --git a/json5/parser.py b/json5/parser.py index bf36142..5f3e491 100644 --- a/json5/parser.py +++ b/json5/parser.py @@ -48,11 +48,11 @@ def warning(self, *args: Any, **kwargs: Any) -> None: ESCAPE_SEQUENCES = { 'b': '\u0008', - 'f': '\u000C', - 'n': '\u000A', - 'r': '\u000D', + 'f': '\u000c', + 'n': '\u000a', + 'r': '\u000d', 't': '\u0009', - 'v': '\u000B', + 'v': '\u000b', '0': '\u0000', '\\': '\u005c', '"': '\u0022', @@ -97,8 +97,7 @@ def unicode_escape_replace(matchobj: re.Match[str]) -> str: class T_TokenSlice(Protocol): - def __getitem__(self, item: int) -> JSON5Token: - ... + def __getitem__(self, item: int) -> JSON5Token: ... class T_AnyProduction(Protocol): @@ -110,8 +109,7 @@ class T_TextProduction(Protocol): wsc1: list[Comment | str] value: Value - def __getitem__(self, i: Literal[1]) -> Value: - ... + def __getitem__(self, i: Literal[1]) -> Value: ... class T_FirstKeyValuePairProduction(Protocol): @@ -122,22 +120,19 @@ class T_FirstKeyValuePairProduction(Protocol): value: Value _slice: T_TokenSlice - def __getitem__(self, item: int) -> Key | Value: - ... + def __getitem__(self, item: int) -> Key | Value: ... class T_WSCProduction(Protocol): _slice: T_TokenSlice - def __getitem__(self, item: Literal[0]) -> str | Comment: - ... + def __getitem__(self, item: Literal[0]) -> str | Comment: ... class T_CommentProduction(Protocol): _slice: T_TokenSlice - def __getitem__(self, item: Literal[0]) -> str: - ... 
+ def __getitem__(self, item: Literal[0]) -> str: ... class T_KeyValuePairsProduction(Protocol): @@ -161,8 +156,7 @@ class SubsequentKeyValuePairProduction(Protocol): class T_FirstArrayValueProduction(Protocol): _slice: T_TokenSlice - def __getitem__(self, item: Literal[1]) -> Value: - ... + def __getitem__(self, item: Literal[1]) -> Value: ... wsc: list[Comment | str] @@ -188,20 +182,17 @@ class T_JsonArrayProduction(Protocol): class T_IdentifierProduction(Protocol): _slice: T_TokenSlice - def __getitem__(self, item: Literal[0]) -> str: - ... + def __getitem__(self, item: Literal[0]) -> str: ... class T_KeyProduction(Protocol): - def __getitem__(self, item: Literal[1]) -> Identifier | DoubleQuotedString | SingleQuotedString: - ... + def __getitem__(self, item: Literal[1]) -> Identifier | DoubleQuotedString | SingleQuotedString: ... class T_NumberProduction(Protocol): _slice: T_TokenSlice - def __getitem__(self, item: Literal[0]) -> str: - ... + def __getitem__(self, item: Literal[0]) -> str: ... class T_ValueNumberProduction(Protocol): @@ -212,22 +203,19 @@ class T_ValueNumberProduction(Protocol): class T_ExponentNotationProduction(Protocol): _slice: T_TokenSlice - def __getitem__(self, item: int) -> str: - ... + def __getitem__(self, item: int) -> str: ... class T_StringTokenProduction(Protocol): _slice: T_TokenSlice - def __getitem__(self, item: Literal[0]) -> str: - ... + def __getitem__(self, item: Literal[0]) -> str: ... class T_StringProduction(Protocol): _slice: T_TokenSlice - def __getitem__(self, item: Literal[0]) -> DoubleQuotedString | SingleQuotedString: - ... + def __getitem__(self, item: Literal[0]) -> DoubleQuotedString | SingleQuotedString: ... class T_ValueProduction(Protocol): @@ -246,8 +234,7 @@ def __getitem__( | Integer | Float | NaN - ): - ... + ): ... T_CallArg = typing.TypeVar('T_CallArg') @@ -642,7 +629,7 @@ class tok: self.errors.append(JSON5DecodeError('Expecting value. 
Received unexpected EOF', None)) return None - def _token_gen(self, tokens: typing.Iterable[JSON5Token]) -> typing.Generator[JSON5Token, None, None]: + def _token_gen(self, tokens: typing.Iterable[JSON5Token]) -> typing.Generator[JSON5Token]: for tok in tokens: self.last_token = tok self.seen_tokens.append(tok) diff --git a/json5/tokenizer.py b/json5/tokenizer.py index 1bb82ef..41a65f0 100644 --- a/json5/tokenizer.py +++ b/json5/tokenizer.py @@ -91,7 +91,7 @@ class JSONLexer(Lexer): # type: ignore[misc] OCTAL, # Not allowed, but we capture as a token to raise error later } - def tokenize(self, text: str, lineno: int = 1, index: int = 0) -> Generator[JSON5Token, None, None]: + def tokenize(self, text: str, lineno: int = 1, index: int = 0) -> Generator[JSON5Token]: for tok in super().tokenize(text, lineno, index): tok = JSON5Token(tok, text) yield tok @@ -120,7 +120,7 @@ def BLOCK_COMMENT(self, tok: JSON5Token) -> JSON5Token: self.lineno += tok.value.count('\n') return tok - @_("[\u0009\u000A\u000B\u000C\u000D\u0020\u00A0\u2028\u2029\ufeff]+") + @_("[\u0009\u000a\u000b\u000c\u000d\u0020\u00a0\u2028\u2029\ufeff]+") def WHITESPACE(self, tok: JSON5Token) -> JSON5Token: self.lineno += tok.value.count('\n') return tok @@ -147,13 +147,13 @@ def error(self, t: JSON5Token) -> NoReturn: raise JSON5DecodeError(f'Illegal character {t.value[0]!r} at index {self.index}', None) -def tokenize(text: str) -> Generator[JSON5Token, None, None]: +def tokenize(text: str) -> Generator[JSON5Token]: lexer = JSONLexer() tokens = lexer.tokenize(text) return tokens -def reversed_enumerate(tokens: typing.Sequence[JSON5Token]) -> typing.Generator[tuple[int, JSON5Token], None, None]: +def reversed_enumerate(tokens: typing.Sequence[JSON5Token]) -> typing.Generator[tuple[int, JSON5Token]]: for i in reversed(range(len(tokens))): tok = tokens[i] yield i, tok diff --git a/tests/test_errors.py b/tests/test_errors.py index a269481..1048f7d 100644 --- a/tests/test_errors.py +++ b/tests/test_errors.py @@ -20,8 +20,7 @@ def test_loading_comment_raises_runtime_error_default_loader(): def test_loading_unknown_node_raises_error(): - class Foo: - ... + class Foo: ... f = Foo() with pytest.raises(NotImplementedError): @@ -29,8 +28,7 @@ class Foo: def test_dumping_unknown_node_raises_error(): - class Foo: - ... + class Foo: ... f = Foo() with pytest.raises(NotImplementedError): @@ -38,8 +36,7 @@ class Foo: def test_known_type_in_wsc_raises_error(): - class Foo: - ... + class Foo: ... f = Foo() model = loads('{foo: "bar"}', loader=ModelLoader()) @@ -53,8 +50,7 @@ class Foo: def test_modelizing_unknown_object_raises_error(): - class Foo: - ... + class Foo: ... f = Foo() with pytest.raises(NotImplementedError): @@ -62,8 +58,7 @@ class Foo: def test_model_dumper_raises_error_for_unknown_node(): - class Foo: - ... + class Foo: ... f = Foo() with pytest.raises(NotImplementedError): diff --git a/tests/test_json5_load.py b/tests/test_json5_load.py index f0c52d9..d05a25a 100644 --- a/tests/test_json5_load.py +++ b/tests/test_json5_load.py @@ -315,7 +315,7 @@ def test_escape_unicode(): sig\\u03A3ma: "\\u03A3 is the sum of all things" } """ - assert loads(json_string) == {"sig\u03A3ma": "\u03A3 is the sum of all things"} + assert loads(json_string) == {"sig\u03a3ma": "\u03a3 is the sum of all things"} def test_load_identifier_with_connector_punctuation():
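
For context: the second commit is purely mechanical; it applies the formatting that the updated hook pins now enforce, most visibly Black's newer stable style (stub bodies collapsed onto one line, hex digits in unicode escapes lowercased) plus the shortened `Generator[...]` annotations. A minimal before/after sketch of the two Black rewrites, using hypothetical names (`Stub`, `FORM_FEED`) that do not appear in json5:

```python
# Accepted by the old pin (black 23.10.1):
class Stub:
    ...


FORM_FEED = "\u000C"  # uppercase hex digits in the escape


# Produced by the new pin (black-pre-commit-mirror 25.9.0):
class Stub: ...  # "dummy" bodies collapse onto one line


FORM_FEED = "\u000c"  # hex digits in escapes are lowercased
```

Locally, the same two commits can typically be reproduced by running `pre-commit autoupdate` and then `pre-commit run --all-files`.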