Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Commit 826d4eb

Browse files
sobolevn and AA-Turner
authored
[3.13] gh-138281: Run ruff on Tools/peg_generator (GH-138282) (#138472)
(cherry picked from commit 0d1f4e1) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
1 parent e8280e3 commit 826d4eb

16 files changed

+247
-226
lines changed

‎.pre-commit-config.yaml‎

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,10 @@ repos:
1414
name: Run Ruff (lint) on Argument Clinic
1515
args: [--exit-non-zero-on-fix, --config=Tools/clinic/.ruff.toml]
1616
files: ^Tools/clinic/|Lib/test/test_clinic.py
17+
- id: ruff
18+
name: Run Ruff (lint) on Tools/peg_generator/
19+
args: [--exit-non-zero-on-fix, --config=Tools/peg_generator/.ruff.toml]
20+
files: ^Tools/peg_generator/
1721
- id: ruff-format
1822
name: Run Ruff (format) on Doc/
1923
args: [--check]

‎Tools/peg_generator/.ruff.toml‎

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
extend = "../../.ruff.toml" # Inherit the project-wide settings
2+
3+
extend-exclude = [
4+
# Generated files:
5+
"Tools/peg_generator/pegen/grammar_parser.py",
6+
]
7+
8+
[lint]
9+
select = [
10+
"F", # pyflakes
11+
"I", # isort
12+
"UP", # pyupgrade
13+
"RUF100", # Ban unused `# noqa` comments
14+
"PGH004", # Ban blanket `# noqa` comments (only ignore specific error codes)
15+
]
16+
ignore = [
17+
# Use PEP-604 unions rather than tuples for isinstance() checks.
18+
# Makes code slower and more verbose. https://github.com/astral-sh/ruff/issues/7871.
19+
"UP038",
20+
]
21+
unfixable = [
22+
# The autofixes sometimes do the wrong things for these;
23+
# it's better to have to manually look at the code and see how it needs fixing
24+
"F841", # Detects unused variables
25+
"F601", # Detects dictionaries that have duplicate keys
26+
"F602", # Also detects dictionaries that have duplicate keys
27+
]

‎Tools/peg_generator/pegen/__main__.py‎

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
import time
1111
import token
1212
import traceback
13-
from typing import Tuple
1413

1514
from pegen.grammar import Grammar
1615
from pegen.parser import Parser
@@ -21,7 +20,7 @@
2120

2221
def generate_c_code(
2322
args: argparse.Namespace,
24-
) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
23+
) -> tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
2524
from pegen.build import build_c_parser_and_generator
2625

2726
verbose = args.verbose
@@ -50,7 +49,7 @@ def generate_c_code(
5049

5150
def generate_python_code(
5251
args: argparse.Namespace,
53-
) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
52+
) -> tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
5453
from pegen.build import build_python_parser_and_generator
5554

5655
verbose = args.verbose
@@ -185,7 +184,7 @@ def main() -> None:
185184

186185

187186
if __name__ == "__main__":
188-
if sys.version_info < (3, 8):
187+
    if sys.version_info < (3, 8):  # noqa: UP036
189188
print("ERROR: using pegen requires at least Python 3.8!", file=sys.stderr)
190189
sys.exit(1)
191190
main()

‎Tools/peg_generator/pegen/ast_dump.py‎

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -6,17 +6,17 @@
66
TODO: Remove the above-described hack.
77
"""
88

9-
from typing import Any, Optional, Tuple
9+
from typing import Any
1010

1111

1212
def ast_dump(
1313
node: Any,
1414
annotate_fields: bool = True,
1515
include_attributes: bool = False,
1616
*,
17-
indent: Optional[str] = None,
17+
    indent: str | None = None,
1818
) -> str:
19-
def _format(node: Any, level: int = 0) -> Tuple[str, bool]:
19+
def _format(node: Any, level: int = 0) -> tuple[str, bool]:
2020
if indent is not None:
2121
level += 1
2222
prefix = "\n" + indent * level
@@ -41,7 +41,7 @@ def _format(node: Any, level: int = 0) -> Tuple[str, bool]:
4141
value, simple = _format(value, level)
4242
allsimple = allsimple and simple
4343
if keywords:
44-
args.append("%s=%s"% (name, value))
44+
args.append(f"{name}={value}")
4545
else:
4646
args.append(value)
4747
if include_attributes and node._attributes:
@@ -54,16 +54,16 @@ def _format(node: Any, level: int = 0) -> Tuple[str, bool]:
5454
continue
5555
value, simple = _format(value, level)
5656
allsimple = allsimple and simple
57-
args.append("%s=%s"% (name, value))
57+
args.append(f"{name}={value}")
5858
if allsimple and len(args) <= 3:
59-
return "%s(%s)"%(node.__class__.__name__, ", ".join(args)), not args
60-
return "%s(%s%s)"% (node.__class__.__name__, prefix, sep.join(args)), False
59+
return "{}({})".format(node.__class__.__name__, ", ".join(args)), not args
60+
return f"{node.__class__.__name__}({prefix}{sep.join(args)})", False
6161
elif isinstance(node, list):
6262
if not node:
6363
return "[]", True
64-
return "[%s%s]"% (prefix, sep.join(_format(x, level)[0] for x in node)), False
64+
return f"[{prefix}{sep.join(_format(x, level)[0] for x in node)}]", False
6565
return repr(node), True
6666

6767
if all(cls.__name__ != "AST" for cls in node.__class__.__mro__):
68-
raise TypeError("expected AST, got %r"%node.__class__.__name__)
68+
raise TypeError(f"expected AST, got {node.__class__.__name__!r}")
6969
return _format(node)[0]

‎Tools/peg_generator/pegen/build.py‎

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import sysconfig
77
import tempfile
88
import tokenize
9-
from typing import IO, Any, Dict, List, Optional, Set, Tuple
9+
from typing import IO, Any
1010

1111
from pegen.c_generator import CParserGenerator
1212
from pegen.grammar import Grammar
@@ -18,11 +18,11 @@
1818

1919
MOD_DIR = pathlib.Path(__file__).resolve().parent
2020

21-
TokenDefinitions = Tuple[Dict[int, str], Dict[str, int], Set[str]]
21+
TokenDefinitions = tuple[dict[int, str], dict[str, int], set[str]]
2222
Incomplete = Any # TODO: install `types-setuptools` and remove this alias
2323

2424

25-
def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> List[str]:
25+
def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> list[str]:
2626
flags = sysconfig.get_config_var(compiler_flags)
2727
py_flags_nodist = sysconfig.get_config_var(compiler_py_flags_nodist)
2828
if flags is None or py_flags_nodist is None:
@@ -71,11 +71,11 @@ def fixup_build_ext(cmd: Incomplete) -> None:
7171

7272
def compile_c_extension(
7373
generated_source_path: str,
74-
build_dir: Optional[str] = None,
74+
    build_dir: str | None = None,
7575
verbose: bool = False,
7676
keep_asserts: bool = True,
7777
disable_optimization: bool = False,
78-
library_dir: Optional[str] = None,
78+
    library_dir: str | None = None,
7979
) -> pathlib.Path:
8080
"""Compile the generated source for a parser generator into an extension module.
8181
@@ -93,8 +93,7 @@ def compile_c_extension(
9393
"""
9494
import setuptools.command.build_ext
9595
import setuptools.logging
96-
97-
from setuptools import Extension, Distribution
96+
from setuptools import Distribution, Extension
9897
try:
9998
from setuptools.modified import newer_group
10099
except ImportError:
@@ -242,7 +241,7 @@ def compile_c_extension(
242241

243242
def build_parser(
244243
grammar_file: str, verbose_tokenizer: bool = False, verbose_parser: bool = False
245-
) -> Tuple[Grammar, Parser, Tokenizer]:
244+
) -> tuple[Grammar, Parser, Tokenizer]:
246245
with open(grammar_file) as file:
247246
tokenizer = Tokenizer(tokenize.generate_tokens(file.readline), verbose=verbose_tokenizer)
248247
parser = GrammarParser(tokenizer, verbose=verbose_parser)
@@ -293,7 +292,7 @@ def build_c_generator(
293292
keep_asserts_in_extension: bool = True,
294293
skip_actions: bool = False,
295294
) -> ParserGenerator:
296-
with open(tokens_file, "r") as tok_file:
295+
with open(tokens_file) as tok_file:
297296
all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file)
298297
with open(output_file, "w") as file:
299298
gen: ParserGenerator = CParserGenerator(
@@ -334,7 +333,7 @@ def build_c_parser_and_generator(
334333
verbose_c_extension: bool = False,
335334
keep_asserts_in_extension: bool = True,
336335
skip_actions: bool = False,
337-
) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
336+
) -> tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
338337
"""Generate rules, C parser, tokenizer, parser generator for a given grammar
339338
340339
Args:
@@ -374,7 +373,7 @@ def build_python_parser_and_generator(
374373
verbose_tokenizer: bool = False,
375374
verbose_parser: bool = False,
376375
skip_actions: bool = False,
377-
) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
376+
) -> tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
378377
"""Generate rules, python parser, tokenizer, parser generator for a given grammar
379378
380379
Args:

0 commit comments

Comments
(0)

Page converted by AltStyle (-> view original) /