
Merge upstream lib2to3 from v3.12.2-983-ga82a2f1597c
nsoranzo committed Sep 22, 2024
1 parent a17c764 commit b8818b8
Showing 16 changed files with 71 additions and 20 deletions.
2 changes: 1 addition & 1 deletion cpython
Submodule cpython updated 4212 files
52 changes: 46 additions & 6 deletions fissix/Grammar.txt
@@ -18,15 +18,55 @@ decorated: decorators (classdef | funcdef | async_funcdef)
 async_funcdef: ASYNC funcdef
 funcdef: 'def' NAME parameters ['->' test] ':' suite
 parameters: '(' [typedargslist] ')'
-typedargslist: ((tfpdef ['=' test] ',')*
-                ('*' [tname] (',' tname ['=' test])* [',' ['**' tname [',']]] | '**' tname [','])
-                | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
+
+# The following definition for typedargslist is equivalent to this set of rules:
+#
+#     arguments = argument (',' argument)*
+#     argument = tfpdef ['=' test]
+#     kwargs = '**' tname [',']
+#     args = '*' [tname]
+#     kwonly_kwargs = (',' argument)* [',' [kwargs]]
+#     args_kwonly_kwargs = args kwonly_kwargs | kwargs
+#     poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]]
+#     typedargslist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs
+#     typedargslist = arguments ',' '/' [',' [typedargslist_no_posonly]] | typedargslist_no_posonly
+#
+# It needs to be fully expanded to allow our LL(1) parser to work on it.
+
+typedargslist: tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [
+                    ',' [((tfpdef ['=' test] ',')* ('*' [tname] (',' tname ['=' test])*
+                          [',' ['**' tname [',']]] | '**' tname [','])
+                          | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])]
+               ] | ((tfpdef ['=' test] ',')* ('*' [tname] (',' tname ['=' test])*
+                    [',' ['**' tname [',']]] | '**' tname [','])
+                    | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
+
 tname: NAME [':' test]
 tfpdef: tname | '(' tfplist ')'
 tfplist: tfpdef (',' tfpdef)* [',']
-varargslist: ((vfpdef ['=' test] ',')*
-              ('*' [vname] (',' vname ['=' test])* [',' ['**' vname [',']]] | '**' vname [','])
-              | vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
+
+# The following definition for varargslist is equivalent to this set of rules:
+#
+#     arguments = argument (',' argument)*
+#     argument = vfpdef ['=' test]
+#     kwargs = '**' vname [',']
+#     args = '*' [vname]
+#     kwonly_kwargs = (',' argument)* [',' [kwargs]]
+#     args_kwonly_kwargs = args kwonly_kwargs | kwargs
+#     poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]]
+#     vararglist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs
+#     varargslist = arguments ',' '/' [',' [vararglist_no_posonly]] | vararglist_no_posonly
+#
+# It needs to be fully expanded to allow our LL(1) parser to work on it.
+
+varargslist: vfpdef ['=' test] (',' vfpdef ['=' test])* ',' '/' [',' [
+                  ((vfpdef ['=' test] ',')* ('*' [vname] (',' vname ['=' test])*
+                  [',' ['**' vname [',']]] | '**' vname [','])
+                  | vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
+             ]] | ((vfpdef ['=' test] ',')*
+                   ('*' [vname] (',' vname ['=' test])* [',' ['**' vname [',']]] | '**' vname [','])
+                   | vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
+
 vname: NAME
 vfpdef: vname | '(' vfplist ')'
 vfplist: vfpdef (',' vfpdef)* [',']
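The practical effect of the expanded typedargslist/varargslist rules is that the grammar now accepts PEP 570 positional-only parameters. A minimal sketch against fissix's public pgen2 driver (the source string is illustrative; lib2to3-style parse trees round-trip their input exactly):

from fissix import pygram, pytree
from fissix.pgen2 import driver

# Build a driver over the stock Python grammar and parse a def that uses
# the positional-only marker; parse_string expects a trailing newline.
d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
src = "def f(pos_only, /, normal, *, kw_only=1):\n    pass\n"
tree = d.parse_string(src)
print(str(tree) == src)  # True: the tree prints back the original source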
5 changes: 2 additions & 3 deletions fissix/__init__.py
@@ -7,7 +7,6 @@
 
 import logging
 import os
-import sys
 import tempfile
 from pathlib import Path
 
@@ -16,8 +15,8 @@
 from .__version__ import __version__
 from .pgen2 import driver, grammar, pgen
 
-__base_version__ = "3.9.0a6+"
-__base_revision__ = "v3.9.0a5-508-g7443d42021"
+__base_version__ = "3.12.6+"
+__base_revision__ = "v3.12.2-983-ga82a2f1597c"
 
 CACHE_DIR = Path(user_cache_dir("fissix", version=__version__))
 
1 change: 0 additions & 1 deletion fissix/btm_utils.py
@@ -222,7 +222,6 @@ def reduce_tree(node, parent=None):
         else:
             # TODO: handle {min, max} repeaters
             raise NotImplementedError
-            pass
 
     # add children
     if details_node and new_node is not None:
2 changes: 1 addition & 1 deletion fissix/fixes/fix_metaclass.py
@@ -51,7 +51,7 @@ def fixup_parse_tree(cls_node):
             # already in the preferred format, do nothing
             return
 
-    # !%@#! oneliners have no suite node, we have to fake one up
+    # !%@#! one-liners have no suite node, we have to fake one up
     for i, node in enumerate(cls_node.children):
         if node.type == token.COLON:
             break
2 changes: 1 addition & 1 deletion fissix/fixes/fix_paren.py
@@ -1,4 +1,4 @@
"""Fixer that addes parentheses where they are required
"""Fixer that adds parentheses where they are required
This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``."""

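As a usage note, the fixer can be exercised through the standard RefactoringTool API that fissix inherits from lib2to3; a hedged sketch (the fixer list and the "<example>" name are illustrative):

from fissix.refactor import RefactoringTool

# fix_paren only fires on the legacy bare-tuple comprehension syntax,
# which the fissix grammar still parses.
rt = RefactoringTool(["fissix.fixes.fix_paren"])
tree = rt.refactor_string("[x for x in 1, 2]\n", "<example>")
print(tree)  # [x for x in (1, 2)]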
1 change: 1 addition & 0 deletions fissix/fixes/fix_urllib.py
@@ -128,6 +128,7 @@ def build_pattern():
 
 
 class FixUrllib(FixImports):
+
     def build_pattern(self):
         return "|".join(build_pattern())
 
1 change: 1 addition & 0 deletions fissix/patcomp.py
@@ -36,6 +36,7 @@ def tokenize_wrapper(input):
 
 
 class PatternCompiler(object):
+
     def __init__(self, grammar_file=None):
         """Initializer.
1 change: 1 addition & 0 deletions fissix/pgen2/driver.py
@@ -27,6 +27,7 @@
 
 
 class Driver(object):
+
     def __init__(self, grammar, convert=None, logger=None):
         self.grammar = grammar
         if logger is None:
1 change: 1 addition & 0 deletions fissix/pgen2/grammar.py
@@ -193,3 +193,4 @@ def report(self):
     if line:
         op, name = line.split()
         opmap[op] = getattr(token, name)
+del line, op, name
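For context, the loop that the new del cleans up after runs at module import and builds grammar.opmap, the mapping from operator text to token numbers; a quick check of the result:

from fissix.pgen2 import grammar, token

# opmap is populated from opmap_raw when the module is imported.
print(grammar.opmap["("] == token.LPAR)         # True
print(grammar.opmap["**"] == token.DOUBLESTAR)  # True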
5 changes: 4 additions & 1 deletion fissix/pgen2/pgen.py
@@ -10,10 +10,11 @@ class PgenGrammar(grammar.Grammar):
 
 
 class ParserGenerator(object):
+
     def __init__(self, filename, stream=None):
         close_stream = None
         if stream is None:
-            stream = open(filename)
+            stream = open(filename, encoding="utf-8")
         close_stream = stream.close
         self.filename = filename
         self.stream = stream
@@ -342,6 +343,7 @@ def raise_error(self, msg, *args):
 
 
 class NFAState(object):
+
     def __init__(self):
         self.arcs = []  # list of (label, NFAState) pairs
 
@@ -352,6 +354,7 @@ def addarc(self, next, label=None):
 
 
 class DFAState(object):
+
     def __init__(self, nfaset, final):
         assert isinstance(nfaset, dict)
         assert isinstance(next(iter(nfaset)), NFAState)
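The substantive change in this file is open(filename, encoding="utf-8"): without an explicit encoding, open() decodes with the locale's preferred encoding, so a UTF-8 grammar file could fail to load under, say, a C or cp1252 locale. An illustrative sketch (the Grammar.txt path is hypothetical):

import locale

# What the old open(filename) would have used - varies per machine:
print(locale.getpreferredencoding(False))

# The new call pins the decoding so the file reads identically everywhere.
with open("Grammar.txt", encoding="utf-8") as stream:
    print(len(stream.read()))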
2 changes: 1 addition & 1 deletion fissix/pgen2/token.py
@@ -72,7 +72,7 @@
 
 tok_name = {}
 for _name, _value in list(globals().items()):
-    if type(_value) is type(0):
+    if isinstance(_value, int):
         tok_name[_value] = _name
 
 
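The isinstance() form is the idiomatic spelling of the old exact-type check. The two differ only for int subclasses, which do not occur among the module-level token constants, so behaviour is unchanged here:

print(type(True) is type(0))  # False: exact type comparison rejects bool
print(isinstance(True, int))  # True: bool subclasses int
print(isinstance(57, int))    # True under both spellings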
8 changes: 5 additions & 3 deletions fissix/pgen2/tokenize.py
@@ -206,6 +206,7 @@ def tokenize_loop(readline, tokeneater):
 
 
 class Untokenizer:
+
     def __init__(self):
         self.tokens = []
         self.prev_row = 1
@@ -592,11 +593,12 @@ def generate_tokens(readline):
                     stashed = tok
                     continue
 
-                if token == "def":
+                if token in ("def", "for"):
                     if stashed and stashed[0] == NAME and stashed[1] == "async":
 
-                        async_def = True
-                        async_def_indent = indents[-1]
+                        if token == "def":
+                            async_def = True
+                            async_def_indent = indents[-1]
 
                         yield (
                             ASYNC,
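The generate_tokens() hunk makes a stashed "async" re-emit as an ASYNC token before "for" as well as before "def", which is what async comprehensions need. A small sketch (the source string is illustrative; generate_tokens tokenizes fragments without checking validity):

from io import StringIO
from fissix.pgen2 import tokenize

src = "result = [x async for x in xs]\n"
for tok in tokenize.generate_tokens(StringIO(src).readline):
    if tok[0] == tokenize.ASYNC:
        # Exactly one ASYNC token: the "async" stashed before "for".
        print(tok)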
1 change: 1 addition & 0 deletions fissix/pygram.py
@@ -17,6 +17,7 @@
 
 
 class Symbols(object):
+
     def __init__(self, grammar):
         """Initializer.
6 changes: 4 additions & 2 deletions fissix/pytree.py
@@ -502,6 +502,7 @@ def generate_matches(self, nodes):
 
 
 class LeafPattern(BasePattern):
+
     def __init__(self, type=None, content=None, name=None):
         """
         Initializer. Takes optional type, content, and name.
@@ -731,8 +732,8 @@ def generate_matches(self, nodes):
                     r[self.name] = nodes[:count]
                 yield count, r
         except RuntimeError:
-            # We fall back to the iterative pattern matching scheme if the recursive
-            # scheme hits the recursion limit.
+            # Fall back to the iterative pattern matching scheme if the
+            # recursive scheme hits the recursion limit (RecursionError).
             for count, r in self._iterative_matches(nodes):
                 if self.name:
                     r[self.name] = nodes[:count]
@@ -802,6 +803,7 @@ def _recursive_matches(self, nodes, count):
 
 
 class NegatedPattern(BasePattern):
+
     def __init__(self, content=None):
         """
         Initializer.
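The reworded comment describes a try-recursive, fall-back-to-iterative pattern; RecursionError subclasses RuntimeError, which is why the except clause catches it. A self-contained sketch of the same idiom (not fissix's matcher, just the shape of the fallback):

import sys

def depth_recursive(node):
    # Natural recursive formulation; deep inputs blow the Python stack.
    return 1 + max((depth_recursive(child) for child in node), default=0)

def depth_iterative(node):
    # Same answer with an explicit stack; immune to RecursionError.
    best, stack = 0, [(node, 1)]
    while stack:
        current, d = stack.pop()
        best = max(best, d)
        stack.extend((child, d + 1) for child in current)
    return best

def depth(node):
    try:
        return depth_recursive(node)
    except RuntimeError:  # includes RecursionError
        return depth_iterative(node)

deep = []
for _ in range(2 * sys.getrecursionlimit()):
    deep = [deep]
print(depth(deep))  # falls back to the iterative version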
1 change: 1 addition & 0 deletions fissix/refactor.py
@@ -691,6 +691,7 @@ class MultiprocessingUnsupported(Exception):
 
 
 class MultiprocessRefactoringTool(RefactoringTool):
+
     def __init__(self, *args, **kwargs):
         super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
         self.queue = None
