Mirror of https://github.com/rocky/python-uncompyle6.git, synced 2025-08-03 00:45:53 +08:00

Compare commits: release-py ... do-not-quo (24 commits)
Commits (SHA1):

ad00b9a4ee
551e428086
cd0049933f
7f3c1fa3a4
f76c35c902
82963cdf2c
a20972dd12
18b5934b2d
982abe0980
41d1ba31f3
e03f4cfe43
53a5e03a8f
7c99564640
931abc5726
2b3cd696db
50697bb79e
137dd64a46
9a7eb0ad0a
154dabfcef
42d26ccbd7
73a4c0be78
92830c2eae
090570cd34
16914feb12
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 1 line)
@@ -0,0 +1 @@
blank_issues_enabled: False
NEWS.md (24 lines changed)
@@ -1,4 +1,26 @@
3.8.0: 2020-10-29
3.9.0: 2022-12-22
=================

* deparse generator expressions for Python 3.0 .. 3.2
* Python 3.0 list comprehension
* Fix Issues #310, #344, #377, #391, #409, #414
* Limited support for 3.8+ f-string "=" specifier
* Correct 2.5-7 relative import formatting
* Miscellaneous bug fixing
* remove \n in lambda
* Python 2.6 grammar cleanup
* Correct some Python 2.6 chained-compare decompilation
* Ensure no parenthesis subscript slices
* Correct 2.x formatting of the "slice2" nonterminal
* Correct 3.7 imports
* Improve "async for" parsing
* Handle BUILD_MAP opcode
* match Python AST better
* Correct 3.7 positional args
* PyPy 3.7 and PyPy 3.8 support
* Miscellaneous linting, isorting, blacking

3.8.0: 2021-10-29
=================

* Better handling of invalid bytecode magic
@@ -75,7 +75,7 @@ entry_points = {
    ]
}
ftp_url = None
install_requires = ["spark-parser >= 1.8.9, < 1.9.0", "xdis >= 6.0.2, < 6.1.0"]
install_requires = ["spark-parser >= 1.8.9, < 1.9.0", "xdis >= 6.0.2, < 6.2.0"]

license = "GPL3"
mailing_list = "python-debugger@googlegroups.com"
admin-tools/check-3.3-3.5-versions.sh (new file, 27 lines)
@@ -0,0 +1,27 @@
#!/bin/bash
function finish {
    cd $owd
}
owd=$(pwd)
trap finish EXIT

cd $(dirname ${BASH_SOURCE[0]})
if ! source ./pyenv-3.3-3.5-versions ; then
    exit $?
fi
if ! source ./setup-python-3.3.sh ; then
    exit $?
fi

cd ..
for version in $PYVERSIONS; do
    echo --- $version ---
    if ! pyenv local $version ; then
        exit $?
    fi
    make clean && python setup.py develop
    if ! make check ; then
        exit $?
    fi
    echo === $version ===
done
@@ -5,4 +5,4 @@ if [[ $0 == ${BASH_SOURCE[0]} ]] ; then
    echo "This script should be *sourced* rather than run directly through bash"
    exit 1
fi
export PYVERSIONS='3.6.15 pypy3.6-7.3.1 3.7.16 pypy-3.7-3.9 pypy3.8-7.3.10 pyston-2.3.5 3.8.14'
export PYVERSIONS='3.6.15 pypy3.6-7.3.1 3.7.16 pypy3.7-7.3.9 pypy3.8-7.3.10 pyston-2.3.5 3.8.16'
@@ -1,5 +1,5 @@
#!/bin/bash
PYTHON_VERSION=3.7.14
PYTHON_VERSION=3.7.16

function checkout_version {
    local repo=$1
@@ -7,5 +7,5 @@ PYTHON ?= python
test check pytest:
	@PYTHON_VERSION=`$(PYTHON) -V 2>&1 | cut -d ' ' -f 2 | cut -d'.' -f1,2`; \
	if [[ $$PYTHON_VERSION > 3.2 ]] || [[ $$PYTHON_VERSION == 2.7 ]] || [[ $$PYTHON_VERSION == 2.6 ]]; then \
		py.test; \
		$(PYTHON) -m pytest .; \
	fi
@@ -6,4 +6,4 @@ pytest
Click~=7.0
xdis>=6.0.4
configobj~=5.0.6
setuptools~=65.3.0
setuptools~=65.5.1
test/bytecode_3.3_run/02_make_closure.pyc (new binary file, not shown)
test/bytecode_3.4_run/02_make_closure.pyc (new binary file, not shown)
test/simple_source/bug27+/01_argument_quoting.py (new file, 7 lines)
@@ -0,0 +1,7 @@
# Bug was erroneously putting quotes around Exception on decompilation
# RUNNABLE!

"""This program is self-checking!"""
z = ["y", Exception]
assert z[0] == "y"
assert issubclass(z[1], Exception)
test/simple_source/bug34/02_make_closure.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Related to #426

# This file is RUNNABLE!
"""This program is self-checking!"""

a = 5
class MakeClosureTest():
    # This function uses MAKE_CLOSURE with annotation args
    def __init__(self, dev: str, b: bool):
        super().__init__()
        self.dev = dev
        self.b = b
        self.a = a

x = MakeClosureTest("dev", True)
assert x.dev == "dev"
assert x.b == True
assert x.a == 5
@@ -81,7 +81,7 @@ SKIP_TESTS=(
    [test_winreg.py]=1 # it fails on its own
    [test_winsound.py]=1 # it fails on its own

    [test_zipimport_support.py]=1
    [test_zipimport_support.py]=1 # expected test to raise ImportError
    [test_zipfile64.py]=1 # Skip Long test
    # .pyenv/versions/2.6.9/lib/python2.6/lib2to3/refactor.pyc
    # .pyenv/versions/2.6.9/lib/python2.6/pyclbr.pyc
@@ -22,25 +22,20 @@ SKIP_TESTS=(
    [test_doctest2.py]=1 # Fails on its own

    [test_format.py]=1 # Control flow "and" vs nested "if"
    [test_float.py]=1
    [test_grp.py]=1 # test takes too long, works interactively though
    [test_io.py]=1 # Test takes too long to run
    [test_ioctl.py]=1 # Test takes too long to run
    [test_lib2to3.py]=1 # test takes too long to run: 28 seconds
    [test_memoryio.py]=1 # FIX
    [test_multiprocessing.py]=1 # On uncompyle2, takes 24 secs
    [test_poll.py]=1 # test takes too long to run: 11 seconds
    [test_regrtest.py]=1 #
    [test_runpy.py]=1 # Long and fails on its own
    [test_socket.py]=1 # Runs ok but takes 22 seconds
    [test_ssl.py]=1 #
    [test_ssl.py]=1 # Fails on its own
    [test_subprocess.py]=1 # Runs ok but takes 22 seconds
    [test_sys_settrace.py]=1 # Line numbers are expected to be different

    [test_traceback.py]=1 # Line numbers change - duh.
    [test_xpickle.py]=1 # Runs ok but takes 72 seconds
    [test_zipfile64.py]=1 # Runs ok but takes 204 seconds
    [test_zipimport.py]=1 #
    [test_zipimport.py]=1 # expected test to raise ImportError
)
# 334 unit-test files in about 15 minutes
@@ -29,11 +29,18 @@ Step 2: Run the test:

from __future__ import print_function

import getopt, os, py_compile, sys, shutil, tempfile, time

import getopt
import os
import py_compile
import shutil
import sys
import tempfile
import time
from fnmatch import fnmatch

from xdis.version_info import PYTHON_VERSION_TRIPLE

from uncompyle6.main import main
from xdis.version_info import PYTHON_VERSION


def get_srcdir():
@@ -164,10 +171,10 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):

    if opts["do_compile"]:
        compiled_version = opts["compiled_version"]
        if compiled_version and PYTHON_VERSION != compiled_version:
        if compiled_version and PYTHON_VERSION_TRIPLE != compiled_version:
            print(
                "Not compiling: desired Python version is %s but we are running %s"
                % (compiled_version, PYTHON_VERSION),
                % (compiled_version, PYTHON_VERSION_TRIPLE),
                file=sys.stderr,
            )
        else:
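The change from PYTHON_VERSION to PYTHON_VERSION_TRIPLE here (and in the scanner, token, and constant-folding files later in this diff) moves version checks from a float to a (major, minor, micro) tuple. A minimal sketch of why tuple comparison is the safer test, assuming only that PYTHON_VERSION_TRIPLE is such a tuple; the example value is made up:

    # Float comparison misorders two-digit minor releases: the literal 3.10
    # is the float 3.1, which sorts below 3.9.
    assert 3.10 < 3.9

    # Tuple comparison orders release numbers element by element.
    PYTHON_VERSION_TRIPLE = (3, 10, 4)  # hypothetical running interpreter
    assert PYTHON_VERSION_TRIPLE > (3, 9)
    assert not PYTHON_VERSION_TRIPLE < (3, 0)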
@@ -199,7 +199,7 @@ class PythonParser(GenericASTBuilder):
                if instructions[finish].linestart:
                    break
                pass
        if start > 0:
        if start >= 0:
            err_token = instructions[index]
            print("Instruction context:")
            for i in range(start, finish):
@@ -213,10 +213,16 @@ class PythonParser(GenericASTBuilder):
        raise ParserError(None, -1, self.debug["reduce"])

    def get_pos_kw(self, token):
        """Return then the number of positional parameters and
        represented by the attr field of token"""
        """
        Return the number of positional parameters and keyword
        parameters represented by the attr (operand) field of
        token.

        This appears in CALL_FUNCTION or CALL_METHOD (PyPy) tokens.
        """
        # Low byte indicates number of positional parameters,
        # high byte number of keyword parameters
        assert token.kind.startswith("CALL_FUNCTION") or token.kind.startswith("CALL_METHOD")
        args_pos = token.attr & 0xFF
        args_kw = (token.attr >> 8) & 0xFF
        return args_pos, args_kw
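As a quick illustration of the byte packing that get_pos_kw() decodes, here is a small sketch (not part of the diff; the helper name and operand value are made up):

    def unpack_call_operand(attr: int):
        # CALL_FUNCTION / CALL_METHOD operand: the low byte holds the
        # positional-argument count, the next byte the keyword-argument count.
        pos_args_count = attr & 0xFF
        kw_args_count = (attr >> 8) & 0xFF
        return pos_args_count, kw_args_count

    # An operand of 0x0203 encodes 3 positional and 2 keyword arguments.
    assert unpack_call_operand(0x0203) == (3, 2)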
@@ -541,9 +541,9 @@ class Python3Parser(PythonParser):
        # token found, while this one doesn't.
        if self.version < (3, 6):
            call_function = self.call_fn_name(call_fn_tok)
            args_pos, args_kw = self.get_pos_kw(call_fn_tok)
            pos_args_count, kw_args_count = self.get_pos_kw(call_fn_tok)
            rule = "build_class ::= LOAD_BUILD_CLASS mkfunc %s" "%s" % (
                ("expr " * (args_pos - 1) + ("kwarg " * args_kw)),
                ("expr " * (pos_args_count - 1) + ("kwarg " * kw_args_count)),
                call_function,
            )
        else:
@@ -552,10 +552,10 @@ class Python3Parser(PythonParser):
        if call_function.startswith("CALL_FUNCTION_KW"):
            self.addRule("classdef ::= build_class_kw store", nop_func)
            if is_pypy:
                args_pos, args_kw = self.get_pos_kw(call_fn_tok)
                pos_args_count, kw_args_count = self.get_pos_kw(call_fn_tok)
                rule = "build_class_kw ::= LOAD_BUILD_CLASS mkfunc %s%s%s" % (
                    "expr " * (args_pos - 1),
                    "kwarg " * (args_kw),
                    "expr " * (pos_args_count - 1),
                    "kwarg " * (kw_args_count),
                    call_function,
                )
            else:
@@ -581,7 +581,7 @@ class Python3Parser(PythonParser):

        classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc {expr}^n-1 CALL_FUNCTION_n
        """
        args_pos, args_kw = self.get_pos_kw(token)
        pos_args_count, kw_args_count = self.get_pos_kw(token)

        # Additional exprs for * and ** args:
        #  0 if neither
@@ -590,7 +590,7 @@ class Python3Parser(PythonParser):
        # Yes, this computation based on instruction name is a little bit hoaky.
        nak = (len(opname) - len("CALL_FUNCTION")) // 3

        uniq_param = args_kw + args_pos
        uniq_param = kw_args_count + pos_args_count

        # Note: 3.5+ have subclassed this method; so we don't handle
        # 'CALL_FUNCTION_VAR' or 'CALL_FUNCTION_EX' here.
@@ -599,16 +599,16 @@ class Python3Parser(PythonParser):
            token.kind = self.call_fn_name(token)
            rule = (
                "call ::= expr "
                + ("pos_arg " * args_pos)
                + ("kwarg " * args_kw)
                + ("pos_arg " * pos_args_count)
                + ("kwarg " * kw_args_count)
                + token.kind
            )
        else:
            token.kind = self.call_fn_name(token)
            rule = (
                "call ::= expr "
                + ("pos_arg " * args_pos)
                + ("kwarg " * args_kw)
                + ("pos_arg " * pos_args_count)
                + ("kwarg " * kw_args_count)
                + "expr " * nak
                + token.kind
            )
@@ -616,11 +616,11 @@ class Python3Parser(PythonParser):
        self.add_unique_rule(rule, token.kind, uniq_param, customize)

        if "LOAD_BUILD_CLASS" in self.seen_ops:
            if next_token == "CALL_FUNCTION" and next_token.attr == 1 and args_pos > 1:
            if next_token == "CALL_FUNCTION" and next_token.attr == 1 and pos_args_count > 1:
                rule = "classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d" % (
                    ("expr " * (args_pos - 1)),
                    ("expr " * (pos_args_count - 1)),
                    opname,
                    args_pos,
                    pos_args_count,
                )
                self.add_unique_rule(rule, token.kind, uniq_param, customize)
@@ -845,18 +845,18 @@ class Python3Parser(PythonParser):

        elif opname in ("BUILD_CONST_LIST", "BUILD_CONST_DICT", "BUILD_CONST_SET"):
            if opname == "BUILD_CONST_DICT":
                rule = f"""
                rule = """
                    add_consts ::= ADD_VALUE*
                    const_list ::= COLLECTION_START add_consts {opname}
                    const_list ::= COLLECTION_START add_consts %s
                    dict ::= const_list
                    expr ::= dict
                    """
                    """ % opname
            else:
                rule = f"""
                rule = """
                    add_consts ::= ADD_VALUE*
                    const_list ::= COLLECTION_START add_consts {opname}
                    const_list ::= COLLECTION_START add_consts %s
                    expr ::= const_list
                    """
                    """ % opname
            self.addRule(rule, nop_func)

        elif opname_base in (
@@ -955,14 +955,14 @@ class Python3Parser(PythonParser):
        elif opname_base == "CALL_METHOD":
            # PyPy and Python 3.7+ only - DRY with parse2

            args_pos, args_kw = self.get_pos_kw(token)
            pos_args_count, kw_args_count = self.get_pos_kw(token)

            # number of apply equiv arguments:
            nak = (len(opname_base) - len("CALL_METHOD")) // 3
            rule = (
                "call ::= expr "
                + ("pos_arg " * args_pos)
                + ("kwarg " * args_kw)
                + ("pos_arg " * pos_args_count)
                + ("kwarg " * kw_args_count)
                + "expr " * nak
                + opname
            )
@@ -1096,7 +1096,7 @@ class Python3Parser(PythonParser):
            """
            self.addRule(rule, nop_func)

            args_pos, args_kw, annotate_args = token.attr
            pos_args_count, kw_args_count, annotate_args = token.attr

            # FIXME: Fold test into add_make_function_rule
            if self.version < (3, 3):
@@ -1105,7 +1105,7 @@ class Python3Parser(PythonParser):
                j = 2
            if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
                rule_pat = "lambda_body ::= %sload_closure LOAD_LAMBDA %%s%s" % (
                    "pos_arg " * args_pos,
                    "pos_arg " * pos_args_count,
                    opname,
                )
                self.add_make_function_rule(rule_pat, opname, token.attr, customize)
@@ -1113,7 +1113,7 @@ class Python3Parser(PythonParser):
            if has_get_iter_call_function1:
                rule_pat = (
                    "generator_exp ::= %sload_closure load_genexpr %%s%s expr "
                    "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
                    "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
                )
                self.add_make_function_rule(rule_pat, opname, token.attr, customize)
@@ -1129,7 +1129,7 @@ class Python3Parser(PythonParser):
                rule_pat = (
                    "listcomp ::= %sload_closure LOAD_LISTCOMP %%s%s expr "
                    "GET_ITER CALL_FUNCTION_1"
                    % ("pos_arg " * args_pos, opname)
                    % ("pos_arg " * pos_args_count, opname)
                )
                self.add_make_function_rule(
                    rule_pat, opname, token.attr, customize
@@ -1138,7 +1138,7 @@ class Python3Parser(PythonParser):
                rule_pat = (
                    "set_comp ::= %sload_closure LOAD_SETCOMP %%s%s expr "
                    "GET_ITER CALL_FUNCTION_1"
                    % ("pos_arg " * args_pos, opname)
                    % ("pos_arg " * pos_args_count, opname)
                )
                self.add_make_function_rule(
                    rule_pat, opname, token.attr, customize
@@ -1149,13 +1149,13 @@ class Python3Parser(PythonParser):
                self.add_unique_rule(
                    "dict_comp ::= %sload_closure LOAD_DICTCOMP %s "
                    "expr GET_ITER CALL_FUNCTION_1"
                    % ("pos_arg " * args_pos, opname),
                    % ("pos_arg " * pos_args_count, opname),
                    opname,
                    token.attr,
                    customize,
                )

            if args_kw > 0:
            if kw_args_count > 0:
                kwargs_str = "kwargs "
            else:
                kwargs_str = ""
@@ -1167,36 +1167,40 @@ class Python3Parser(PythonParser):
                    "mkfunc_annotate ::= %s%s%sannotate_tuple load_closure LOAD_CODE %s"
                    % (
                        kwargs_str,
                        "pos_arg " * args_pos,
                        "annotate_arg " * (annotate_args - 1),
                        "pos_arg " * pos_args_count,
                        "annotate_arg " * (annotate_args),
                        opname,
                    )
                )
            else:
                rule = "mkfunc ::= %s%sload_closure LOAD_CODE %s" % (
                    kwargs_str,
                    "pos_arg " * args_pos,
                    "pos_arg " * pos_args_count,
                    opname,
                )
        elif self.version == (3, 3):
            self.add_unique_rule(rule, opname, token.attr, customize)

        elif (3, 3) <= self.version < (3, 6):
            if annotate_args > 0:
                rule = (
                    "mkfunc_annotate ::= %s%s%sannotate_tuple load_closure LOAD_CODE LOAD_STR %s"
                    % (
                        kwargs_str,
                        "pos_arg " * args_pos,
                        "annotate_arg " * (annotate_args - 1),
                        "pos_arg " * pos_args_count,
                        "annotate_arg " * (annotate_args),
                        opname,
                    )
                )
            else:
                rule = "mkfunc ::= %s%sload_closure LOAD_CODE LOAD_STR %s" % (
                    kwargs_str,
                    "pos_arg " * args_pos,
                    "pos_arg " * pos_args_count,
                    opname,
                )
            self.add_unique_rule(rule, opname, token.attr, customize)

        elif self.version >= (3, 4):

        if self.version >= (3, 4):
            if not self.is_pypy:
                load_op = "LOAD_STR"
            else:
@@ -1206,33 +1210,33 @@ class Python3Parser(PythonParser):
                rule = (
                    "mkfunc_annotate ::= %s%s%sannotate_tuple load_closure %s %s"
                    % (
                        "pos_arg " * args_pos,
                        "pos_arg " * pos_args_count,
                        kwargs_str,
                        "annotate_arg " * (annotate_args - 1),
                        "annotate_arg " * (annotate_args),
                        load_op,
                        opname,
                    )
                )
            else:
                rule = "mkfunc ::= %s%s load_closure LOAD_CODE %s %s" % (
                    "pos_arg " * args_pos,
                    "pos_arg " * pos_args_count,
                    kwargs_str,
                    load_op,
                    opname,
                )

            self.add_unique_rule(rule, opname, token.attr, customize)
        self.add_unique_rule(rule, opname, token.attr, customize)

        if args_kw == 0:
        if kw_args_count == 0:
            rule = "mkfunc ::= %sload_closure load_genexpr %s" % (
                "pos_arg " * args_pos,
                "pos_arg " * pos_args_count,
                opname,
            )
            self.add_unique_rule(rule, opname, token.attr, customize)

        if self.version < (3, 4):
            rule = "mkfunc ::= %sload_closure LOAD_CODE %s" % (
                "expr " * args_pos,
                "expr " * pos_args_count,
                opname,
            )
            self.add_unique_rule(rule, opname, token.attr, customize)
@@ -1243,10 +1247,10 @@ class Python3Parser(PythonParser):
        if self.version >= (3, 6):
            # The semantics of MAKE_FUNCTION in 3.6 are totally different from
            # before.
            args_pos, args_kw, annotate_args, closure = token.attr
            stack_count = args_pos + args_kw + annotate_args
            pos_args_count, kw_args_count, annotate_args, closure = token.attr
            stack_count = pos_args_count + kw_args_count + annotate_args
            if closure:
                if args_pos:
                if pos_args_count:
                    rule = "lambda_body ::= %s%s%s%s" % (
                        "expr " * stack_count,
                        "load_closure " * closure,
@@ -1279,14 +1283,14 @@ class Python3Parser(PythonParser):
                if has_get_iter_call_function1:
                    rule_pat = (
                        "generator_exp ::= %sload_genexpr %%s%s expr "
                        "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
                        "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
                    )
                    self.add_make_function_rule(
                        rule_pat, opname, token.attr, customize
                    )
                    rule_pat = (
                        "generator_exp ::= %sload_closure load_genexpr %%s%s expr "
                        "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
                        "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
                    )
                    self.add_make_function_rule(
                        rule_pat, opname, token.attr, customize
@@ -1308,7 +1312,7 @@ class Python3Parser(PythonParser):
                    rule_pat = (
                        "listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
                        "GET_ITER CALL_FUNCTION_1"
                        % ("expr " * args_pos, opname)
                        % ("expr " * pos_args_count, opname)
                    )
                    self.add_make_function_rule(
                        rule_pat, opname, token.attr, customize
@@ -1316,8 +1320,8 @@ class Python3Parser(PythonParser):

                if self.is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LAMBDA"):
                    rule_pat = "lambda_body ::= %s%sLOAD_LAMBDA %%s%s" % (
                        ("pos_arg " * args_pos),
                        ("kwarg " * args_kw),
                        ("pos_arg " * pos_args_count),
                        ("kwarg " * kw_args_count),
                        opname,
                    )
                    self.add_make_function_rule(
@@ -1326,9 +1330,9 @@ class Python3Parser(PythonParser):
            continue

        if self.version < (3, 6):
            args_pos, args_kw, annotate_args = token.attr
            pos_args_count, kw_args_count, annotate_args = token.attr
        else:
            args_pos, args_kw, annotate_args, closure = token.attr
            pos_args_count, kw_args_count, annotate_args, closure = token.attr

        if self.version < (3, 3):
            j = 1
@@ -1338,7 +1342,7 @@ class Python3Parser(PythonParser):
        if has_get_iter_call_function1:
            rule_pat = (
                "generator_exp ::= %sload_genexpr %%s%s expr "
                "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
                "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
            )
            self.add_make_function_rule(rule_pat, opname, token.attr, customize)

@@ -1350,7 +1354,7 @@ class Python3Parser(PythonParser):
            # Todo: For Pypy we need to modify this slightly
            rule_pat = (
                "listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
                "GET_ITER CALL_FUNCTION_1" % ("expr " * args_pos, opname)
                "GET_ITER CALL_FUNCTION_1" % ("expr " * pos_args_count, opname)
            )
            self.add_make_function_rule(
                rule_pat, opname, token.attr, customize
@@ -1359,13 +1363,13 @@ class Python3Parser(PythonParser):
        # FIXME: Fold test into add_make_function_rule
        if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
            rule_pat = "lambda_body ::= %s%sLOAD_LAMBDA %%s%s" % (
                ("pos_arg " * args_pos),
                ("kwarg " * args_kw),
                ("pos_arg " * pos_args_count),
                ("kwarg " * kw_args_count),
                opname,
            )
            self.add_make_function_rule(rule_pat, opname, token.attr, customize)

        if args_kw == 0:
        if kw_args_count == 0:
            kwargs = "no_kwargs"
            self.add_unique_rule("no_kwargs ::=", opname, token.attr, customize)
        else:
@@ -1375,13 +1379,13 @@ class Python3Parser(PythonParser):
                # positional args after keyword args
                rule = "mkfunc ::= %s %s%s%s" % (
                    kwargs,
                    "pos_arg " * args_pos,
                    "pos_arg " * pos_args_count,
                    "LOAD_CODE ",
                    opname,
                )
                self.add_unique_rule(rule, opname, token.attr, customize)
                rule = "mkfunc ::= %s%s%s" % (
                    "pos_arg " * args_pos,
                    "pos_arg " * pos_args_count,
                    "LOAD_CODE ",
                    opname,
                )
@@ -1389,14 +1393,14 @@ class Python3Parser(PythonParser):
                # positional args after keyword args
                rule = "mkfunc ::= %s %s%s%s" % (
                    kwargs,
                    "pos_arg " * args_pos,
                    "pos_arg " * pos_args_count,
                    "LOAD_CODE LOAD_STR ",
                    opname,
                )
            elif self.version >= (3, 6):
                # positional args before keyword args
                rule = "mkfunc ::= %s%s %s%s" % (
                    "pos_arg " * args_pos,
                    "pos_arg " * pos_args_count,
                    kwargs,
                    "LOAD_CODE LOAD_STR ",
                    opname,
@@ -1404,7 +1408,7 @@ class Python3Parser(PythonParser):
            elif self.version >= (3, 4):
                # positional args before keyword args
                rule = "mkfunc ::= %s%s %s%s" % (
                    "pos_arg " * args_pos,
                    "pos_arg " * pos_args_count,
                    kwargs,
                    "LOAD_CODE LOAD_STR ",
                    opname,
@@ -1412,7 +1416,7 @@ class Python3Parser(PythonParser):
            else:
                rule = "mkfunc ::= %s%sexpr %s" % (
                    kwargs,
                    "pos_arg " * args_pos,
                    "pos_arg " * pos_args_count,
                    opname,
                )
            self.add_unique_rule(rule, opname, token.attr, customize)
@@ -1422,8 +1426,8 @@ class Python3Parser(PythonParser):
                rule = (
                    "mkfunc_annotate ::= %s%sannotate_tuple LOAD_CODE LOAD_STR %s"
                    % (
                        ("pos_arg " * (args_pos)),
                        ("call " * (annotate_args - 1)),
                        ("pos_arg " * pos_args_count),
                        ("call " * annotate_args),
                        opname,
                    )
                )
@@ -1431,8 +1435,8 @@ class Python3Parser(PythonParser):
                rule = (
                    "mkfunc_annotate ::= %s%sannotate_tuple LOAD_CODE LOAD_STR %s"
                    % (
                        ("pos_arg " * (args_pos)),
                        ("annotate_arg " * (annotate_args - 1)),
                        ("pos_arg " * pos_args_count),
                        ("annotate_arg " * annotate_args),
                        opname,
                    )
                )
@@ -1443,21 +1447,21 @@ class Python3Parser(PythonParser):
            if self.version == (3, 3):
                # 3.3 puts kwargs before pos_arg
                pos_kw_tuple = (
                    ("kwargs " * args_kw),
                    ("pos_arg " * (args_pos)),
                    ("kwargs " * kw_args_count),
                    ("pos_arg " * pos_args_count),
                )
            else:
                # 3.4 and 3.5 put pos_arg before kwargs
                pos_kw_tuple = (
                    "pos_arg " * (args_pos),
                    ("kwargs " * args_kw),
                    "pos_arg " * (pos_args_count),
                    ("kwargs " * kw_args_count),
                )
            rule = (
                "mkfunc_annotate ::= %s%s%sannotate_tuple LOAD_CODE LOAD_STR EXTENDED_ARG %s"
                % (
                    pos_kw_tuple[0],
                    pos_kw_tuple[1],
                    ("call " * (annotate_args - 1)),
                    ("call " * annotate_args),
                    opname,
                )
            )
@@ -1467,7 +1471,7 @@ class Python3Parser(PythonParser):
                % (
                    pos_kw_tuple[0],
                    pos_kw_tuple[1],
                    ("annotate_arg " * (annotate_args - 1)),
                    ("annotate_arg " * annotate_args),
                    opname,
                )
            )
@@ -1476,9 +1480,9 @@ class Python3Parser(PythonParser):
            rule = (
                "mkfunc_annotate ::= %s%s%sannotate_tuple LOAD_CODE EXTENDED_ARG %s"
                % (
                    ("kwargs " * args_kw),
                    ("pos_arg " * (args_pos)),
                    ("annotate_arg " * (annotate_args - 1)),
                    ("kwargs " * kw_args_count),
                    ("pos_arg " * (pos_args_count)),
                    ("annotate_arg " * annotate_args),
                    opname,
                )
            )
@@ -1486,9 +1490,9 @@ class Python3Parser(PythonParser):
            rule = (
                "mkfunc_annotate ::= %s%s%sannotate_tuple LOAD_CODE EXTENDED_ARG %s"
                % (
                    ("kwargs " * args_kw),
                    ("pos_arg " * (args_pos)),
                    ("call " * (annotate_args - 1)),
                    ("kwargs " * kw_args_count),
                    ("pos_arg " * pos_args_count),
                    ("call " * annotate_args),
                    opname,
                )
            )
@@ -74,8 +74,8 @@ class Python30Parser(Python31Parser):
        # Need to keep LOAD_FAST as index 1
        set_comp_header ::= BUILD_SET_0 DUP_TOP STORE_FAST
        set_comp_func ::= set_comp_header
                          LOAD_FAST FOR_ITER store comp_iter
                          JUMP_BACK POP_TOP JUMP_BACK RETURN_VALUE RETURN_LAST
                          LOAD_ARG FOR_ITER store comp_iter
                          JUMP_BACK COME_FROM POP_TOP JUMP_BACK RETURN_VALUE RETURN_LAST

        list_comp_header ::= BUILD_LIST_0 DUP_TOP STORE_FAST
        list_comp ::= list_comp_header
@@ -92,7 +92,7 @@ class Python32Parser(Python3Parser):
                "LOAD_CONST LOAD_CODE EXTENDED_ARG %s"
            ) % (
                ("pos_arg " * args_pos),
                ("annotate_arg " * (annotate_args - 1)),
                ("annotate_arg " * (annotate_args)),
                opname,
            )
            self.add_unique_rule(rule, opname, token.attr, customize)
@@ -1,4 +1,4 @@
# Copyright (c) 2016-2017, 2019-2020, 2022 Rocky Bernstein
# Copyright (c) 2016-2017, 2019-2020, 2022-2023 Rocky Bernstein
"""
Python 3.7 base code. We keep non-custom-generated grammar rules out of this file.
"""
@@ -431,35 +431,39 @@ class Python37BaseParser(PythonParser):
            "BUILD_TUPLE",
            "BUILD_TUPLE_UNPACK",
        ):
            v = token.attr
            collection_size = token.attr

            is_LOAD_CLOSURE = False
            if opname_base == "BUILD_TUPLE":
                # If is part of a "load_closure", then it is not part of a
                # "list".
                is_LOAD_CLOSURE = True
                for j in range(v):
                for j in range(collection_size):
                    if tokens[i - j - 1].kind != "LOAD_CLOSURE":
                        is_LOAD_CLOSURE = False
                        break
                if is_LOAD_CLOSURE:
                    rule = "load_closure ::= %s%s" % (("LOAD_CLOSURE " * v), opname)
                    rule = "load_closure ::= %s%s" % (
                        ("LOAD_CLOSURE " * collection_size),
                        opname,
                    )
                    self.add_unique_rule(rule, opname, token.attr, customize)
            if not is_LOAD_CLOSURE or v == 0:
            if not is_LOAD_CLOSURE or collection_size == 0:
                # We do this complicated test to speed up parsing of
                # pathologically long literals, especially those over 1024.
                build_count = token.attr
                thousands = build_count // 1024
                thirty32s = (build_count // 32) % 32
                thousands = collection_size // 1024
                thirty32s = (collection_size // 32) % 32
                if thirty32s > 0:
                    rule = "expr32 ::=%s" % (" expr" * 32)
                    self.add_unique_rule(rule, opname_base, build_count, customize)
                    self.add_unique_rule(
                        rule, opname_base, collection_size, customize
                    )
                    pass
                if thousands > 0:
                    self.add_unique_rule(
                        "expr1024 ::=%s" % (" expr32" * 32),
                        opname_base,
                        build_count,
                        collection_size,
                        customize,
                    )
                    pass
@@ -468,7 +472,7 @@ class Python37BaseParser(PythonParser):
                    ("%s ::= " % collection)
                    + "expr1024 " * thousands
                    + "expr32 " * thirty32s
                    + "expr " * (build_count % 32)
                    + "expr " * (collection_size % 32)
                    + opname
                )
                self.add_unique_rules(["expr ::= %s" % collection, rule], customize)
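To make the expr1024/expr32 decomposition above concrete, here is a small sketch (not from the diff; the function name and the BUILD_LIST_1059 opname are illustrative) of the rule string it produces for a pathologically long literal:

    def literal_collection_rule(collection: str, opname: str, collection_size: int) -> str:
        # Same decomposition as above: groups of 1024, then groups of 32, then the rest.
        thousands = collection_size // 1024
        thirty32s = (collection_size // 32) % 32
        return (
            ("%s ::= " % collection)
            + "expr1024 " * thousands
            + "expr32 " * thirty32s
            + "expr " * (collection_size % 32)
            + opname
        )

    # 1059 = 1 * 1024 + 1 * 32 + 3, so the grammar only has to match 5 symbols
    # before the BUILD opcode instead of 1059 bare "expr" symbols.
    assert (
        literal_collection_rule("list", "BUILD_LIST_1059", 1059)
        == "list ::= expr1024 expr32 expr expr expr BUILD_LIST_1059"
    )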
@@ -478,8 +482,8 @@ class Python37BaseParser(PythonParser):
            if token.attr == 2:
                self.add_unique_rules(
                    [
                        "expr ::= build_slice2",
                        "build_slice2 ::= expr expr BUILD_SLICE_2",
                        "expr ::= slice2",
                        "slice2 ::= expr expr BUILD_SLICE_2",
                    ],
                    customize,
                )
@@ -489,8 +493,8 @@ class Python37BaseParser(PythonParser):
                )
                self.add_unique_rules(
                    [
                        "expr ::= build_slice3",
                        "build_slice3 ::= expr expr expr BUILD_SLICE_3",
                        "expr ::= slice3",
                        "slice3 ::= expr expr expr BUILD_SLICE_3",
                    ],
                    customize,
                )
@@ -524,6 +528,7 @@ class Python37BaseParser(PythonParser):

            if opname == "CALL_FUNCTION" and token.attr == 1:
                rule = """
                    expr ::= dict_comp
                    dict_comp ::= LOAD_DICTCOMP LOAD_STR MAKE_FUNCTION_0 expr
                                  GET_ITER CALL_FUNCTION_1
                    classdefdeco1 ::= expr classdefdeco2 CALL_FUNCTION_1
@@ -558,11 +563,12 @@ class Python37BaseParser(PythonParser):
            nak = (len(opname_base) - len("CALL_METHOD")) // 3
            rule = (
                "call ::= expr "
                + ("expr " * args_pos)
                + ("pos_arg " * args_pos)
                + ("kwarg " * args_kw)
                + "expr " * nak
                + opname
            )

            self.add_unique_rule(rule, opname, token.attr, customize)

        elif opname == "CONTINUE":
@@ -1252,20 +1258,13 @@ class Python37BaseParser(PythonParser):
        try:
            if fn:
                return fn(self, lhs, n, rule, ast, tokens, first, last)
        except:
        except Exception:
            import sys, traceback

            print(
                ("Exception in %s %s\n"
                 + "rule: %s\n"
                 + "offsets %s .. %s")
                % (
                    fn.__name__,
                    sys.exc_info()[1],
                    rule2str(rule),
                    tokens[first].offset,
                    tokens[last].offset,
                )
                f"Exception in {fn.__name__} {sys.exc_info()[1]}\n"
                + f"rule: {rule2str(rule)}\n"
                + f"offsets {tokens[first].offset} .. {tokens[last].offset}"
            )
            print(traceback.print_tb(sys.exc_info()[2], -1))
            raise ParserError(tokens[last], tokens[last].off2int(), self.debug["rules"])
@@ -81,6 +81,16 @@ IFELSE_STMT_RULES = frozenset(
                "come_froms",
            ),
        ),
        (
            'ifelsestmtc',
            (
                'testexpr',
                'c_stmts_opt',
                'JUMP_FORWARD',
                'else_suite',
                'come_froms'
            ),
        ),
        (
            "ifelsestmt",
            (
@@ -21,7 +21,7 @@ scanner/ingestion module. From here we call various version-specific
scanners, e.g. for Python 2.7 or 3.4.
"""

from typing import Optional
from typing import Optional, Tuple
from array import array
from collections import namedtuple
@@ -600,8 +600,25 @@ class Scanner(object):
        return self.Token


def parse_fn_counts(argc):
    return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)
# TODO: after the next xdis release, use from there instead.
def parse_fn_counts_30_35(argc: int) -> Tuple[int, int, int]:
    """
    In Python 3.0 to 3.5, MAKE_CLOSURE and MAKE_FUNCTION encode the
    counts of positional, default + named, and annotation arguments
    as packed byte fields in the lower 24 bits of ``argc``. The high
    bits of ``argc`` may have come from an EXTENDED_ARG instruction.
    Here, we unpack the values from the ``argc`` int and return a
    triple of the positional args, named args, and annotation args.
    """
    annotate_count = (argc >> 16) & 0x7FFF
    # For some reason that I don't understand, annotate_args is off by one
    # when there is an EXTENDED_ARG instruction from what is documented in
    # https://docs.python.org/3.4/library/dis.html#opcode-MAKE_CLOSURE
    if annotate_count > 1:
        annotate_count -= 1
    return ((argc & 0xFF), (argc >> 8) & 0xFF, annotate_count)


def get_scanner(version, is_pypy=False, show_asm=None):
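A small usage sketch of the packing that parse_fn_counts_30_35() unpacks (the argc values below are made up, not taken from real bytecode):

    # argc packs three fields: positional count (low byte), default/named-pair
    # count (next byte), and the annotation count in the bits above that.
    argc = (0 << 16) | (1 << 8) | 3      # 3 positional, 1 named pair, no annotations
    assert parse_fn_counts_30_35(argc) == (3, 1, 0)

    # When the annotation field is greater than 1 (typically because EXTENDED_ARG
    # contributed the high bits), the function compensates for the off-by-one
    # noted in its comment.
    argc = (3 << 16) | (0 << 8) | 2
    assert parse_fn_counts_30_35(argc) == (2, 0, 2)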
@@ -1,4 +1,4 @@
# Copyright (c) 2015-2022 by Rocky Bernstein
# Copyright (c) 2015-2023 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
@@ -38,7 +38,7 @@ from __future__ import print_function
from copy import copy

from xdis import code2num, iscode, op_has_argument, instruction_size
from xdis.bytecode import _get_const_info
from xdis.bytecode import _get_const_info, _get_name_info
from uncompyle6.scanner import Scanner, Token

from sys import intern
@@ -360,6 +360,7 @@ class Scanner2(Scanner):
                    pattr = const
                    pass
            elif op in self.opc.NAME_OPS:
                _, pattr = _get_name_info(oparg, names)
                pattr = names[oparg]
            elif op in self.opc.JREL_OPS:
                # use instead: hasattr(self, 'patch_continue'): ?
@@ -1,4 +1,4 @@
# Copyright (c) 2015-2019, 2021-2022 by Rocky Bernstein
# Copyright (c) 2015-2019, 2021-2023 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
@@ -41,7 +41,7 @@ from xdis import iscode, instruction_size, Instruction
from xdis.bytecode import _get_const_info

from uncompyle6.scanners.tok import Token
from uncompyle6.scanner import parse_fn_counts
from uncompyle6.scanner import parse_fn_counts_30_35
import xdis

# Get all the opcodes into globals
@@ -363,7 +363,7 @@ class Scanner3(Scanner):
            )
            new_tokens.append(
                Token(
                    opname=f"BUILD_DICT_OLDER",
                    opname="BUILD_DICT_OLDER",
                    attr=t.attr,
                    pattr=t.pattr,
                    offset=t.offset,
@@ -623,23 +623,29 @@ class Scanner3(Scanner):
                    flags >>= 1
                attr = attr[:4]  # remove last value: attr[5] == False
            else:
                pos_args, name_pair_args, annotate_args = parse_fn_counts(
                pos_args, name_pair_args, annotate_args = parse_fn_counts_30_35(
                    inst.argval
                )
                pattr = "%d positional, %d keyword only, %d annotated" % (
                    pos_args,
                    name_pair_args,
                    annotate_args,
                )
                if name_pair_args > 0:
                )

                pattr = f"{pos_args} positional, {name_pair_args} keyword only, {annotate_args} annotated"

                if name_pair_args > 0 and annotate_args > 0:
                    # FIXME: this should probably be K_
                    opname = "%s_N%d" % (opname, name_pair_args)
                    opname += f"_N{name_pair_args}_A{annotate_args}"
                    pass
                if annotate_args > 0:
                    opname = "%s_A_%d" % (opname, annotate_args)
                elif annotate_args > 0:
                    opname += f"_A_{annotate_args}"
                    pass
                opname = "%s_%d" % (opname, pos_args)
                elif name_pair_args > 0:
                    opname += f"_N_{name_pair_args}"
                    pass
                else:
                    # Rule customization matches MAKE_FUNCTION_...
                    # so make sure to add the "_"
                    opname += "_0"

                attr = (pos_args, name_pair_args, annotate_args)

            new_tokens.append(
                Token(
                    opname=opname,
@@ -1,4 +1,4 @@
# Copyright (c) 2016-2021 by Rocky Bernstein
# Copyright (c) 2016-2021, 2023 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 1999 John Aycock
#
@@ -180,7 +180,7 @@ class Token:
        elif name == "LOAD_ASSERT":
            return "%s%s %s" % (prefix, offset_opname, pattr)
        elif self.op in self.opc.NAME_OPS:
            if self.opc.version >= 3.0:
            if self.opc.version_tuple >= (3, 0):
                return "%s%s%s %s" % (prefix, offset_opname, argstr, self.attr)
        elif name == "EXTENDED_ARG":
            return "%s%s%s 0x%x << %s = %s" % (
@@ -170,6 +170,7 @@ def customize_for_version36(self, version):
                class_name = node[1][1].attr
                if self.is_pypy and class_name.find("<locals>") > 0:
                    class_name = class_name.split(".")[-1]

            else:
                class_name = node[1][2].attr
            build_class = node
@@ -206,23 +207,24 @@ def customize_for_version36(self, version):
            elif build_class[1][0] == "load_closure":
                # Python 3 with closures not functions
                load_closure = build_class[1]
                if hasattr(load_closure[-3], "attr"):
                    # Python 3.3 classes with closures work like this.
                    # Note have to test before 3.2 case because
                    # index -2 also has an attr.
                    subclass_code = load_closure[-3].attr
                elif hasattr(load_closure[-2], "attr"):
                    # Python 3.2 works like this
                    subclass_code = load_closure[-2].attr
                else:
                    raise "Internal Error n_classdef: cannot find class body"
                subclass_code = None
                for i in range(-4, -1):
                    if load_closure[i] == "LOAD_CODE":
                        subclass_code = load_closure[i].attr
                        break
                if subclass_code is None:
                    raise RuntimeError(
                        "Internal Error n_classdef: cannot find " "class body"
                    )
                if hasattr(build_class[3], "__len__"):
                    if not subclass_info:
                        subclass_info = build_class[3]
                elif hasattr(build_class[2], "__len__"):
                    subclass_info = build_class[2]
                else:
                    raise "Internal Error n_classdef: cannot superclass name"
                    raise RuntimeError(
                        "Internal Error n_classdef: cannot " "superclass name"
                    )
            elif node == "classdefdeco2":
                subclass_info = node
                subclass_code = build_class[1][0].attr
@@ -1,4 +1,4 @@
# Copyright (c) 2019-2022 by Rocky Bernstein
# Copyright (c) 2019-2023 by Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -16,12 +16,8 @@
"""

import re
from uncompyle6.semantics.consts import (
    PRECEDENCE,
    TABLE_DIRECT,
    INDENT_PER_LEVEL,
)

from uncompyle6.semantics.consts import INDENT_PER_LEVEL, PRECEDENCE, TABLE_DIRECT
from uncompyle6.semantics.helper import flatten_list

FSTRING_CONVERSION_MAP = {1: "!s", 2: "!r", 3: "!a", "X": ":X"}
@@ -54,10 +50,13 @@ def customize_for_version37(self, version):
        {
            "and_not": ("%c and not %c", (0, "expr"), (2, "expr")),
            "ann_assign": (
                "%|%[2]{attr}: %c\n", 0,
                "%|%[2]{attr}: %c\n",
                0,
            ),
            "ann_assign_init": (
                "%|%[2]{attr}: %c = %c\n", 0, 1,
                "%|%[2]{attr}: %c = %c\n",
                0,
                1,
            ),
            "async_for_stmt": (
                "%|async for %c in %c:\n%+%c%-\n\n",
@@ -89,9 +88,8 @@ def customize_for_version37(self, version):
            "attributes37": (
                "%[0]{pattr} import %c",
                (0, "IMPORT_NAME_ATTR"),
                (1, "IMPORT_FROM")
                (1, "IMPORT_FROM"),
            ),

            # nested await expressions like:
            #   return await (await bar())
            # need parenthesis.
@@ -126,19 +124,24 @@ def customize_for_version37(self, version):
                (0, PRECEDENCE["compare"] - 1),
                (-2, PRECEDENCE["compare"] - 1),
            ),
            "compare_chained2a_37": ('%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1)),
            "compare_chained2b_false_37": ('%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1)),
            "compare_chained2a_false_37": ('%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1)),
            "compare_chained2a_37": (
                '%[1]{pattr.replace("-", " ")} %p',
                (0, PRECEDENCE["compare"] - 1),
            ),
            "compare_chained2b_false_37": (
                '%[1]{pattr.replace("-", " ")} %p',
                (0, PRECEDENCE["compare"] - 1),
            ),
            "compare_chained2a_false_37": (
                '%[1]{pattr.replace("-", " ")} %p',
                (0, PRECEDENCE["compare"] - 1),
            ),
            "compare_chained2c_37": (
                '%[3]{pattr.replace("-", " ")} %p %p',
                (0, PRECEDENCE["compare"] - 1),
                (6, PRECEDENCE["compare"] - 1),
            ),
            'if_exp37': (
                '%p if %c else %c',
                (1, 'expr', 27), 0, 3
            ),

            "if_exp37": ("%p if %c else %c", (1, "expr", 27), 0, 3),
            "except_return": ("%|except:\n%+%c%-", 3),
            "if_exp_37a": (
                "%p if %p else %p",
@@ -153,9 +156,7 @@ def customize_for_version37(self, version):
                (5, "expr", 27),
            ),
            "ifstmtl": ("%|if %c:\n%+%c%-", (0, "testexpr"), (1, "_ifstmts_jumpl")),
            'import_as37': (
                "%|import %c as %c\n", 2, -2
            ),
            "import_as37": ("%|import %c as %c\n", 2, -2),
            "import_from37": ("%|from %[2]{pattr} import %c\n", (3, "importlist37")),
            "import_from_as37": (
                "%|from %c as %c\n",
@@ -178,12 +179,11 @@ def customize_for_version37(self, version):
                (0, "get_aiter"),
                (3, "list_iter"),
            ),

            "list_if37": (" if %p%c", (0, 27), 1),
            "list_if37_not": (" if not %p%c", (0, 27), 1),
            "testfalse_not_or": ("not %c or %c", (0, "expr"), (2, "expr")),
            "testfalse_not_and": ("not (%c)", 0),
            "testfalsel": ("not %c", (0, "expr")),
            "testfalsel": ("not %c", (0, "expr")),
            "try_except36": ("%|try:\n%+%c%-%c\n\n", 1, -2),
            "tryfinally36": ("%|try:\n%+%c%-%|finally:\n%+%c%-\n\n", (1, "returns"), 3),
            "dict_unpack": ("{**%C}", (0, -1, ", **")),
@@ -466,7 +466,12 @@ class ComprehensionMixin:
            self.preorder(n[1])
        else:
            if self.version == (3, 0):
                body = n[1]
                if isinstance(n, Token):
                    body = store
                elif len(n) > 1:
                    body = n[1]
                else:
                    body = n[0]
            else:
                body = n[0]
            self.preorder(body)
@@ -1093,7 +1093,12 @@ class NonterminalActions:
        self.write("{")
        if node[0] in ["LOAD_SETCOMP", "LOAD_DICTCOMP"]:
            if self.version == (3, 0):
                iter_index = 6
                if len(node) >= 6:
                    iter_index = 6
                else:
                    assert node[1].kind.startswith("MAKE_FUNCTION")
                    iter_index = 2
                    pass
            else:
                iter_index = 1
            self.comprehension_walk_newer(node, iter_index=iter_index, code_index=0)
@@ -1,4 +1,4 @@
# Copyright (c) 2019-2022 by Rocky Bernstein
# Copyright (c) 2019-2023 by Rocky Bernstein

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -119,15 +119,10 @@ class TreeTransform(GenericASTTraversal, object):

        mkfunc_pattr = node[-1].pattr
        if isinstance(mkfunc_pattr, tuple):
            assert len(mkfunc_pattr, 4) and isinstance(mkfunc_pattr, int)
            is_closure = node[-1].pattr[3] != 0
        else:
            # FIXME: This is what we had before. It is hoaky and probably wrong.
            is_closure = mkfunc_pattr == "closure"
            assert len(mkfunc_pattr) == 4 and isinstance(mkfunc_pattr, int)

        if (
            (not is_closure)
            and len(code.co_consts) > 0
            len(code.co_consts) > 0
            and isinstance(code.co_consts[0], str)
        ):
            docstring_node = SyntaxTree(
@@ -3,7 +3,7 @@
# More could be done here though.

from math import copysign
from xdis.version_info import PYTHON_VERSION
from xdis.version_info import PYTHON_VERSION_TRIPLE


def is_negative_zero(n):
@@ -36,7 +36,7 @@ def better_repr(v, version):
        if len(v) == 1:
            return "(%s,)" % better_repr(v[0], version)
        return "(%s)" % ", ".join(better_repr(i, version) for i in v)
    elif PYTHON_VERSION < 3.0 and isinstance(v, long):
    elif PYTHON_VERSION_TRIPLE < (3, 0) and isinstance(v, long):
        s = repr(v)
        if version >= 3.0 and s[-1] == "L":
            return s[:-1]
@@ -14,4 +14,4 @@
# This file is suitable for sourcing inside POSIX shell as
# well as importing into Python
# fmt: off
__version__="3.9.0a1" # noqa
__version__="3.9.1.dev0" # noqa