Compare commits

...

25 Commits

Author SHA1 Message Date
rocky
7ad0c37c62 Correct a couple of bugs...
We weren't distinguishing relative imports from absolute imports.
Fixes #444

Picking out docstring was broken too.
2023-04-17 16:35:27 -04:00
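The distinction in question is the relative-import "level" that CPython pushes as a constant just before IMPORT_NAME; a decompiler has to turn a non-zero level back into leading dots. A quick way to see it with plain dis (an illustration, not uncompyle6 code):

import dis

# "from . import mod" is preceded by LOAD_CONST 1 (one leading dot),
# while a plain "import mod" is preceded by LOAD_CONST 0 (absolute).
dis.dis(compile("from . import mod", "<demo>", "exec"))
dis.dis(compile("import mod", "<demo>", "exec"))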
R. Bernstein
b6aa58790f Merge pull request #446 from rocky/do-not-quote-non-str-args
Use xdis pattr extraction for LOAD_NAME
2023-04-16 15:47:53 -04:00
rocky
ad00b9a4ee Use xdis pattr extraction for LOAD_NAME 2023-04-15 07:31:35 -04:00
rocky
551e428086 Go over stdlib test for 2.{6,7} 2023-04-08 22:10:21 -04:00
R. Bernstein
cd0049933f Merge pull request #440 from andrem-eberle/master
Tentative fix for issue #437.
2023-04-08 20:37:31 -04:00
Andre Eberle
7f3c1fa3a4 Tentative fix for issue #437. I added a new set of rules in ifelsestmt.py to check offsets against. This seems to have fixed the current issue. The output now turns nested if-else-if-else chains into if-elif-elif chains, which seems semantically correct. 2023-04-08 16:54:55 -04:00
rocky
f76c35c902 Slightly nicer docstring detection for closure 2023-03-25 02:20:26 -04:00
rocky
82963cdf2c Preserve docstring in closures.
This change was synchronized from decompyle3.
2023-03-24 20:31:49 -04:00
rocky
a20972dd12 xdis PYTHON_VERSION removal 2023-02-26 19:29:08 -05:00
rocky
18b5934b2d Allow xdis 6.1.0 2023-02-26 19:21:30 -05:00
R. Bernstein
982abe0980 Merge pull request #430 from rocky/sync-with-decompyle3
Synch with decompyle3 code a little bit
2023-01-19 05:10:20 -05:00
rocky
41d1ba31f3 Synch with decompyle3 code a little bit 2023-01-19 04:31:19 -05:00
rocky
e03f4cfe43 Try adding no blank templates 2023-01-18 22:23:06 -05:00
R. Bernstein
53a5e03a8f Merge pull request #429 from rocky/reinstate-pos-args
Reinstate pos_args in CALL_METHOD
2023-01-18 21:23:23 -05:00
rocky
7c99564640 Reinstate pos_args in CALL_METHOD 2023-01-18 21:01:11 -05:00
rocky
931abc5726 self.opc.version -> self.opc.version_tuple
The next release of xdis will no longer support self.opc.version (a
float value, which doesn't work in the presence of 3.10 and above).
2023-01-18 17:08:39 -05:00
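The float form breaks down once 3.10 exists: as a float, 3.10 is just 3.1 and ordering against 3.9 goes wrong, while tuples compare correctly. A minimal illustration (generic Python, not xdis or uncompyle6 internals):

# Float version numbers collapse 3.10 into 3.1 and mis-order releases.
assert 3.10 == 3.1
assert not (3.10 > 3.9)
# version_tuple comparisons behave as expected.
assert (3, 10) > (3, 9)
assert (3, 10) != (3, 1)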
rocky
2b3cd696db 3.0 set comprehension bug 2023-01-16 03:50:50 -05:00
rocky
50697bb79e Improve set comprehension for Python 3.0 2023-01-16 03:40:55 -05:00
R. Bernstein
137dd64a46 Merge pull request #427 from rocky/make-fn-or-closure-with-annotatation
try to be more honest about MAKE_{FUNCTION,CLOSURE}
2023-01-16 02:12:13 -05:00
rocky
9a7eb0ad0a try to be more honest about MAKE_{FUNCTION,CLOSURE} 2023-01-16 01:45:37 -05:00
rocky
154dabfcef Handle Python 3.4 MAKE_CLOSURE fns ...
This is done just like Python 3.3.
2023-01-14 09:54:48 -05:00
rocky
42d26ccbd7 Bump version 2023-01-14 06:21:12 -05:00
rocky
73a4c0be78 Use 3.7.16 for master version 2023-01-14 02:49:43 -05:00
rocky
92830c2eae Newer setuptools 2023-01-14 02:22:53 -05:00
rocky
090570cd34 3.4-3.5 MAKE_CLOSURE with annotate
Docs lie about annotation args. Slight adjustment here.
More is probably needed.
2023-01-14 02:20:59 -05:00
29 changed files with 295 additions and 219 deletions

.github/ISSUE_TEMPLATE/config.yml
View File

@@ -0,0 +1 @@
blank_issues_enabled: False

View File

@@ -75,7 +75,7 @@ entry_points = {
]
}
ftp_url = None
install_requires = ["spark-parser >= 1.8.9, < 1.9.0", "xdis >= 6.0.2, < 6.1.0"]
install_requires = ["spark-parser >= 1.8.9, < 1.9.0", "xdis >= 6.0.2, < 6.2.0"]
license = "GPL3"
mailing_list = "python-debugger@googlegroups.com"

View File

@@ -1,5 +1,5 @@
#!/bin/bash
PYTHON_VERSION=3.7.14
PYTHON_VERSION=3.7.16
function checkout_version {
local repo=$1

View File

@@ -6,4 +6,4 @@ pytest
Click~=7.0
xdis>=6.0.4
configobj~=5.0.6
setuptools~=65.3.0
setuptools~=65.5.1

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,7 @@
# Bug was erroneously putting quotes around Exception on decompilation
# RUNNABLE!
"""This program is self-checking!"""
z = ["y", Exception]
assert z[0] == "y"
assert isinstance(z[1], Exception)

View File

@@ -0,0 +1,18 @@
# Related to #426
# This file is RUNNABLE!
"""This program is self-checking!"""
a = 5
class MakeClosureTest():
# This function uses MAKE_CLOSURE with annotation args
def __init__(self, dev: str, b: bool):
super().__init__()
self.dev = dev
self.b = b
self.a = a
x = MakeClosureTest("dev", True)
assert x.dev == "dev"
assert x.b == True
assert x.a == 5

View File

@@ -81,7 +81,7 @@ SKIP_TESTS=(
[test_winreg.py]=1 # it fails on its own
[test_winsound.py]=1 # it fails on its own
[test_zipimport_support.py]=1
[test_zipimport_support.py]=1 # expected test to raise ImportError
[test_zipfile64.py]=1 # Skip Long test
# .pyenv/versions/2.6.9/lib/python2.6/lib2to3/refactor.pyc
# .pyenv/versions/2.6.9/lib/python2.6/pyclbr.pyc

View File

@@ -22,25 +22,20 @@ SKIP_TESTS=(
[test_doctest2.py]=1 # Fails on its own
[test_format.py]=1 # Control flow "and" vs nested "if"
[test_float.py]=1
[test_grp.py]=1 # test takes too long, works interactively though
[test_io.py]=1 # Test takes too long to run
[test_ioctl.py]=1 # Test takes too long to run
[test_lib2to3.py]=1 # test takes too long to run: 28 seconds
[test_memoryio.py]=1 # FIX
[test_multiprocessing.py]=1 # On uncompyle2, takes 24 secs
[test_poll.py]=1 # test takes too long to run: 11 seconds
[test_regrtest.py]=1 #
[test_runpy.py]=1 # Long and fails on its own
[test_socket.py]=1 # Runs ok but takes 22 seconds
[test_ssl.py]=1 #
[test_ssl.py]=1 # Fails on its own
[test_subprocess.py]=1 # Runs ok but takes 22 seconds
[test_sys_settrace.py]=1 # Line numbers are expected to be different
[test_traceback.py]=1 # Line numbers change - duh.
[test_xpickle.py]=1 # Runs ok but takes 72 seconds
[test_zipfile64.py]=1 # Runs ok but takes 204 seconds
[test_zipimport.py]=1 #
[test_zipimport.py]=1 # expected test to raise ImportError
)
# 334 unit-test files in about 15 minutes

View File

@@ -29,11 +29,18 @@ Step 2: Run the test:
from __future__ import print_function
import getopt, os, py_compile, sys, shutil, tempfile, time
import getopt
import os
import py_compile
import shutil
import sys
import tempfile
import time
from fnmatch import fnmatch
from xdis.version_info import PYTHON_VERSION_TRIPLE
from uncompyle6.main import main
from xdis.version_info import PYTHON_VERSION
def get_srcdir():
@@ -164,10 +171,10 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
if opts["do_compile"]:
compiled_version = opts["compiled_version"]
if compiled_version and PYTHON_VERSION != compiled_version:
if compiled_version and PYTHON_VERSION_TRIPLE != compiled_version:
print(
"Not compiling: desired Python version is %s but we are running %s"
% (compiled_version, PYTHON_VERSION),
% (compiled_version, PYTHON_VERSION_TRIPLE),
file=sys.stderr,
)
else:

View File

@@ -199,7 +199,7 @@ class PythonParser(GenericASTBuilder):
if instructions[finish].linestart:
break
pass
if start > 0:
if start >= 0:
err_token = instructions[index]
print("Instruction context:")
for i in range(start, finish):
@@ -213,10 +213,16 @@ class PythonParser(GenericASTBuilder):
raise ParserError(None, -1, self.debug["reduce"])
def get_pos_kw(self, token):
"""Return then the number of positional parameters and
represented by the attr field of token"""
"""
Return the number of positional parameters and keyword
parameters represented by the attr (operand) field of
token.
This appears in CALL_FUNCTION or CALL_METHOD (PyPy) tokens
"""
# Low byte indicates number of positional parameters,
# high byte number of keyword parameters
assert token.kind.startswith("CALL_FUNCTION") or token.kind.startswith("CALL_METHOD")
args_pos = token.attr & 0xFF
args_kw = (token.attr >> 8) & 0xFF
return args_pos, args_kw
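As a worked example of the encoding noted in the comment above (using a made-up operand value, not part of the diff): a CALL_FUNCTION operand of 0x0102 means two positional arguments and one keyword argument.

argc = (1 << 8) | 2                   # hypothetical CALL_FUNCTION operand, 0x0102
pos_args_count = argc & 0xFF          # low byte  -> 2 positional args
kw_args_count = (argc >> 8) & 0xFF    # next byte -> 1 keyword arg
assert (pos_args_count, kw_args_count) == (2, 1)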

View File

@@ -541,9 +541,9 @@ class Python3Parser(PythonParser):
# token found, while this one doesn't.
if self.version < (3, 6):
call_function = self.call_fn_name(call_fn_tok)
args_pos, args_kw = self.get_pos_kw(call_fn_tok)
pos_args_count, kw_args_count = self.get_pos_kw(call_fn_tok)
rule = "build_class ::= LOAD_BUILD_CLASS mkfunc %s" "%s" % (
("expr " * (args_pos - 1) + ("kwarg " * args_kw)),
("expr " * (pos_args_count - 1) + ("kwarg " * kw_args_count)),
call_function,
)
else:
@@ -552,10 +552,10 @@ class Python3Parser(PythonParser):
if call_function.startswith("CALL_FUNCTION_KW"):
self.addRule("classdef ::= build_class_kw store", nop_func)
if is_pypy:
args_pos, args_kw = self.get_pos_kw(call_fn_tok)
pos_args_count, kw_args_count = self.get_pos_kw(call_fn_tok)
rule = "build_class_kw ::= LOAD_BUILD_CLASS mkfunc %s%s%s" % (
"expr " * (args_pos - 1),
"kwarg " * (args_kw),
"expr " * (pos_args_count - 1),
"kwarg " * (kw_args_count),
call_function,
)
else:
@@ -581,7 +581,7 @@ class Python3Parser(PythonParser):
classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc {expr}^n-1 CALL_FUNCTION_n
"""
args_pos, args_kw = self.get_pos_kw(token)
pos_args_count, kw_args_count = self.get_pos_kw(token)
# Additional exprs for * and ** args:
# 0 if neither
@@ -590,7 +590,7 @@ class Python3Parser(PythonParser):
# Yes, this computation based on instruction name is a little bit hokey.
nak = (len(opname) - len("CALL_FUNCTION")) // 3
uniq_param = args_kw + args_pos
uniq_param = kw_args_count + pos_args_count
# Note: 3.5+ have subclassed this method; so we don't handle
# 'CALL_FUNCTION_VAR' or 'CALL_FUNCTION_EX' here.
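The opcode-name-length computation of nak works because each * or ** argument adds a short suffix ("_VAR", "_KW") of roughly three characters to the opcode name. A sketch of the arithmetic (not library code):

for opname in ("CALL_FUNCTION", "CALL_FUNCTION_VAR",
               "CALL_FUNCTION_KW", "CALL_FUNCTION_VAR_KW"):
    nak = (len(opname) - len("CALL_FUNCTION")) // 3
    print(opname, "->", nak)   # 0, 1, 1 and 2 extra arguments respectively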
@@ -599,16 +599,16 @@ class Python3Parser(PythonParser):
token.kind = self.call_fn_name(token)
rule = (
"call ::= expr "
+ ("pos_arg " * args_pos)
+ ("kwarg " * args_kw)
+ ("pos_arg " * pos_args_count)
+ ("kwarg " * kw_args_count)
+ token.kind
)
else:
token.kind = self.call_fn_name(token)
rule = (
"call ::= expr "
+ ("pos_arg " * args_pos)
+ ("kwarg " * args_kw)
+ ("pos_arg " * pos_args_count)
+ ("kwarg " * kw_args_count)
+ "expr " * nak
+ token.kind
)
@@ -616,11 +616,11 @@ class Python3Parser(PythonParser):
self.add_unique_rule(rule, token.kind, uniq_param, customize)
if "LOAD_BUILD_CLASS" in self.seen_ops:
if next_token == "CALL_FUNCTION" and next_token.attr == 1 and args_pos > 1:
if next_token == "CALL_FUNCTION" and next_token.attr == 1 and pos_args_count > 1:
rule = "classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d" % (
("expr " * (args_pos - 1)),
("expr " * (pos_args_count - 1)),
opname,
args_pos,
pos_args_count,
)
self.add_unique_rule(rule, token.kind, uniq_param, customize)
@@ -845,18 +845,18 @@ class Python3Parser(PythonParser):
elif opname in ("BUILD_CONST_LIST", "BUILD_CONST_DICT", "BUILD_CONST_SET"):
if opname == "BUILD_CONST_DICT":
rule = f"""
rule = """
add_consts ::= ADD_VALUE*
const_list ::= COLLECTION_START add_consts {opname}
const_list ::= COLLECTION_START add_consts %s
dict ::= const_list
expr ::= dict
"""
""" % opname
else:
rule = f"""
rule = """
add_consts ::= ADD_VALUE*
const_list ::= COLLECTION_START add_consts {opname}
const_list ::= COLLECTION_START add_consts %s
expr ::= const_list
"""
""" % opname
self.addRule(rule, nop_func)
elif opname_base in (
@@ -955,14 +955,14 @@ class Python3Parser(PythonParser):
elif opname_base == "CALL_METHOD":
# PyPy and Python 3.7+ only - DRY with parse2
args_pos, args_kw = self.get_pos_kw(token)
pos_args_count, kw_args_count = self.get_pos_kw(token)
# number of apply equiv arguments:
nak = (len(opname_base) - len("CALL_METHOD")) // 3
rule = (
"call ::= expr "
+ ("pos_arg " * args_pos)
+ ("kwarg " * args_kw)
+ ("pos_arg " * pos_args_count)
+ ("kwarg " * kw_args_count)
+ "expr " * nak
+ opname
)
@@ -1096,7 +1096,7 @@ class Python3Parser(PythonParser):
"""
self.addRule(rule, nop_func)
args_pos, args_kw, annotate_args = token.attr
pos_args_count, kw_args_count, annotate_args = token.attr
# FIXME: Fold test into add_make_function_rule
if self.version < (3, 3):
@@ -1105,7 +1105,7 @@ class Python3Parser(PythonParser):
j = 2
if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
rule_pat = "lambda_body ::= %sload_closure LOAD_LAMBDA %%s%s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
opname,
)
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
@@ -1113,7 +1113,7 @@ class Python3Parser(PythonParser):
if has_get_iter_call_function1:
rule_pat = (
"generator_exp ::= %sload_closure load_genexpr %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
@@ -1129,7 +1129,7 @@ class Python3Parser(PythonParser):
rule_pat = (
"listcomp ::= %sload_closure LOAD_LISTCOMP %%s%s expr "
"GET_ITER CALL_FUNCTION_1"
% ("pos_arg " * args_pos, opname)
% ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
@@ -1138,7 +1138,7 @@ class Python3Parser(PythonParser):
rule_pat = (
"set_comp ::= %sload_closure LOAD_SETCOMP %%s%s expr "
"GET_ITER CALL_FUNCTION_1"
% ("pos_arg " * args_pos, opname)
% ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
@@ -1149,13 +1149,13 @@ class Python3Parser(PythonParser):
self.add_unique_rule(
"dict_comp ::= %sload_closure LOAD_DICTCOMP %s "
"expr GET_ITER CALL_FUNCTION_1"
% ("pos_arg " * args_pos, opname),
% ("pos_arg " * pos_args_count, opname),
opname,
token.attr,
customize,
)
if args_kw > 0:
if kw_args_count > 0:
kwargs_str = "kwargs "
else:
kwargs_str = ""
@@ -1167,36 +1167,40 @@ class Python3Parser(PythonParser):
"mkfunc_annotate ::= %s%s%sannotate_tuple load_closure LOAD_CODE %s"
% (
kwargs_str,
"pos_arg " * args_pos,
"annotate_arg " * (annotate_args - 1),
"pos_arg " * pos_args_count,
"annotate_arg " * (annotate_args),
opname,
)
)
else:
rule = "mkfunc ::= %s%sload_closure LOAD_CODE %s" % (
kwargs_str,
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
opname,
)
elif self.version == (3, 3):
self.add_unique_rule(rule, opname, token.attr, customize)
elif (3, 3) <= self.version < (3, 6):
if annotate_args > 0:
rule = (
"mkfunc_annotate ::= %s%s%sannotate_tuple load_closure LOAD_CODE LOAD_STR %s"
% (
kwargs_str,
"pos_arg " * args_pos,
"annotate_arg " * (annotate_args - 1),
"pos_arg " * pos_args_count,
"annotate_arg " * (annotate_args),
opname,
)
)
else:
rule = "mkfunc ::= %s%sload_closure LOAD_CODE LOAD_STR %s" % (
kwargs_str,
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
elif self.version >= (3, 4):
if self.version >= (3, 4):
if not self.is_pypy:
load_op = "LOAD_STR"
else:
@@ -1206,33 +1210,33 @@ class Python3Parser(PythonParser):
rule = (
"mkfunc_annotate ::= %s%s%sannotate_tuple load_closure %s %s"
% (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
kwargs_str,
"annotate_arg " * (annotate_args - 1),
"annotate_arg " * (annotate_args),
load_op,
opname,
)
)
else:
rule = "mkfunc ::= %s%s load_closure LOAD_CODE %s %s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
kwargs_str,
load_op,
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
self.add_unique_rule(rule, opname, token.attr, customize)
if args_kw == 0:
if kw_args_count == 0:
rule = "mkfunc ::= %sload_closure load_genexpr %s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
if self.version < (3, 4):
rule = "mkfunc ::= %sload_closure LOAD_CODE %s" % (
"expr " * args_pos,
"expr " * pos_args_count,
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
@@ -1243,10 +1247,10 @@ class Python3Parser(PythonParser):
if self.version >= (3, 6):
# The semantics of MAKE_FUNCTION in 3.6 are totally different from
# before.
args_pos, args_kw, annotate_args, closure = token.attr
stack_count = args_pos + args_kw + annotate_args
pos_args_count, kw_args_count, annotate_args, closure = token.attr
stack_count = pos_args_count + kw_args_count + annotate_args
if closure:
if args_pos:
if pos_args_count:
rule = "lambda_body ::= %s%s%s%s" % (
"expr " * stack_count,
"load_closure " * closure,
@@ -1279,14 +1283,14 @@ class Python3Parser(PythonParser):
if has_get_iter_call_function1:
rule_pat = (
"generator_exp ::= %sload_genexpr %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
)
rule_pat = (
"generator_exp ::= %sload_closure load_genexpr %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
@@ -1308,7 +1312,7 @@ class Python3Parser(PythonParser):
rule_pat = (
"listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
"GET_ITER CALL_FUNCTION_1"
% ("expr " * args_pos, opname)
% ("expr " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
@@ -1316,8 +1320,8 @@ class Python3Parser(PythonParser):
if self.is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LAMBDA"):
rule_pat = "lambda_body ::= %s%sLOAD_LAMBDA %%s%s" % (
("pos_arg " * args_pos),
("kwarg " * args_kw),
("pos_arg " * pos_args_count),
("kwarg " * kw_args_count),
opname,
)
self.add_make_function_rule(
@@ -1326,9 +1330,9 @@ class Python3Parser(PythonParser):
continue
if self.version < (3, 6):
args_pos, args_kw, annotate_args = token.attr
pos_args_count, kw_args_count, annotate_args = token.attr
else:
args_pos, args_kw, annotate_args, closure = token.attr
pos_args_count, kw_args_count, annotate_args, closure = token.attr
if self.version < (3, 3):
j = 1
@@ -1338,7 +1342,7 @@ class Python3Parser(PythonParser):
if has_get_iter_call_function1:
rule_pat = (
"generator_exp ::= %sload_genexpr %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
@@ -1350,7 +1354,7 @@ class Python3Parser(PythonParser):
# Todo: For Pypy we need to modify this slightly
rule_pat = (
"listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ("expr " * args_pos, opname)
"GET_ITER CALL_FUNCTION_1" % ("expr " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
@@ -1359,13 +1363,13 @@ class Python3Parser(PythonParser):
# FIXME: Fold test into add_make_function_rule
if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
rule_pat = "lambda_body ::= %s%sLOAD_LAMBDA %%s%s" % (
("pos_arg " * args_pos),
("kwarg " * args_kw),
("pos_arg " * pos_args_count),
("kwarg " * kw_args_count),
opname,
)
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
if args_kw == 0:
if kw_args_count == 0:
kwargs = "no_kwargs"
self.add_unique_rule("no_kwargs ::=", opname, token.attr, customize)
else:
@@ -1375,13 +1379,13 @@ class Python3Parser(PythonParser):
# positional args after keyword args
rule = "mkfunc ::= %s %s%s%s" % (
kwargs,
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
"LOAD_CODE ",
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
rule = "mkfunc ::= %s%s%s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
"LOAD_CODE ",
opname,
)
@@ -1389,14 +1393,14 @@ class Python3Parser(PythonParser):
# positional args after keyword args
rule = "mkfunc ::= %s %s%s%s" % (
kwargs,
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
"LOAD_CODE LOAD_STR ",
opname,
)
elif self.version >= (3, 6):
# positional args before keyword args
rule = "mkfunc ::= %s%s %s%s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
kwargs,
"LOAD_CODE LOAD_STR ",
opname,
@@ -1404,7 +1408,7 @@ class Python3Parser(PythonParser):
elif self.version >= (3, 4):
# positional args before keyword args
rule = "mkfunc ::= %s%s %s%s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
kwargs,
"LOAD_CODE LOAD_STR ",
opname,
@@ -1412,7 +1416,7 @@ class Python3Parser(PythonParser):
else:
rule = "mkfunc ::= %s%sexpr %s" % (
kwargs,
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
@@ -1422,8 +1426,8 @@ class Python3Parser(PythonParser):
rule = (
"mkfunc_annotate ::= %s%sannotate_tuple LOAD_CODE LOAD_STR %s"
% (
("pos_arg " * (args_pos)),
("call " * (annotate_args - 1)),
("pos_arg " * pos_args_count),
("call " * annotate_args),
opname,
)
)
@@ -1431,8 +1435,8 @@ class Python3Parser(PythonParser):
rule = (
"mkfunc_annotate ::= %s%sannotate_tuple LOAD_CODE LOAD_STR %s"
% (
("pos_arg " * (args_pos)),
("annotate_arg " * (annotate_args - 1)),
("pos_arg " * pos_args_count),
("annotate_arg " * annotate_args),
opname,
)
)
@@ -1443,21 +1447,21 @@ class Python3Parser(PythonParser):
if self.version == (3, 3):
# 3.3 puts kwargs before pos_arg
pos_kw_tuple = (
("kwargs " * args_kw),
("pos_arg " * (args_pos)),
("kwargs " * kw_args_count),
("pos_arg " * pos_args_count),
)
else:
# 3.4 and 3.5 put pos_arg before kwargs
pos_kw_tuple = (
"pos_arg " * (args_pos),
("kwargs " * args_kw),
"pos_arg " * (pos_args_count),
("kwargs " * kw_args_count),
)
rule = (
"mkfunc_annotate ::= %s%s%sannotate_tuple LOAD_CODE LOAD_STR EXTENDED_ARG %s"
% (
pos_kw_tuple[0],
pos_kw_tuple[1],
("call " * (annotate_args - 1)),
("call " * annotate_args),
opname,
)
)
@@ -1467,7 +1471,7 @@ class Python3Parser(PythonParser):
% (
pos_kw_tuple[0],
pos_kw_tuple[1],
("annotate_arg " * (annotate_args - 1)),
("annotate_arg " * annotate_args),
opname,
)
)
@@ -1476,9 +1480,9 @@ class Python3Parser(PythonParser):
rule = (
"mkfunc_annotate ::= %s%s%sannotate_tuple LOAD_CODE EXTENDED_ARG %s"
% (
("kwargs " * args_kw),
("pos_arg " * (args_pos)),
("annotate_arg " * (annotate_args - 1)),
("kwargs " * kw_args_count),
("pos_arg " * (pos_args_count)),
("annotate_arg " * annotate_args),
opname,
)
)
@@ -1486,9 +1490,9 @@ class Python3Parser(PythonParser):
rule = (
"mkfunc_annotate ::= %s%s%sannotate_tuple LOAD_CODE EXTENDED_ARG %s"
% (
("kwargs " * args_kw),
("pos_arg " * (args_pos)),
("call " * (annotate_args - 1)),
("kwargs " * kw_args_count),
("pos_arg " * pos_args_count),
("call " * annotate_args),
opname,
)
)

View File

@@ -74,8 +74,8 @@ class Python30Parser(Python31Parser):
# Need to keep LOAD_FAST as index 1
set_comp_header ::= BUILD_SET_0 DUP_TOP STORE_FAST
set_comp_func ::= set_comp_header
LOAD_FAST FOR_ITER store comp_iter
JUMP_BACK POP_TOP JUMP_BACK RETURN_VALUE RETURN_LAST
LOAD_ARG FOR_ITER store comp_iter
JUMP_BACK COME_FROM POP_TOP JUMP_BACK RETURN_VALUE RETURN_LAST
list_comp_header ::= BUILD_LIST_0 DUP_TOP STORE_FAST
list_comp ::= list_comp_header

View File

@@ -92,7 +92,7 @@ class Python32Parser(Python3Parser):
"LOAD_CONST LOAD_CODE EXTENDED_ARG %s"
) % (
("pos_arg " * args_pos),
("annotate_arg " * (annotate_args - 1)),
("annotate_arg " * (annotate_args)),
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2016-2017, 2019-2020, 2022 Rocky Bernstein
# Copyright (c) 2016-2017, 2019-2020, 2022-2023 Rocky Bernstein
"""
Python 3.7 base code. We keep non-custom-generated grammar rules out of this file.
"""
@@ -431,35 +431,39 @@ class Python37BaseParser(PythonParser):
"BUILD_TUPLE",
"BUILD_TUPLE_UNPACK",
):
v = token.attr
collection_size = token.attr
is_LOAD_CLOSURE = False
if opname_base == "BUILD_TUPLE":
# If is part of a "load_closure", then it is not part of a
# "list".
is_LOAD_CLOSURE = True
for j in range(v):
for j in range(collection_size):
if tokens[i - j - 1].kind != "LOAD_CLOSURE":
is_LOAD_CLOSURE = False
break
if is_LOAD_CLOSURE:
rule = "load_closure ::= %s%s" % (("LOAD_CLOSURE " * v), opname)
rule = "load_closure ::= %s%s" % (
("LOAD_CLOSURE " * collection_size),
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
if not is_LOAD_CLOSURE or v == 0:
if not is_LOAD_CLOSURE or collection_size == 0:
# We do this complicated test to speed up parsing of
# pathologically long literals, especially those over 1024.
build_count = token.attr
thousands = build_count // 1024
thirty32s = (build_count // 32) % 32
thousands = collection_size // 1024
thirty32s = (collection_size // 32) % 32
if thirty32s > 0:
rule = "expr32 ::=%s" % (" expr" * 32)
self.add_unique_rule(rule, opname_base, build_count, customize)
self.add_unique_rule(
rule, opname_base, collection_size, customize
)
pass
if thousands > 0:
self.add_unique_rule(
"expr1024 ::=%s" % (" expr32" * 32),
opname_base,
build_count,
collection_size,
customize,
)
pass
@@ -468,7 +472,7 @@ class Python37BaseParser(PythonParser):
("%s ::= " % collection)
+ "expr1024 " * thousands
+ "expr32 " * thirty32s
+ "expr " * (build_count % 32)
+ "expr " * (collection_size % 32)
+ opname
)
self.add_unique_rules(["expr ::= %s" % collection, rule], customize)
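To see how the batching works out arithmetically (a sketch with a hypothetical size): a 1100-element literal is matched as one expr1024 group, two expr32 groups, and twelve plain exprs.

collection_size = 1100                     # hypothetical, very long literal
thousands = collection_size // 1024        # -> 1 expr1024 group (1024 exprs)
thirty32s = (collection_size // 32) % 32   # -> 2 expr32 groups (64 exprs)
remainder = collection_size % 32           # -> 12 plain exprs
assert 1024 * thousands + 32 * thirty32s + remainder == 1100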
@@ -478,8 +482,8 @@ class Python37BaseParser(PythonParser):
if token.attr == 2:
self.add_unique_rules(
[
"expr ::= build_slice2",
"build_slice2 ::= expr expr BUILD_SLICE_2",
"expr ::= slice2",
"slice2 ::= expr expr BUILD_SLICE_2",
],
customize,
)
@@ -489,8 +493,8 @@ class Python37BaseParser(PythonParser):
)
self.add_unique_rules(
[
"expr ::= build_slice3",
"build_slice3 ::= expr expr expr BUILD_SLICE_3",
"expr ::= slice3",
"slice3 ::= expr expr expr BUILD_SLICE_3",
],
customize,
)
@@ -524,6 +528,7 @@ class Python37BaseParser(PythonParser):
if opname == "CALL_FUNCTION" and token.attr == 1:
rule = """
expr ::= dict_comp
dict_comp ::= LOAD_DICTCOMP LOAD_STR MAKE_FUNCTION_0 expr
GET_ITER CALL_FUNCTION_1
classdefdeco1 ::= expr classdefdeco2 CALL_FUNCTION_1
@@ -558,11 +563,12 @@ class Python37BaseParser(PythonParser):
nak = (len(opname_base) - len("CALL_METHOD")) // 3
rule = (
"call ::= expr "
+ ("expr " * args_pos)
+ ("pos_arg " * args_pos)
+ ("kwarg " * args_kw)
+ "expr " * nak
+ opname
)
self.add_unique_rule(rule, opname, token.attr, customize)
elif opname == "CONTINUE":
@@ -1252,20 +1258,13 @@ class Python37BaseParser(PythonParser):
try:
if fn:
return fn(self, lhs, n, rule, ast, tokens, first, last)
except:
except Exception:
import sys, traceback
print(
("Exception in %s %s\n"
+ "rule: %s\n"
+ "offsets %s .. %s")
% (
fn.__name__,
sys.exc_info()[1],
rule2str(rule),
tokens[first].offset,
tokens[last].offset,
)
f"Exception in {fn.__name__} {sys.exc_info()[1]}\n"
+ f"rule: {rule2str(rule)}\n"
+ f"offsets {tokens[first].offset} .. {tokens[last].offset}"
)
print(traceback.print_tb(sys.exc_info()[2], -1))
raise ParserError(tokens[last], tokens[last].off2int(), self.debug["rules"])

View File

@@ -81,6 +81,16 @@ IFELSE_STMT_RULES = frozenset(
"come_froms",
),
),
(
'ifelsestmtc',
(
'testexpr',
'c_stmts_opt',
'JUMP_FORWARD',
'else_suite',
'come_froms'
),
),
(
"ifelsestmt",
(

View File

@@ -21,7 +21,7 @@ scanner/ingestion module. From here we call various version-specific
scanners, e.g. for Python 2.7 or 3.4.
"""
from typing import Optional
from typing import Optional, Tuple
from array import array
from collections import namedtuple
@@ -600,8 +600,25 @@ class Scanner(object):
return self.Token
def parse_fn_counts(argc):
return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)
# TODO: after the next xdis release, use from there instead.
def parse_fn_counts_30_35(argc: int) -> Tuple[int, int, int]:
"""
In Python 3.0 to 3.5, MAKE_CLOSURE and MAKE_FUNCTION encode the
counts of positional, default + named, and annotation arguments
as packed byte fields in the lower 24 bits of ``argc``. The high
bits of ``argc`` may have come from an EXTENDED_ARG instruction.
Here, we unpack the values from the ``argc`` int and return a
triple of the positional args, named args, and annotation args.
"""
annotate_count = (argc >> 16) & 0x7FFF
# For some reason that I don't understand, annotate_args is off by one
# when there is an EXTENDED_ARG instruction from what is documented in
# https://docs.python.org/3.4/library/dis.html#opcode-MAKE_CLOSURE
if annotate_count > 1:
annotate_count -= 1
return ((argc & 0xFF), (argc >> 8) & 0xFF, annotate_count)
def get_scanner(version, is_pypy=False, show_asm=None):
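A worked example of the unpacking that parse_fn_counts_30_35 performs (with a made-up argc value; the counts follow the low-byte / second-byte / upper-bits layout described in its docstring):

argc = (3 << 16) | (1 << 8) | 2         # hypothetical MAKE_FUNCTION/MAKE_CLOSURE operand
pos_args = argc & 0xFF                  # -> 2
name_pair_args = (argc >> 8) & 0xFF     # -> 1
annotate_count = (argc >> 16) & 0x7FFF  # -> 3, before the off-by-one
                                        #    EXTENDED_ARG adjustment above
assert (pos_args, name_pair_args, annotate_count) == (2, 1, 3)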

View File

@@ -320,7 +320,9 @@ class Scanner2(Scanner):
"BUILD_SET",
):
t = Token(
op_name, oparg, pattr, offset, self.linestarts.get(offset, None), op, has_arg, self.opc
op_name, oparg, pattr, offset,
self.linestarts.get(offset, None),
op, has_arg, self.opc
)
collection_type = op_name.split("_")[1]
next_tokens = self.bound_collection_from_tokens(

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2015-2019, 2021-2022 by Rocky Bernstein
# Copyright (c) 2015-2019, 2021-2023 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
@@ -41,7 +41,7 @@ from xdis import iscode, instruction_size, Instruction
from xdis.bytecode import _get_const_info
from uncompyle6.scanners.tok import Token
from uncompyle6.scanner import parse_fn_counts
from uncompyle6.scanner import parse_fn_counts_30_35
import xdis
# Get all the opcodes into globals
@@ -363,7 +363,7 @@ class Scanner3(Scanner):
)
new_tokens.append(
Token(
opname=f"BUILD_DICT_OLDER",
opname="BUILD_DICT_OLDER",
attr=t.attr,
pattr=t.pattr,
offset=t.offset,
@@ -623,23 +623,29 @@ class Scanner3(Scanner):
flags >>= 1
attr = attr[:4] # remove last value: attr[5] == False
else:
pos_args, name_pair_args, annotate_args = parse_fn_counts(
pos_args, name_pair_args, annotate_args = parse_fn_counts_30_35(
inst.argval
)
pattr = "%d positional, %d keyword only, %d annotated" % (
pos_args,
name_pair_args,
annotate_args,
)
if name_pair_args > 0:
)
pattr = f"{pos_args} positional, {name_pair_args} keyword only, {annotate_args} annotated"
if name_pair_args > 0 and annotate_args > 0:
# FIXME: this should probably be K_
opname = "%s_N%d" % (opname, name_pair_args)
opname += f"_N{name_pair_args}_A{annotate_args}"
pass
if annotate_args > 0:
opname = "%s_A_%d" % (opname, annotate_args)
elif annotate_args > 0:
opname += f"_A_{annotate_args}"
pass
opname = "%s_%d" % (opname, pos_args)
elif name_pair_args > 0:
opname += f"_N_{name_pair_args}"
pass
else:
# Rule customization matches MAKE_FUNCTION_...,
# so make sure to add the "_"
opname += "_0"
attr = (pos_args, name_pair_args, annotate_args)
new_tokens.append(
Token(
opname=opname,

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2016-2021 by Rocky Bernstein
# Copyright (c) 2016-2021, 2023 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 1999 John Aycock
#
@@ -19,6 +19,7 @@ import re
import sys
intern = sys.intern
from typing import Union
def off2int(offset, prefer_last=True):
@@ -60,7 +61,7 @@ class Token:
opname,
attr=None,
pattr=None,
offset=-1,
offset:Union[int, str]=-1,
linestart=None,
op=None,
has_arg=None,
@@ -180,7 +181,7 @@ class Token:
elif name == "LOAD_ASSERT":
return "%s%s %s" % (prefix, offset_opname, pattr)
elif self.op in self.opc.NAME_OPS:
if self.opc.version >= 3.0:
if self.opc.version_tuple >= (3, 0):
return "%s%s%s %s" % (prefix, offset_opname, argstr, self.attr)
elif name == "EXTENDED_ARG":
return "%s%s%s 0x%x << %s = %s" % (

View File

@@ -170,6 +170,7 @@ def customize_for_version36(self, version):
class_name = node[1][1].attr
if self.is_pypy and class_name.find("<locals>") > 0:
class_name = class_name.split(".")[-1]
else:
class_name = node[1][2].attr
build_class = node
@@ -206,23 +207,24 @@ def customize_for_version36(self, version):
elif build_class[1][0] == "load_closure":
# Python 3 with closures not functions
load_closure = build_class[1]
if hasattr(load_closure[-3], "attr"):
# Python 3.3 classes with closures work like this.
# Note have to test before 3.2 case because
# index -2 also has an attr.
subclass_code = load_closure[-3].attr
elif hasattr(load_closure[-2], "attr"):
# Python 3.2 works like this
subclass_code = load_closure[-2].attr
else:
raise "Internal Error n_classdef: cannot find class body"
subclass_code = None
for i in range(-4, -1):
if load_closure[i] == "LOAD_CODE":
subclass_code = load_closure[i].attr
break
if subclass_code is None:
raise RuntimeError(
"Internal Error n_classdef: cannot find " "class body"
)
if hasattr(build_class[3], "__len__"):
if not subclass_info:
subclass_info = build_class[3]
elif hasattr(build_class[2], "__len__"):
subclass_info = build_class[2]
else:
raise "Internal Error n_classdef: cannot superclass name"
raise RuntimeError(
"Internal Error n_classdef: cannot " "superclass name"
)
elif node == "classdefdeco2":
subclass_info = node
subclass_code = build_class[1][0].attr

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2019-2022 by Rocky Bernstein
# Copyright (c) 2019-2023 by Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -16,12 +16,8 @@
"""
import re
from uncompyle6.semantics.consts import (
PRECEDENCE,
TABLE_DIRECT,
INDENT_PER_LEVEL,
)
from uncompyle6.semantics.consts import INDENT_PER_LEVEL, PRECEDENCE, TABLE_DIRECT
from uncompyle6.semantics.helper import flatten_list
FSTRING_CONVERSION_MAP = {1: "!s", 2: "!r", 3: "!a", "X": ":X"}
@@ -54,10 +50,13 @@ def customize_for_version37(self, version):
{
"and_not": ("%c and not %c", (0, "expr"), (2, "expr")),
"ann_assign": (
"%|%[2]{attr}: %c\n", 0,
"%|%[2]{attr}: %c\n",
0,
),
"ann_assign_init": (
"%|%[2]{attr}: %c = %c\n", 0, 1,
"%|%[2]{attr}: %c = %c\n",
0,
1,
),
"async_for_stmt": (
"%|async for %c in %c:\n%+%c%-\n\n",
@@ -89,9 +88,8 @@ def customize_for_version37(self, version):
"attributes37": (
"%[0]{pattr} import %c",
(0, "IMPORT_NAME_ATTR"),
(1, "IMPORT_FROM")
(1, "IMPORT_FROM"),
),
# nested await expressions like:
# return await (await bar())
# need parenthesis.
@@ -126,19 +124,24 @@ def customize_for_version37(self, version):
(0, PRECEDENCE["compare"] - 1),
(-2, PRECEDENCE["compare"] - 1),
),
"compare_chained2a_37": ('%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1)),
"compare_chained2b_false_37": ('%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1)),
"compare_chained2a_false_37": ('%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1)),
"compare_chained2a_37": (
'%[1]{pattr.replace("-", " ")} %p',
(0, PRECEDENCE["compare"] - 1),
),
"compare_chained2b_false_37": (
'%[1]{pattr.replace("-", " ")} %p',
(0, PRECEDENCE["compare"] - 1),
),
"compare_chained2a_false_37": (
'%[1]{pattr.replace("-", " ")} %p',
(0, PRECEDENCE["compare"] - 1),
),
"compare_chained2c_37": (
'%[3]{pattr.replace("-", " ")} %p %p',
(0, PRECEDENCE["compare"] - 1),
(6, PRECEDENCE["compare"] - 1),
),
'if_exp37': (
'%p if %c else %c',
(1, 'expr', 27), 0, 3
),
"if_exp37": ("%p if %c else %c", (1, "expr", 27), 0, 3),
"except_return": ("%|except:\n%+%c%-", 3),
"if_exp_37a": (
"%p if %p else %p",
@@ -153,9 +156,7 @@ def customize_for_version37(self, version):
(5, "expr", 27),
),
"ifstmtl": ("%|if %c:\n%+%c%-", (0, "testexpr"), (1, "_ifstmts_jumpl")),
'import_as37': (
"%|import %c as %c\n", 2, -2
),
"import_as37": ("%|import %c as %c\n", 2, -2),
"import_from37": ("%|from %[2]{pattr} import %c\n", (3, "importlist37")),
"import_from_as37": (
"%|from %c as %c\n",
@@ -178,12 +179,11 @@ def customize_for_version37(self, version):
(0, "get_aiter"),
(3, "list_iter"),
),
"list_if37": (" if %p%c", (0, 27), 1),
"list_if37_not": (" if not %p%c", (0, 27), 1),
"testfalse_not_or": ("not %c or %c", (0, "expr"), (2, "expr")),
"testfalse_not_and": ("not (%c)", 0),
"testfalsel": ("not %c", (0, "expr")),
"testfalsel": ("not %c", (0, "expr")),
"try_except36": ("%|try:\n%+%c%-%c\n\n", 1, -2),
"tryfinally36": ("%|try:\n%+%c%-%|finally:\n%+%c%-\n\n", (1, "returns"), 3),
"dict_unpack": ("{**%C}", (0, -1, ", **")),

View File

@@ -466,7 +466,12 @@ class ComprehensionMixin:
self.preorder(n[1])
else:
if self.version == (3, 0):
body = n[1]
if isinstance(n, Token):
body = store
elif len(n) > 1:
body = n[1]
else:
body = n[0]
else:
body = n[0]
self.preorder(body)

View File

@@ -783,7 +783,7 @@ class NonterminalActions:
def n_import_from(self, node):
relative_path_index = 0
if self.version >= (2, 5):
if node[relative_path_index].attr > 0:
if node[relative_path_index].pattr > 0:
node[2].pattr = ("." * node[relative_path_index].attr) + node[2].pattr
if self.version > (2, 7):
if isinstance(node[1].pattr, tuple):
@@ -1093,7 +1093,12 @@ class NonterminalActions:
self.write("{")
if node[0] in ["LOAD_SETCOMP", "LOAD_DICTCOMP"]:
if self.version == (3, 0):
iter_index = 6
if len(node) >= 6:
iter_index = 6
else:
assert node[1].kind.startswith("MAKE_FUNCTION")
iter_index = 2
pass
else:
iter_index = 1
self.comprehension_walk_newer(node, iter_index=iter_index, code_index=0)

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2015-2022 by Rocky Bernstein
# Copyright (c) 2015-2023 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 1999 John Aycock
@@ -131,8 +131,6 @@ Python.
import sys
IS_PYPY = "__pypy__" in sys.builtin_module_names
from spark_parser import GenericASTTraversal
from xdis import COMPILER_FLAG_BIT, iscode
from xdis.version_info import PYTHON_VERSION_TRIPLE
@@ -143,7 +141,7 @@ from uncompyle6.parsers.treenode import SyntaxTree
from uncompyle6.scanner import Code, get_scanner
from uncompyle6.scanners.tok import Token
from uncompyle6.semantics.check_ast import checker
from uncompyle6.semantics.consts import (ASSIGN_DOC_STRING, ASSIGN_TUPLE_PARAM,
from uncompyle6.semantics.consts import (ASSIGN_TUPLE_PARAM,
INDENT_PER_LEVEL, LINE_LENGTH, MAP,
MAP_DIRECT, NAME_MODULE, NONE, PASS,
PRECEDENCE, RETURN_LOCALS,
@@ -178,6 +176,8 @@ PARSER_DEFAULT_DEBUG = {
"dups": False,
}
IS_PYPY = "__pypy__" in sys.builtin_module_names
TREE_DEFAULT_DEBUG = {"before": False, "after": False}
DEFAULT_DEBUG_OPTS = {
@@ -978,7 +978,6 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
return result
# return self.traverse(node[1])
return f"({name}"
raise Exception("Can't find tuple parameter " + name)
def build_class(self, code):
"""Dump class definition, doc string and class body."""
@@ -1193,10 +1192,11 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
del ast # Save memory
return transform_tree
# The bytecode for the end of the main routine has a
# "return None". However, you can't issue a "return" statement in
# main. So as the old cigarette slogan goes: I'd rather switch (the token stream)
# than fight (with the grammar to not emit "return None").
# The bytecode for the end of the main routine has a "return
# None". However, you can't issue a "return" statement in
# main. So as the old cigarette slogan goes: I'd rather switch
# (the token stream) than fight (with the grammar to not emit
# "return None").
if self.hide_internal:
if len(tokens) >= 2 and not noneInNames:
if tokens[-1].kind in ("RETURN_VALUE", "RETURN_VALUE_LAMBDA"):
@@ -1257,6 +1257,7 @@ def code_deparse(
assert iscode(co)
if version is None:
version = PYTHON_VERSION_TRIPLE
@@ -1325,16 +1326,11 @@ def code_deparse(
assert not nonlocals
if version >= (3, 0):
load_op = "LOAD_STR"
else:
load_op = "LOAD_CONST"
# convert leading '__doc__ = "..." into doc string
try:
stmts = deparsed.ast
first_stmt = stmts[0][0]
if version >= 3.6:
first_stmt = stmts[0]
if version >= (3, 6):
if first_stmt[0] == "SETUP_ANNOTATIONS":
del stmts[0]
assert stmts[0] == "sstmt"
@@ -1342,13 +1338,13 @@ def code_deparse(
first_stmt = stmts[0][0]
pass
pass
if first_stmt == ASSIGN_DOC_STRING(co.co_consts[0], load_op):
if first_stmt == "docstring":
print_docstring(deparsed, "", co.co_consts[0])
del stmts[0]
if stmts[-1] == RETURN_NONE:
stmts.pop() # remove last node
# todo: if empty, add 'pass'
except:
except Exception:
pass
deparsed.FUTURE_UNICODE_LITERALS = (

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2019-2022 by Rocky Bernstein
# Copyright (c) 2019-2023 by Rocky Bernstein
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -119,15 +119,10 @@ class TreeTransform(GenericASTTraversal, object):
mkfunc_pattr = node[-1].pattr
if isinstance(mkfunc_pattr, tuple):
assert len(mkfunc_pattr, 4) and isinstance(mkfunc_pattr, int)
is_closure = node[-1].pattr[3] != 0
else:
# FIXME: This is what we had before. It is hokey and probably wrong.
is_closure = mkfunc_pattr == "closure"
assert len(mkfunc_pattr) == 4 and isinstance(mkfunc_pattr, int)
if (
(not is_closure)
and len(code.co_consts) > 0
len(code.co_consts) > 0
and isinstance(code.co_consts[0], str)
):
docstring_node = SyntaxTree(

View File

@@ -3,7 +3,7 @@
# More could be done here though.
from math import copysign
from xdis.version_info import PYTHON_VERSION
from xdis.version_info import PYTHON_VERSION_TRIPLE
def is_negative_zero(n):
@@ -36,7 +36,7 @@ def better_repr(v, version):
if len(v) == 1:
return "(%s,)" % better_repr(v[0], version)
return "(%s)" % ", ".join(better_repr(i, version) for i in v)
elif PYTHON_VERSION < 3.0 and isinstance(v, long):
elif PYTHON_VERSION_TRIPLE < (3, 0) and isinstance(v, long):
s = repr(v)
if version >= 3.0 and s[-1] == "L":
return s[:-1]

View File

@@ -14,4 +14,4 @@
# This file is suitable for sourcing inside POSIX shell as
# well as importing into Python
# fmt: off
__version__="3.9.0" # noqa
__version__="3.9.1.dev0" # noqa