Mirror of https://github.com/rocky/python-uncompyle6.git
Correct a couple of bugs...

We weren't distinguishing relative imports from absolute imports. Fixes #444. Picking out the docstring was broken too.
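For context, the distinction the fix relies on (an illustrative sketch, not part of the commit): CPython encodes the relative-import level as the constant pushed just before IMPORT_NAME, and a decompiler has to turn a positive level back into leading dots.

```python
# Illustrative only: the "level" constant that separates absolute from
# relative imports in the bytecode the scanner/deparser sees.
import dis

absolute = compile("from os import path", "<abs>", "exec")
relative = compile("from ..pkg import mod", "<rel>", "exec")

dis.dis(absolute)  # LOAD_CONST 0 before IMPORT_NAME os  -> absolute import
dis.dis(relative)  # LOAD_CONST 2 before IMPORT_NAME pkg -> render as "..pkg"
```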
@@ -1,4 +1,4 @@
-# Copyright (c) 2015-2023 by Rocky Bernstein
+# Copyright (c) 2015-2022 by Rocky Bernstein
 # Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 #
@@ -38,7 +38,7 @@ from __future__ import print_function
 from copy import copy

 from xdis import code2num, iscode, op_has_argument, instruction_size
-from xdis.bytecode import _get_const_info, _get_name_info
+from xdis.bytecode import _get_const_info
 from uncompyle6.scanner import Scanner, Token

 from sys import intern
@@ -320,7 +320,9 @@ class Scanner2(Scanner):
     "BUILD_SET",
 ):
     t = Token(
-        op_name, oparg, pattr, offset, self.linestarts.get(offset, None), op, has_arg, self.opc
+        op_name, oparg, pattr, offset,
+        self.linestarts.get(offset, None),
+        op, has_arg, self.opc
     )
     collection_type = op_name.split("_")[1]
     next_tokens = self.bound_collection_from_tokens(
@@ -360,7 +362,6 @@ class Scanner2(Scanner):
     pattr = const
     pass
 elif op in self.opc.NAME_OPS:
-    _, pattr = _get_name_info(oparg, names)
     pattr = names[oparg]
 elif op in self.opc.JREL_OPS:
     # use instead: hasattr(self, 'patch_continue'): ?
@@ -19,6 +19,7 @@ import re
 import sys

 intern = sys.intern
+from typing import Union


 def off2int(offset, prefer_last=True):

@@ -60,7 +61,7 @@ class Token:
     opname,
     attr=None,
     pattr=None,
-    offset=-1,
+    offset:Union[int, str]=-1,
     linestart=None,
     op=None,
     has_arg=None,
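The widened offset annotation matches what off2int (visible in the same file's context above) already copes with: token offsets are usually ints, but uncompyle6 also uses composite string offsets such as "52_1". A minimal stand-alone sketch of that conversion, assuming the "NN_MM" form; it is not the project's actual implementation:

```python
from typing import Union

def off2int_sketch(offset: Union[int, str]) -> int:
    """Collapse an offset that may be an int or a string like '52_1' to an int."""
    if isinstance(offset, int):
        return offset
    # Composite offsets keep the real bytecode offset before the underscore;
    # the suffix only disambiguates pseudo-instructions sharing that offset.
    base, _, _suffix = str(offset).partition("_")
    return int(base)

assert off2int_sketch(52) == 52
assert off2int_sketch("52_1") == 52
```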
@@ -783,7 +783,7 @@ class NonterminalActions:
     def n_import_from(self, node):
         relative_path_index = 0
         if self.version >= (2, 5):
-            if node[relative_path_index].attr > 0:
+            if node[relative_path_index].pattr > 0:
                 node[2].pattr = ("." * node[relative_path_index].attr) + node[2].pattr
             if self.version > (2, 7):
                 if isinstance(node[1].pattr, tuple):
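A tiny illustration of what the corrected branch produces for a positive import level (hypothetical values, not project code):

```python
# With an import level of 2 and an IMPORT_NAME argument of "pkg",
# the reconstructed source should read "from ..pkg import ...".
level = 2            # value carried by the leading LOAD_CONST node
module_name = "pkg"  # name carried by the IMPORT_NAME node
print(("." * level) + module_name)  # -> ..pkg
```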
@@ -1,4 +1,4 @@
-# Copyright (c) 2015-2022 by Rocky Bernstein
+# Copyright (c) 2015-2023 by Rocky Bernstein
 # Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 # Copyright (c) 1999 John Aycock
@@ -131,8 +131,6 @@ Python.

 import sys

-IS_PYPY = "__pypy__" in sys.builtin_module_names
-
 from spark_parser import GenericASTTraversal
 from xdis import COMPILER_FLAG_BIT, iscode
 from xdis.version_info import PYTHON_VERSION_TRIPLE
@@ -143,7 +141,7 @@ from uncompyle6.parsers.treenode import SyntaxTree
 from uncompyle6.scanner import Code, get_scanner
 from uncompyle6.scanners.tok import Token
 from uncompyle6.semantics.check_ast import checker
-from uncompyle6.semantics.consts import (ASSIGN_DOC_STRING, ASSIGN_TUPLE_PARAM,
+from uncompyle6.semantics.consts import (ASSIGN_TUPLE_PARAM,
     INDENT_PER_LEVEL, LINE_LENGTH, MAP,
     MAP_DIRECT, NAME_MODULE, NONE, PASS,
     PRECEDENCE, RETURN_LOCALS,
@@ -178,6 +176,8 @@ PARSER_DEFAULT_DEBUG = {
     "dups": False,
 }

+IS_PYPY = "__pypy__" in sys.builtin_module_names
+
 TREE_DEFAULT_DEBUG = {"before": False, "after": False}

 DEFAULT_DEBUG_OPTS = {
@@ -978,7 +978,6 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         return result
         # return self.traverse(node[1])
         return f"({name}"
-        raise Exception("Can't find tuple parameter " + name)

     def build_class(self, code):
         """Dump class definition, doc string and class body."""
@@ -1193,10 +1192,11 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         del ast # Save memory
         return transform_tree

-        # The bytecode for the end of the main routine has a
-        # "return None". However, you can't issue a "return" statement in
-        # main. So as the old cigarette slogan goes: I'd rather switch (the token stream)
-        # than fight (with the grammar to not emit "return None").
+        # The bytecode for the end of the main routine has a "return
+        # None". However, you can't issue a "return" statement in
+        # main. So as the old cigarette slogan goes: I'd rather switch
+        # (the token stream) than fight (with the grammar to not emit
+        # "return None").
         if self.hide_internal:
             if len(tokens) >= 2 and not noneInNames:
                 if tokens[-1].kind in ("RETURN_VALUE", "RETURN_VALUE_LAMBDA"):
@@ -1257,6 +1257,7 @@ def code_deparse(

     assert iscode(co)

+
     if version is None:
         version = PYTHON_VERSION_TRIPLE

@@ -1325,16 +1326,11 @@ def code_deparse(

     assert not nonlocals

-    if version >= (3, 0):
-        load_op = "LOAD_STR"
-    else:
-        load_op = "LOAD_CONST"
-
     # convert leading '__doc__ = "..." into doc string
     try:
         stmts = deparsed.ast
-        first_stmt = stmts[0][0]
-        if version >= 3.6:
+        first_stmt = stmts[0]
+        if version >= (3, 6):
             if first_stmt[0] == "SETUP_ANNOTATIONS":
                 del stmts[0]
                 assert stmts[0] == "sstmt"
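The version test fix matters because version here is a tuple such as (3, 8, 10); comparing a tuple against the float 3.6 raises TypeError on Python 3, and the enclosing bare except: (tightened to except Exception: in the next hunk) would have swallowed it silently, which is plausibly part of why docstring handling broke. A quick demonstration:

```python
version = (3, 8, 10)
print(version >= (3, 6))  # True: tuples compare element-wise
try:
    version >= 3.6
except TypeError as exc:
    print(exc)  # '>=' not supported between instances of 'tuple' and 'float'
```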
@@ -1342,13 +1338,13 @@ def code_deparse(
                 first_stmt = stmts[0][0]
                 pass
             pass
-        if first_stmt == ASSIGN_DOC_STRING(co.co_consts[0], load_op):
+        if first_stmt == "docstring":
             print_docstring(deparsed, "", co.co_consts[0])
             del stmts[0]
         if stmts[-1] == RETURN_NONE:
             stmts.pop() # remove last node
             # todo: if empty, add 'pass'
-    except:
+    except Exception:
         pass

     deparsed.FUTURE_UNICODE_LITERALS = (
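Background for the docstring change (illustrative, not from the commit): a module docstring compiles to an ordinary constant assignment to __doc__, which the deparser has to recognise, hand to print_docstring(), and drop from the statement list rather than printing it as a plain __doc__ = "..." assignment.

```python
import dis

mod = compile('"""Module docstring."""\nx = 1\n', "<demo>", "exec")
print(mod.co_consts[0])  # 'Module docstring.' -- the value passed to print_docstring()
dis.dis(mod)             # LOAD_CONST 'Module docstring.' followed by STORE_NAME __doc__, then the rest
```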