Mirror of https://github.com/rocky/python-uncompyle6.git
Merge from 3.0
@@ -38,7 +38,7 @@ class Python16ParserSingle(Python16Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
-    p = Python15Parser()
+    p = Python16Parser()
     p.check_grammar()
     p.dump_grammar()
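This hunk makes the grammar self-check instantiate the parser that matches the module it lives in. For reference, a minimal way to drive the same check by hand; the import path uncompyle6.parsers.parse16 is an assumption inferred from the class names in the hunk and is not shown in this diff:

from uncompyle6.parsers.parse16 import Python16Parser

p = Python16Parser()
p.check_grammar()   # report problems in the collected grammar, as the __main__ block does
p.dump_grammar()    # print the grammar rules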
@@ -138,7 +138,7 @@ class Python37BaseParser(PythonParser):
         # Note: BUILD_TUPLE_UNPACK_WITH_CALL gets considered by
         # default because it starts with BUILD. So we'll set to ignore it from
         # the start.
-        custom_ops_processed = {"BUILD_TUPLE_UNPACK_WITH_CALL"}
+        custom_ops_processed = set(["BUILD_TUPLE_UNPACK_WITH_CALL"])

         # A set of instruction operation names that exist in the token stream.
         # We use this customize the grammar that we create.
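The change here is only a different spelling of the same one-element set; presumably the set([...]) call form is preferred so the module still parses on very old interpreters that predate {...} set-literal syntax. A minimal check of the equivalence:

ops_literal = {"BUILD_TUPLE_UNPACK_WITH_CALL"}     # set-literal syntax, Python 2.7+
ops_call = set(["BUILD_TUPLE_UNPACK_WITH_CALL"])   # call form, works on older Pythons too
assert ops_literal == ops_call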
@@ -22,6 +22,8 @@ other versions of Python. Also, we save token information for later
 use in deparsing.
 """

+from copy import copy
+
 # bytecode verification, verify(), uses jump_ops from here
 from xdis import iscode
 from xdis.bytecode import _get_const_info
@@ -350,7 +352,7 @@ class Scanner26(Scanner2):
         if show_asm in ("both", "after"):
             print("\n# ---- tokenization:")
             # FIXME: t.format() is changing tokens!
-            for t in tokens.copy():
+            for t in copy(tokens):
                 print(t.format(line_prefix=""))
             print()
         return tokens, customize
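Together with the new "from copy import copy" above, this swaps tokens.copy() for copy(tokens), presumably because list.copy() exists only on Python 3 while copy.copy() gives an equivalent shallow copy everywhere. A small sketch, with plain strings standing in for Token objects:

from copy import copy

tokens = ["LOAD_CONST", "RETURN_VALUE"]   # stand-ins for Token objects
snapshot = copy(tokens)                   # shallow copy: same elements, new list
assert snapshot == tokens and snapshot is not tokens
# tokens.copy() would raise AttributeError on a Python 2.x list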
@@ -33,6 +33,7 @@ For example:
 Finally we save token information.
 """

+from copy import deepcopy
 import xdis

 # Get all the opcodes into globals
@@ -285,7 +286,7 @@ class Scanner3(Scanner):
         )
         return new_tokens

-    def bound_map_from_inst(self, insts, next_tokens, t, i):
+    def bound_map_from_inst_35(self, insts, next_tokens, t, i):
         """
         Try to a sequence of instruction that ends with a BUILD_MAP into
         a sequence that can be parsed much faster, but inserting the
@@ -609,11 +610,10 @@ class Scanner3(Scanner):
                     continue

                 elif opname in ("BUILD_MAP",):
-                    bound_map_from_insts_fn = (
-                        self.bound_map_from_inst_35
-                        if self.version >= (3, 5)
-                        else self.bound_map_from_inst_pre35
-                    )
+                    if self.version >= (3, 5):
+                        bound_map_from_insts_fn = self.bound_map_from_inst_35
+                    else:
+                        bound_map_from_insts_fn = self.bound_map_from_inst_pre35
                     try_tokens = bound_map_from_insts_fn(
                         self.insts,
                         new_tokens,
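The rewritten branch selects the BUILD_MAP helper by bytecode version with an explicit if/else instead of a conditional expression; the behavior is unchanged. A stand-alone sketch of that dispatch, using a made-up class and placeholder handlers (only the two method names come from the diff):

class VersionDispatchDemo:
    def __init__(self, version):
        self.version = version

    def bound_map_from_inst_35(self):
        return "handler for 3.5 and later"

    def bound_map_from_inst_pre35(self):
        return "handler for versions before 3.5"

    def pick_handler(self):
        # same shape as the added lines in the hunk above
        if self.version >= (3, 5):
            bound_map_from_insts_fn = self.bound_map_from_inst_35
        else:
            bound_map_from_insts_fn = self.bound_map_from_inst_pre35
        return bound_map_from_insts_fn()

assert VersionDispatchDemo((3, 8)).pick_handler().endswith("later")
assert VersionDispatchDemo((3, 4)).pick_handler().endswith("before 3.5")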
@@ -898,7 +898,7 @@ class Scanner3(Scanner):
         if show_asm in ("both", "after"):
             print("\n# ---- tokenization:")
             # FIXME: t.format() is changing tokens!
-            for t in new_tokens.copy():
+            for t in deepcopy(new_tokens):
                 print(t.format(line_prefix=""))
             print()
         return new_tokens, customize
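Here the shallow new_tokens.copy() becomes deepcopy(new_tokens). Given the FIXME above, the point of a deep copy is presumably that t.format() mutates the token objects themselves, so a shallow copy of the list would still let the print loop alter the tokens being returned. A sketch with a made-up token class:

from copy import copy, deepcopy

class FakeToken:                        # stand-in, not the real Token class
    def __init__(self, name):
        self.name = name
    def format(self):
        self.name = self.name.lower()   # mutates the token, like the FIXME describes
        return self.name

new_tokens = [FakeToken("LOAD_CONST")]
for t in deepcopy(new_tokens):          # copies are mutated, originals are not
    t.format()
assert new_tokens[0].name == "LOAD_CONST"

for t in copy(new_tokens):              # a shallow copy still shares the objects
    t.format()
assert new_tokens[0].name == "load_const"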