python-uncompyle6 (mirror of https://github.com/rocky/python-uncompyle6.git)

Fix 3.5 if..pass bug
Update HISTORY.MD to include Dan Pascu. Some minor doc corrections
@@ -36,7 +36,7 @@ first subsequent public release announcement that I can find is
 From the CHANGES file found in
 [the tarball for that release](http://old-releases.ubuntu.com/ubuntu/pool/universe/d/decompyle2.2/decompyle2.2_2.2beta1.orig.tar.gz),
 it appears that Hartmut did most of the work to get this code to
-accept the full Python language. He added precidence to the table
+accept the full Python language. He added precedence to the table
 specifiers, support for multiple versions of Python, the
 pretty-printing of docstrings, lists, and hashes. He also wrote test and verification routines of
 deparsed bytecode, and used this in an extensive set of tests that he also wrote. He could verify against the entire Python library.
@@ -55,6 +55,11 @@ it doesn't look like he's done anything compiler-wise since SPARK). So
 I hope people will use the crazy-compilers service. I wish them the
 success that his good work deserves.

+Also looking at code I see Dan Pascu did a bit of work around 2005 on
+the Python scanner, parser, and marshaling routines. For example I
+see a bit of code to massage disassembly output to make it more amenable
+for deparsing. 2005 would put his work around the Python 2.4 releases.
+
 Next we get to
 ["uncompyle" and PyPI](https://pypi.python.org/pypi/uncompyle/1.1) and
 the era of git repositories. In contrast to decompyle, this now runs
New file (binary, not shown): test/bytecode_3.5/10_if_pass.pyc

New file: test/simple_source/branching/10_if_pass.py
@@ -0,0 +1,23 @@
+# Bug in Python 3.5 in disentangling jump "over" a "pass" statement
+# or a jump to the next instruction.
+
+# On Python 3.5 you should get
+# compare ::= expr expr COMPARE_OP
+# ...
+# jmp_false ::= POP_JUMP_IF_FALSE
+# ...
+
+from weakref import ref
+
+class _localimpl:
+
+    def create_dict(self, thread):
+        """Create a new dict for the current thread, and return it."""
+        localdict = {}
+        idt = id(thread)
+        def thread_deleted(_, idt=idt):
+            local = wrlocal()
+            if local is not None:  # bug is here
+                pass               # jumping over here
+        wrlocal = ref(self, local_deleted)
+        return localdict
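
For context (editor's illustration, not part of the commit): the pattern this test exercises can be seen directly with the standard dis module. On a CPython 3.5 interpreter, an "if" whose body is only "pass" compiles to a POP_JUMP_IF_FALSE whose target is the instruction immediately after the jump, which is the "jump to the next instruction" situation the scanner change later in this commit has to recognize.

    # Minimal sketch; mirrors the shape of the test case above.
    import dis

    def example(x):
        if x is not None:   # POP_JUMP_IF_FALSE ...
            pass            # ... whose target lands right after this "pass"

    dis.dis(example)        # inspect the jump and its target offset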
@@ -868,7 +868,7 @@ class Scanner26(scan.Scanner):

         Return the list of offsets.

-        This procedure is modelled after dis.findlables(), but here
+        This procedure is modelled after dis.findlabels(), but here
         for each target the number of jumps are counted.
         '''
@@ -581,7 +581,7 @@ class Scanner27(scan.Scanner):

         Return the list of offsets.

-        This procedure is modelled after dis.findlables(), but here
+        This procedure is modelled after dis.findlabels(), but here
         for each target the number of jumps are counted.
         '''
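
As background on the docstrings corrected here (an illustration only, not the project's code): the idea of a findlabels() variant that also counts how many jumps reach each target can be sketched with the modern dis API, whereas the 2.6/2.7 scanners walk the raw bytecode instead. The helper name below is hypothetical.

    import dis

    def count_jump_targets(code):
        # Like dis.findlabels(), but also count how many jumps reach each target.
        counts = {}
        for inst in dis.get_instructions(code):
            if inst.opcode in dis.hasjrel or inst.opcode in dis.hasjabs:
                counts[inst.argval] = counts.get(inst.argval, 0) + 1
        return counts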
@@ -1,10 +1,21 @@
-# Copyright (c) 2015 by Rocky Bernstein
+# Copyright (c) 2015, 2016 by Rocky Bernstein
 """
 Python 3 Generic bytecode scanner/deparser

 This overlaps various Python3's dis module, but it can be run from
-Python 2 and other versions of Python. Also, we save token information
-for later use in deparsing.
+Python versions other than the version running this code. Notably,
+run from Python version 2.
+
+Also we *modify* the instruction sequence to assist deparsing code.
+For example:
+  - we add "COME_FROM" instructions to help in figuring out
+    conditional branching and looping.
+  - LOAD_CONSTs are classified further into the type of thing
+    they load:
+      lambda's, genexpr's, {dict,set,list} comprehension's,
+  - PARAMETER counts appended {CALL,MAKE}_FUNCTION, BUILD_{TUPLE,SET,SLICE}
+
+Finally we save token information.
 """

 from __future__ import print_function
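
The COME_FROM idea described in the revised docstring can be pictured as a pass over the instruction stream that plants a pseudo-instruction at every jump target, so a grammar can see where control flow re-joins. A hedged sketch using made-up names (Pseudo, with_come_froms), not uncompyle6's actual token machinery:

    import dis
    from collections import namedtuple

    Pseudo = namedtuple('Pseudo', 'opname offset')

    def with_come_froms(code):
        # Yield each instruction, preceded by a COME_FROM marker when the
        # instruction's offset is the target of some jump.
        targets = {inst.argval for inst in dis.get_instructions(code)
                   if inst.opcode in dis.hasjrel or inst.opcode in dis.hasjabs}
        for inst in dis.get_instructions(code):
            if inst.offset in targets:
                yield Pseudo('COME_FROM', inst.offset)
            yield inst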
@@ -66,10 +77,10 @@ class Scanner3(scan.Scanner):
         # Scan for assertions. Later we will
         # turn 'LOAD_GLOBAL' to 'LOAD_ASSERT' for those
         # assertions

         self.load_asserts = set()
         for i in self.op_range(0, codelen):
-            if self.code[i] == POP_JUMP_IF_TRUE and self.code[i+3] == LOAD_GLOBAL:
+            if (self.code[i] == POP_JUMP_IF_TRUE and
+                self.code[i+3] == LOAD_GLOBAL):
                 if names[self.get_argument(i+3)] == 'AssertionError':
                     self.load_asserts.add(i+3)
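
For background on the scan above (illustrative only): an assert compiles to a conditional jump over the code that raises AssertionError, which is why a POP_JUMP_IF_TRUE immediately followed by a LOAD_GLOBAL of AssertionError marks a load that should later be rewritten to LOAD_ASSERT.

    import dis

    def f(x):
        assert x, "x must be truthy"

    # On the CPython versions this scanner targets, expect POP_JUMP_IF_TRUE
    # followed by LOAD_GLOBAL AssertionError in the output.
    dis.dis(f)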
@@ -258,7 +269,7 @@ class Scanner3(scan.Scanner):

        Return the list of offsets.

-       This procedure is modelled after dis.findlables(), but here
+       This procedure is modelled after dis.findlabels(), but here
        for each target the number of jumps is counted.
        """
        code = self.code
@@ -448,15 +459,20 @@ class Scanner3(scan.Scanner):
                self.fixed_jumps[offset] = rtarget
                return

-            # Does this jump to right after another cond jump?
-            # If so, it's part of a larger conditional
-            if (code[prev_op[target]] in (JUMP_IF_FALSE_OR_POP, JUMP_IF_TRUE_OR_POP,
-                POP_JUMP_IF_FALSE, POP_JUMP_IF_TRUE)) and (target > offset):
+            # Does this jump to right after another cond jump that is
+            # not myself? If so, it's part of a larger conditional.
+            # rocky: if we have a conditional jump to the next instruction, then
+            # possibly I am "skipping over" a "pass" or null statement.
+            if ((code[prev_op[target]] in
+                 (JUMP_IF_FALSE_OR_POP, JUMP_IF_TRUE_OR_POP,
+                  POP_JUMP_IF_FALSE, POP_JUMP_IF_TRUE)) and
+                (target > offset) and prev_op[target] != offset):
                self.fixed_jumps[offset] = prev_op[target]
                self.structs.append({'type': 'and/or',
                                     'start': start,
                                     'end': prev_op[target]})
                return

            # Is it an and inside if block
            if op == POP_JUMP_IF_FALSE:
                # Search for other POP_JUMP_IF_FALSE targetting the same op,
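
Why the extra prev_op[target] != offset guard matters (a toy illustration with made-up offsets, not the scanner's real data structures): when a conditional jump's target is the very next instruction, the instruction preceding the target is the jump itself, so without the guard the jump would be folded into an and/or structure with itself instead of being treated as a jump over an empty "pass" body.

    offset = 9            # offset of the POP_JUMP_IF_FALSE itself
    target = 12           # it jumps to the very next instruction
    prev_op = {12: 9}     # offset of the instruction preceding each offset
    is_and_or = (target > offset) and prev_op[target] != offset
    print(is_and_or)      # False: not part of an and/or chain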
@@ -37,9 +37,6 @@ class Scanner35(scan3.Scanner3):
         self.build_lines_data(co)
         self.build_prev_op()

-        # Get jump targets
-        # Format: {target offset: [jump offsets]}
-        jump_targets = self.find_jump_targets()
         bytecode = dis35.Bytecode(co)

         # self.lines contains (block,addrLastInstr)
@@ -61,12 +58,17 @@ class Scanner35(scan3.Scanner3):
         n = len(bs)
         for i in range(n):
             inst = bs[i]
             if inst.opname == 'POP_JUMP_IF_TRUE' and i+1 < n:
                 next_inst = bs[i+1]
                 if (next_inst.opname == 'LOAD_GLOBAL' and
                     next_inst.argval == 'AssertionError'):
                     self.load_asserts.add(next_inst.offset)

+        # Get jump targets
+        # Format: {target offset: [jump offsets]}
+        jump_targets = self.find_jump_targets()
+
         for inst in bytecode:
             if inst.offset in jump_targets:
                 jump_idx = 0
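
As a usage note (dis35 appears to be a bundled 3.5-flavored copy of the dis module; that reading is an assumption): iterating a Bytecode object yields Instruction records whose offset, opname, and argval fields are what the loop above checks jump targets and assert loads against. The standard-library equivalent:

    import dis

    def g(a, b):
        return a < b

    for inst in dis.Bytecode(g):
        print(inst.offset, inst.opname, inst.argval)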