mirror of https://github.com/rocky/python-uncompyle6.git
Compare commits: 19 commits, 3.6.5...release-py
SHA1:
b00651d428
da8dccbaca
37272ae827
7f2bee46b7
c8a4dcf72b
012ff91cfb
e690ddd50a
45b7c1948c
e2fb7ca3d2
b3bda76582
ab6d322eca
1a8a0df107
0a37709b0a
98cd1417df
460069ceaa
316aa44f23
7133540c23
590231741d
a9349b8f3d
@@ -3,12 +3,7 @@ language: python
sudo: false

python:
- '3.5'
- '2.7.12'
- '2.6'
- '3.3'
- '3.4'
- '3.2'
- '2.7' # this is a cheat here because travis doesn't do 2.4-2.6

install:
- pip install -r requirements.txt
39 ChangeLog
@@ -1,6 +1,7 @@
2016-12-04 rocky <rb@dustyfeet.com>

    * uncompyle6/version.py: Get ready for release 2.9.7
    * : commit d22931cb49f0e28a0fbe48a7c1526b1f170a5b52 Author: rocky
      <rb@dustyfeet.com> Date: Sun Dec 4 07:31:34 2016 -0500

2016-11-28 rocky <rb@dustyfeet.com>

@@ -51,11 +52,34 @@
    * uncompyle6/semantics/pysource.py: Better line number tracking.
      Indent Python 2 list comprehensions, albeit badly. DRY code a
      little via indent_if_source_nl

2016-11-24 rocky <rb@dustyfeet.com>

    * circle.yml: CircleCI build

2016-11-24 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse3.py: Remove dup Python 3 grammar rule

2016-11-24 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse3.py, uncompyle6/scanners/scanner2.py:
      <2.7 "if" detection and dup Python 3 grammar rule

2016-11-24 rocky <rb@dustyfeet.com>

    * test/test_pyenvlib.py, uncompyle6/linenumbers.py,
      uncompyle6/main.py, uncompyle6/scanners/scanner2.py,
      uncompyle6/semantics/make_function.py,
      uncompyle6/semantics/pysource.py, uncompyle6/verify.py: Bug in 2.4
      "if" detection and... Wrong language used in old-style exceptions:
      use "except Error,e" not "except Error(e)"

2016-11-24 rocky <rb@dustyfeet.com>

    * __pkginfo__.py, pytest/test_grammar.py, uncompyle6/parser.py,
      uncompyle6/parsers/parse26.py: Python 2.6 grammar bug and...
      __pkginfo__.py: Bump spark_parser version for parse_flags 'dups'

2016-11-23 rocky <rb@dustyfeet.com>

    * __pkginfo__.py, pytest/test_grammar.py, uncompyle6/parser.py,
@@ -67,8 +91,17 @@

2016-11-23 rocky <rb@dustyfeet.com>

    * : commit 6aa1531972de83ecab15b4c96b89c873ea5a7458 Author: rocky
      <rb@dustyfeet.com> Date: Wed Nov 23 00:48:38 2016 -0500
    * : commit 7133540c235e16f02d2db62cb903b70aa311de20 Author: rocky
      <rb@dustyfeet.com> Date: Wed Nov 23 08:26:12 2016 -0500

2016-11-23 rocky <rb@dustyfeet.com>

    * : commit a9349b8f3d12b2aa0cd88286617c1af9cccad018 Author: rocky
      <rb@dustyfeet.com> Date: Tue Nov 22 17:49:47 2016 -0500

2016-11-23 rocky <rb@dustyfeet.com>

    * circle.yml: Circle CI uses 2.7.10 and 2.7.12 is not available

2016-11-22 rocky <rb@dustyfeet.com>
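The "old-style exceptions" entry above is about the Python 2 except-clause spelling. A minimal illustrative sketch of the two forms follows; the Error class is a stand-in, not code from this repository:

class Error(Exception):
    pass

try:
    raise Error("boom")
except Error, e:      # comma form: binds the caught exception to e (valid back to 2.4)
    print e           # Python 2 print statement

# By contrast, "except Error(e):" does not bind anything; it evaluates the call
# Error(e) as the pattern to match, which is the wording the ChangeLog entry corrects.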
2 Makefile
@@ -37,7 +37,7 @@ check-3.0 check-3.1 check-3.2 check-3.5 check-3.6:
    $(MAKE) -C test $@

#:Tests for Python 2.6 (doesn't have pytest)
check-2.6:
check-2.4 check-2.5 check-2.6:
    $(MAKE) -C test $@

#:PyPy 2.6.1 or PyPy 5.0.1
@@ -10,4 +10,4 @@ dependencies:
    - pip install -r requirements-dev.txt
test:
  override:
    - python ./setup.py develop && make check-2.7
    - python ./setup.py develop && make check-2.6
@@ -1,150 +0,0 @@
# std
import os
# test
import pytest
import hypothesis
from hypothesis import strategies as st
# uncompyle6
from uncompyle6 import PYTHON_VERSION, deparse_code


@st.composite
def expressions(draw):
    # todo : would be nice to generate expressions using hypothesis however
    # this is pretty involved so for now just use a corpus of expressions
    # from which to select.
    return draw(st.sampled_from((
        'abc',
        'len(items)',
        'x + 1',
        'lineno',
        'container',
        'self.attribute',
        'self.method()',
        # These expressions are failing, I think these are control
        # flow problems rather than problems with FORMAT_VALUE,
        # however I need to confirm this...
        #'sorted(items, key=lambda x: x.name)',
        #'func(*args, **kwargs)',
        #'text or default',
        #'43 if life_the_universe and everything else None'
    )))


@st.composite
def format_specifiers(draw):
    """
    Generate a valid format specifier using the rules:

    format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]
    fill ::= <any character>
    align ::= "<" | ">" | "=" | "^"
    sign ::= "+" | "-" | " "
    width ::= integer
    precision ::= integer
    type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"

    See https://docs.python.org/2/library/string.html

    :param draw: Let hypothesis draw from other strategies.

    :return: An example format_specifier.
    """
    alphabet_strategy = st.characters(min_codepoint=ord('a'), max_codepoint=ord('z'))
    fill = draw(st.one_of(alphabet_strategy, st.none()))
    align = draw(st.sampled_from(list('<>=^')))
    fill_align = (fill + align or '') if fill else ''

    type_ = draw(st.sampled_from('bcdeEfFgGnosxX%'))
    can_have_sign = type_ in 'deEfFgGnoxX%'
    can_have_comma = type_ in 'deEfFgG%'
    can_have_precision = type_ in 'fFgG'
    can_have_pound = type_ in 'boxX%'
    can_have_zero = type_ in 'oxX'

    sign = draw(st.sampled_from(list('+- ') + [''])) if can_have_sign else ''
    pound = draw(st.sampled_from(('#', '',))) if can_have_pound else ''
    zero = draw(st.sampled_from(('0', '',))) if can_have_zero else ''

    int_strategy = st.integers(min_value=1, max_value=1000)

    width = draw(st.one_of(int_strategy, st.none()))
    width = str(width) if width is not None else ''

    comma = draw(st.sampled_from((',', '',))) if can_have_comma else ''
    if can_have_precision:
        precision = draw(st.one_of(int_strategy, st.none()))
        precision = '.' + str(precision) if precision else ''
    else:
        precision = ''

    return ''.join((fill_align, sign, pound, zero, width, comma, precision, type_,))


@st.composite
def fstrings(draw):
    """
    Generate a valid f-string.
    See https://www.python.org/dev/peps/pep-0498/#specification

    :param draw: Let hypothsis draw from other strategies.

    :return: A valid f-string.
    """
    character_strategy = st.characters(
        blacklist_characters='\r\n\'\\s{}',
        min_codepoint=1,
        max_codepoint=1000,
    )
    is_raw = draw(st.booleans())
    integer_strategy = st.integers(min_value=0, max_value=3)
    expression_count = draw(integer_strategy)
    content = []
    for _ in range(expression_count):
        expression = draw(expressions())
        conversion = draw(st.sampled_from(('', '!s', '!r', '!a',)))
        has_specifier = draw(st.booleans())
        specifier = ':' + draw(format_specifiers()) if has_specifier else ''
        content.append('{{{}{}}}'.format(expression, conversion, specifier))
    content.append(draw(st.text(character_strategy)))
    content = ''.join(content)
    return "f{}'{}'".format('r' if is_raw else '', content)


@pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need at least python 3.6')
@hypothesis.given(format_specifiers())
def test_format_specifiers(format_specifier):
    """Verify that format_specifiers generates valid specifiers"""
    try:
        exec('"{:' + format_specifier + '}".format(0)')
    except ValueError as e:
        if 'Unknown format code' not in str(e):
            raise


def run_test(text):
    hypothesis.assume(len(text))
    hypothesis.assume("f'{" in text)
    expr = text + '\n'
    code = compile(expr, '<string>', 'single')
    deparsed = deparse_code(PYTHON_VERSION, code, compile_mode='single')
    recompiled = compile(deparsed.text, '<string>', 'single')
    if recompiled != code:
        assert 'dis(' + deparsed.text.strip('\n') + ')' == 'dis(' + expr.strip('\n') + ')'


@pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need at least python 3.6')
@hypothesis.given(fstrings())
def test_uncompyle_fstring(fstring):
    """Verify uncompyling fstring bytecode"""
    run_test(fstring)


@pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need at least python 3.6')
@pytest.mark.parametrize('fstring', [
    "f'{abc}{abc!s}'",
    "f'{abc}0'",
])
def test_uncompyle_direct(fstring):
    """useful for debugging"""
    run_test(fstring)
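For orientation, a format specifier built from the grammar quoted in the docstring above composes like this; the values are illustrative only, not taken from the deleted tests:

spec = '*>+10.2f'    # fill '*', align '>', sign '+', width 10, precision .2, type 'f'
print('{:{spec}}'.format(3.14159, spec=spec))    # prints '*****+3.14'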
@@ -20,7 +20,7 @@ check:
    $(MAKE) check-$$PYTHON_VERSION

#: Run working tests from Python 2.6 or 2.7
check-2.6 check-2.7: check-bytecode-2 check-bytecode-3 check-bytecode-1 check-2.7-ok
check-2.4 check-2.5 check-2.6 check-2.7: check-bytecode-2 check-bytecode-3 check-bytecode-1 check-2.7-ok

#: Run working tests from Python 3.0
check-3.0: check-bytecode
@@ -100,7 +100,7 @@ check-bytecode-2.5:

#: Check deparsing Python 2.6
check-bytecode-2.6:
    $(PYTHON) test_pythonlib.py --bytecode-2.6
    $(PYTHON) test_pythonlib.py --bytecode-2.6 --weak-verify

#: Check deparsing Python 2.7
check-bytecode-2.7:
BIN test/bytecode_2.6/04_if_and_bug.pyc (new file; binary file not shown)
BIN test/bytecode_3.4/05_while_true_break.pyc (new file; binary file not shown)
@@ -19,8 +19,6 @@ Step 2: Run the test:
    test_pyenvlib --mylib --verify # decompile verify 'mylib'
"""

from __future__ import print_function

from uncompyle6 import main, PYTHON3
import os, time, shutil
from fnmatch import fnmatch
@@ -27,8 +27,6 @@ Step 2: Run the test:
    test_pythonlib.py --mylib --verify # decompile verify 'mylib'
"""

from __future__ import print_function

import getopt, os, py_compile, sys, shutil, tempfile, time

from uncompyle6 import PYTHON_VERSION
@@ -127,8 +125,10 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
    if opts['do_compile']:
        compiled_version = opts['compiled_version']
        if compiled_version and PYTHON_VERSION != compiled_version:
            print("Not compiling: desired Python version is %s but we are running %s" %
                  (compiled_version, PYTHON_VERSION), file=sys.stderr)
            sys.stderr.write("Not compiling: "
                             "desired Python version is %s "
                             "but we are running %s" %
                             (compiled_version, PYTHON_VERSION))
        else:
            for root, dirs, basenames in os.walk(src_dir):
                file_matches(files, root, basenames, PY)
@@ -146,8 +146,8 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
        file_matches(files, dirname, basenames, obj_patterns)

    if not files:
        print("Didn't come up with any files to test! Try with --compile?",
              file=sys.stderr)
        sys.stderr.write("Didn't come up with any files to test! "
                         "Try with --compile?")
        exit(1)

    os.chdir(cwd)
@@ -161,9 +161,9 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
    except ValueError:
        pass

    print(time.ctime())
    print('Source directory: ', src_dir)
    print('Output directory: ', target_dir)
    print time.ctime()
    print 'Source directory: ', src_dir
    print 'Output directory: ', target_dir
    try:
        _, _, failed_files, failed_verify = \
            main(src_dir, target_dir, files, [],
@@ -227,14 +227,13 @@ if __name__ == '__main__':
        if os.path.isdir(src_dir):
            checked_dirs.append([src_dir, pattern, target_dir])
        else:
            print("Can't find directory %s. Skipping" % src_dir,
                  file=sys.stderr)
            sys.stderr.write("Can't find directory %s. Skipping" % src_dir)
            continue
        last_compile_version = compiled_version
        pass

    if not checked_dirs:
        print("No directories found to check", file=sys.stderr)
        sys.stderr.write("No directories found to check\n")
        sys.exit(1)

    test_opts['compiled_version'] = last_compile_version
@@ -3,7 +3,6 @@
|
||||
#
|
||||
# Copyright (c) 2015-2016 by Rocky Bernstein <rb@dustyfeet.com>
|
||||
#
|
||||
from __future__ import print_function
|
||||
import sys, os, getopt
|
||||
|
||||
from uncompyle6.disas import disassemble_file
|
||||
@@ -26,7 +25,7 @@ Options:
|
||||
-V | --version show version and stop
|
||||
-h | --help show this message
|
||||
|
||||
""".format(program)
|
||||
""" % (program, program)
|
||||
|
||||
PATTERNS = ('*.pyc', '*.pyo')
|
||||
|
||||
@@ -37,15 +36,15 @@ Type -h for for full help.""" % program
|
||||
native = True
|
||||
|
||||
if len(sys.argv) == 1:
|
||||
print("No file(s) given", file=sys.stderr)
|
||||
print(Usage_short, file=sys.stderr)
|
||||
sys.stderr.write("No file(s) given\n")
|
||||
sys.stderr.write(Usage_short)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
opts, files = getopt.getopt(sys.argv[1:], 'hVU',
|
||||
['help', 'version', 'uncompyle6'])
|
||||
except getopt.GetoptError as e:
|
||||
print('%s: %s' % (os.path.basename(sys.argv[0]), e), file=sys.stderr)
|
||||
except getopt.GetoptError(e):
|
||||
sys.stderr.write('%s: %s' % (os.path.basename(sys.argv[0]), e))
|
||||
sys.exit(-1)
|
||||
|
||||
for opt, val in opts:
|
||||
@@ -59,15 +58,14 @@ Type -h for for full help.""" % program
|
||||
native = False
|
||||
else:
|
||||
print(opt)
|
||||
print(Usage_short, file=sys.stderr)
|
||||
sys.stderr.write(Usage_short)
|
||||
sys.exit(1)
|
||||
|
||||
for file in files:
|
||||
if os.path.exists(files[0]):
|
||||
disassemble_file(file, sys.stdout, native)
|
||||
else:
|
||||
print("Can't read %s - skipping" % files[0],
|
||||
file=sys.stderr)
|
||||
sys.stderr.write("Can't read %s - skipping\n" % files[0])
|
||||
pass
|
||||
pass
|
||||
return
|
||||
|
@@ -4,7 +4,6 @@
|
||||
# Copyright (c) 2015-2016 by Rocky Bernstein
|
||||
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
|
||||
#
|
||||
from __future__ import print_function
|
||||
import sys, os, getopt, time
|
||||
|
||||
program, ext = os.path.splitext(os.path.basename(__file__))
|
||||
@@ -65,11 +64,11 @@ def usage():
|
||||
|
||||
|
||||
def main_bin():
|
||||
if not (sys.version_info[0:2] in ((2, 6), (2, 7),
|
||||
(3, 1), (3, 2), (3, 3),
|
||||
if not (sys.version_info[0:2] in ((2, 4), (2, 5), (2, 6), (2, 7),
|
||||
(3, 2), (3, 3),
|
||||
(3, 4), (3, 5), (3, 6))):
|
||||
print('Error: %s requires Python 2.6-2.7, or 3.1-3.6' % program,
|
||||
file=sys.stderr)
|
||||
sys.stderr.write('Error: %s requires Python 2.4 2.5 2.6, 2.7, '
|
||||
'3.2, 3.3, 3.4, 3.5, or 3.6' % program)
|
||||
sys.exit(-1)
|
||||
|
||||
do_verify = recurse_dirs = False
|
||||
@@ -84,8 +83,8 @@ def main_bin():
|
||||
opts, files = getopt.getopt(sys.argv[1:], 'hagtdrVo:c:p:',
|
||||
'help asm grammar linemaps recurse timestamp tree '
|
||||
'verify version showgrammar'.split(' '))
|
||||
except getopt.GetoptError as e:
|
||||
print('%s: %s' % (os.path.basename(sys.argv[0]), e), file=sys.stderr)
|
||||
except getopt.GetoptError(e):
|
||||
sys.stderr.write('%s: %s\n' % (os.path.basename(sys.argv[0]), e))
|
||||
sys.exit(-1)
|
||||
|
||||
options = {}
|
||||
@@ -119,7 +118,7 @@ def main_bin():
|
||||
elif opt in ('--recurse', '-r'):
|
||||
recurse_dirs = True
|
||||
else:
|
||||
print(opt, file=sys.stderr)
|
||||
sys.stderr.write(opt)
|
||||
usage()
|
||||
|
||||
# expand directory if specified
|
||||
@@ -144,7 +143,7 @@ def main_bin():
|
||||
files = [f[sb_len:] for f in files]
|
||||
|
||||
if not files:
|
||||
print("No files given", file=sys.stderr)
|
||||
sys.stderr.write("No files given\n")
|
||||
usage()
|
||||
|
||||
if outfile == '-':
|
||||
|
@@ -16,8 +16,6 @@ Second, we need structured instruction information for the
|
||||
want to run on Python 2.7.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
from collections import deque
|
||||
|
||||
@@ -37,10 +35,9 @@ def disco(version, co, out=None, is_pypy=False):
|
||||
|
||||
# store final output stream for case of error
|
||||
real_out = out or sys.stdout
|
||||
print('# Python %s' % version, file=real_out)
|
||||
real_out.write('# Python %s\n' % version)
|
||||
if co.co_filename:
|
||||
print('# Embedded file name: %s' % co.co_filename,
|
||||
file=real_out)
|
||||
real_out.write('# Embedded file name: %s\n' % co.co_filename)
|
||||
|
||||
scanner = get_scanner(version, is_pypy=is_pypy)
|
||||
|
||||
@@ -52,16 +49,15 @@ def disco_loop(disasm, queue, real_out):
|
||||
while len(queue) > 0:
|
||||
co = queue.popleft()
|
||||
if co.co_name != '<module>':
|
||||
print('\n# %s line %d of %s' %
|
||||
(co.co_name, co.co_firstlineno, co.co_filename),
|
||||
file=real_out)
|
||||
real_out.write('\n# %s line %d of %s\n' %
|
||||
(co.co_name, co.co_firstlineno, co.co_filename))
|
||||
tokens, customize = disasm(co)
|
||||
for t in tokens:
|
||||
if iscode(t.pattr):
|
||||
queue.append(t.pattr)
|
||||
elif iscode(t.attr):
|
||||
queue.append(t.attr)
|
||||
print(t, file=real_out)
|
||||
real_out.write(t)
|
||||
pass
|
||||
pass
|
||||
|
||||
|
@@ -10,7 +10,7 @@ def line_number_mapping(pyc_filename, src_filename):
|
||||
source_size) = load_module(pyc_filename)
|
||||
try:
|
||||
code2 = load_file(src_filename)
|
||||
except SyntaxError as e:
|
||||
except SyntaxError, e:
|
||||
return str(e)
|
||||
|
||||
queue = deque([code1, code2])
|
||||
|
@@ -1,4 +1,3 @@
|
||||
from __future__ import print_function
|
||||
import datetime, os, subprocess, sys, tempfile
|
||||
|
||||
from uncompyle6 import verify, IS_PYPY
|
||||
@@ -22,31 +21,36 @@ def uncompyle(
|
||||
|
||||
# store final output stream for case of error
|
||||
real_out = out or sys.stdout
|
||||
co_pypy_str = 'PyPy ' if is_pypy else ''
|
||||
run_pypy_str = 'PyPy ' if IS_PYPY else ''
|
||||
print('# uncompyle6 version %s\n'
|
||||
'# %sPython bytecode %s%s\n# Decompiled from: %sPython %s' %
|
||||
(VERSION, co_pypy_str, bytecode_version,
|
||||
" (%d)" % magic_int if magic_int else "",
|
||||
run_pypy_str, '\n# '.join(sys.version.split('\n'))),
|
||||
file=real_out)
|
||||
if co.co_filename:
|
||||
print('# Embedded file name: %s' % co.co_filename,
|
||||
file=real_out)
|
||||
if timestamp:
|
||||
print('# Compiled at: %s' % datetime.datetime.fromtimestamp(timestamp),
|
||||
file=real_out)
|
||||
if source_size:
|
||||
print('# Size of source mod 2**32: %d bytes' % source_size,
|
||||
file=real_out)
|
||||
if is_pypy:
|
||||
co_pypy_str = 'PyPy '
|
||||
else:
|
||||
co_pypy_str = ''
|
||||
|
||||
try:
|
||||
pysource.deparse_code(bytecode_version, co, out, showasm, showast,
|
||||
showgrammar, code_objects=code_objects,
|
||||
is_pypy=is_pypy)
|
||||
except pysource.SourceWalkerError as e:
|
||||
# deparsing failed
|
||||
raise pysource.SourceWalkerError(str(e))
|
||||
if IS_PYPY:
|
||||
run_pypy_str = 'PyPy '
|
||||
else:
|
||||
run_pypy_str = ''
|
||||
|
||||
if magic_int:
|
||||
m = str(magic_int)
|
||||
else:
|
||||
m = ""
|
||||
real_out.write('# uncompyle6 version %s\n'
|
||||
'# %sPython bytecode %s%s\n# Decompiled from: %sPython %s\n' %
|
||||
(VERSION, co_pypy_str, bytecode_version,
|
||||
" (%s)" % m, run_pypy_str,
|
||||
'\n# '.join(sys.version.split('\n'))))
|
||||
if co.co_filename:
|
||||
real_out.write('# Embedded file name: %s\n' % co.co_filename)
|
||||
if timestamp:
|
||||
real_out.write('# Compiled at: %s\n' %
|
||||
datetime.datetime.fromtimestamp(timestamp))
|
||||
if source_size:
|
||||
real_out.write('# Size of source mod 2**32: %d bytes\n' % source_size)
|
||||
|
||||
pysource.deparse_code(bytecode_version, co, out, showasm, showast,
|
||||
showgrammar, code_objects=code_objects,
|
||||
is_pypy=is_pypy)
|
||||
|
||||
|
||||
def uncompyle_file(filename, outstream=None, showasm=None, showast=False,
|
||||
@@ -126,8 +130,9 @@ def main(in_base, out_base, files, codes, outfile=None,
|
||||
prefix = prefix[:-len('.py')]
|
||||
junk, outfile = tempfile.mkstemp(suffix=".py",
|
||||
prefix=prefix)
|
||||
# Unbuffer output
|
||||
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
|
||||
# Unbuffer output if possible
|
||||
buffering = -1 if sys.stdout.isatty() else 0
|
||||
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', buffering)
|
||||
tee = subprocess.Popen(["tee", outfile], stdin=subprocess.PIPE)
|
||||
os.dup2(tee.stdin.fileno(), sys.stdout.fileno())
|
||||
os.dup2(tee.stdin.fileno(), sys.stderr.fileno())
|
||||
@@ -143,9 +148,9 @@ def main(in_base, out_base, files, codes, outfile=None,
|
||||
try:
|
||||
uncompyle_file(infile, outstream, showasm, showast, showgrammar)
|
||||
tot_files += 1
|
||||
except (ValueError, SyntaxError, ParserError, pysource.SourceWalkerError) as e:
|
||||
except (ValueError, SyntaxError, ParserError, pysource.SourceWalkerError):
|
||||
sys.stdout.write("\n")
|
||||
sys.stderr.write("\n# file %s\n# %s\n" % (infile, e))
|
||||
sys.stderr.write("# file %s\n" % (infile))
|
||||
failed_files += 1
|
||||
except KeyboardInterrupt:
|
||||
if outfile:
|
||||
@@ -179,31 +184,35 @@ def main(in_base, out_base, files, codes, outfile=None,
|
||||
msg = verify.compare_code_with_srcfile(infile, outfile, weak_verify=weak_verify)
|
||||
if not outfile:
|
||||
if not msg:
|
||||
print('\n# okay decompiling %s' % infile)
|
||||
print '\n# okay decompiling %s' % infile
|
||||
okay_files += 1
|
||||
else:
|
||||
print('\n# %s\n\t%s', infile, msg)
|
||||
except verify.VerifyCmpError as e:
|
||||
print '\n# %s\n\t%s', infile, msg
|
||||
except verify.VerifyCmpError, e:
|
||||
print(e)
|
||||
verify_failed_files += 1
|
||||
os.rename(outfile, outfile + '_unverified')
|
||||
sys.stderr.write("### Error Verifying %s\n" % filename)
|
||||
sys.stderr.write(str(e) + "\n")
|
||||
if not outfile:
|
||||
sys.stderr.write("### Error Verifying %s" %
|
||||
filename)
|
||||
sys.stderr.write(e)
|
||||
if raise_on_error:
|
||||
raise
|
||||
pass
|
||||
pass
|
||||
pass
|
||||
elif do_verify:
|
||||
sys.stderr.write("\n### uncompile successful, but no file to compare against\n")
|
||||
sys.stderr.write("\n### uncompile successful, "
|
||||
"but no file to compare against")
|
||||
pass
|
||||
else:
|
||||
okay_files += 1
|
||||
if not outfile:
|
||||
mess = '\n# okay decompiling'
|
||||
# mem_usage = __memUsage()
|
||||
print(mess, infile)
|
||||
print mess, infile
|
||||
if outfile:
|
||||
sys.stdout.write("%s\r" %
|
||||
status_msg(do_verify, tot_files, okay_files, failed_files, verify_failed_files))
|
||||
@@ -230,11 +239,11 @@ def status_msg(do_verify, tot_files, okay_files, failed_files,
|
||||
verify_failed_files):
|
||||
if tot_files == 1:
|
||||
if failed_files:
|
||||
return "decompile failed"
|
||||
return "\n# decompile failed"
|
||||
elif verify_failed_files:
|
||||
return "decompile verify failed"
|
||||
return "\n# decompile verify failed"
|
||||
else:
|
||||
return "Successfully decompiled file"
|
||||
return "\n# Successfully decompiled file"
|
||||
pass
|
||||
pass
|
||||
mess = "decompiled %i files: %i okay, %i failed" % (tot_files, okay_files, failed_files)
|
||||
|
@@ -6,8 +6,6 @@
|
||||
Common uncompyle parser routines.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
from xdis.code import iscode
|
||||
@@ -76,7 +74,10 @@ class PythonParser(GenericASTBuilder):
|
||||
def fix(c):
|
||||
s = str(c)
|
||||
i = s.find('_')
|
||||
return s if i == -1 else s[:i]
|
||||
if i == -1:
|
||||
return s
|
||||
else:
|
||||
return s[:i]
|
||||
|
||||
prefix = ''
|
||||
if parent and tokens:
|
||||
@@ -107,7 +108,10 @@ class PythonParser(GenericASTBuilder):
|
||||
err_token = instructions[index]
|
||||
print("Instruction context:")
|
||||
for i in range(start, finish):
|
||||
indent = ' ' if i != index else '-> '
|
||||
if i != index:
|
||||
indent = ' '
|
||||
else:
|
||||
indent = '-> '
|
||||
print("%s%s" % (indent, instructions[i]))
|
||||
raise ParserError(err_token, err_token.offset)
|
||||
|
||||
|
@@ -12,8 +12,6 @@ If we succeed in creating a parse tree, then we have a Python program
|
||||
that a later phase can turn into a sequence of ASCII text.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
|
||||
from uncompyle6.parsers.astnode import AST
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
|
@@ -113,16 +113,10 @@ class Python26Parser(Python2Parser):
|
||||
|
||||
break_stmt ::= BREAK_LOOP JUMP_BACK
|
||||
|
||||
# Semantic actions want the else to be at position 3
|
||||
ifelsestmt ::= testexpr c_stmts_opt jf_cf_pop else_suite come_froms
|
||||
ifelsestmt ::= testexpr c_stmts_opt filler else_suitel come_froms POP_TOP
|
||||
|
||||
# Semantic actions want else_suitel to be at index 3
|
||||
ifelsestmtl ::= testexpr c_stmts_opt jb_cf_pop else_suitel
|
||||
ifelsestmtc ::= testexpr c_stmts_opt ja_cf_pop else_suitec
|
||||
|
||||
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE come_froms POP_TOP
|
||||
|
||||
# Semantic actions want suite_stmts_opt to be at index 3
|
||||
withstmt ::= expr setupwith SETUP_FINALLY suite_stmts_opt
|
||||
POP_BLOCK LOAD_CONST COME_FROM WITH_CLEANUP END_FINALLY
|
||||
@@ -159,6 +153,29 @@ class Python26Parser(Python2Parser):
|
||||
|
||||
while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM
|
||||
|
||||
ifstmt ::= testexpr_then _ifstmts_jump
|
||||
|
||||
# Semantic actions want the else to be at position 3
|
||||
ifelsestmt ::= testexpr c_stmts_opt jf_cf_pop else_suite come_froms
|
||||
ifelsestmt ::= testexpr_then c_stmts_opt jf_cf_pop else_suite come_froms
|
||||
ifelsestmt ::= testexpr c_stmts_opt filler else_suitel come_froms POP_TOP
|
||||
ifelsestmt ::= testexpr_then c_stmts_opt filler else_suitel come_froms POP_TOP
|
||||
|
||||
# Semantic actions want else_suitel to be at index 3
|
||||
ifelsestmtl ::= testexpr_then c_stmts_opt jb_cf_pop else_suitel
|
||||
ifelsestmtc ::= testexpr_then c_stmts_opt ja_cf_pop else_suitec
|
||||
|
||||
iflaststmt ::= testexpr_then c_stmts_opt JUMP_ABSOLUTE come_froms POP_TOP
|
||||
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE come_froms POP_TOP
|
||||
|
||||
testexpr_then ::= testtrue_then
|
||||
testexpr_then ::= testfalse_then
|
||||
testtrue_then ::= expr jmp_true_then
|
||||
testfalse_then ::= expr jmp_false_then
|
||||
|
||||
jmp_false_then ::= JUMP_IF_FALSE THEN POP_TOP
|
||||
jmp_true_then ::= JUMP_IF_TRUE THEN POP_TOP
|
||||
|
||||
# Common with 2.7
|
||||
while1stmt ::= SETUP_LOOP return_stmts bp_come_from
|
||||
while1stmt ::= SETUP_LOOP return_stmts COME_FROM
|
||||
@@ -196,16 +213,16 @@ class Python26Parser(Python2Parser):
|
||||
genexpr_func ::= setup_loop_lf FOR_ITER designator comp_iter JUMP_BACK come_from_pop
|
||||
jb_bp_come_from
|
||||
genexpr ::= LOAD_GENEXPR MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1 COME_FROM
|
||||
|
||||
list_if ::= list_if ::= expr jmp_false_then list_iter
|
||||
'''
|
||||
|
||||
def p_ret26(self, args):
|
||||
'''
|
||||
ret_and ::= expr jmp_false ret_expr_or_cond COME_FROM
|
||||
ret_or ::= expr jmp_true ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr jmp_false expr RETURN_END_IF POP_TOP ret_expr_or_cond
|
||||
ret_cond ::= expr jmp_false expr ret_expr_or_cond
|
||||
ret_cond_not ::= expr jmp_true expr RETURN_END_IF POP_TOP ret_expr_or_cond
|
||||
ret_and ::= expr jmp_false ret_expr_or_cond COME_FROM
|
||||
ret_or ::= expr jmp_true ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr jmp_false_then expr RETURN_END_IF POP_TOP ret_expr_or_cond
|
||||
ret_cond ::= expr jmp_false_then expr ret_expr_or_cond
|
||||
ret_cond_not ::= expr jmp_true_then expr RETURN_END_IF POP_TOP ret_expr_or_cond
|
||||
|
||||
return_if_stmt ::= ret_expr RETURN_END_IF POP_TOP
|
||||
return_stmt ::= ret_expr RETURN_VALUE POP_TOP
|
||||
@@ -215,17 +232,37 @@ class Python26Parser(Python2Parser):
|
||||
'''
|
||||
|
||||
def p_except26(self, args):
|
||||
'''
|
||||
"""
|
||||
except_suite ::= c_stmts_opt jmp_abs POP_TOP
|
||||
'''
|
||||
"""
|
||||
|
||||
def p_misc26(self, args):
|
||||
'''
|
||||
"""
|
||||
conditional ::= expr jmp_false expr jf_cf_pop expr come_from_opt
|
||||
and ::= expr JUMP_IF_FALSE POP_TOP expr JUMP_IF_FALSE POP_TOP
|
||||
cmp_list ::= expr cmp_list1 ROT_TWO COME_FROM POP_TOP _come_from
|
||||
'''
|
||||
|
||||
conditional_lambda ::= expr jmp_false_then return_if_stmt return_stmt LAMBDA_MARKER
|
||||
"""
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python26Parser, self).add_custom_rules(tokens, customize)
|
||||
self.check_reduce['and'] = 'AST'
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
invalid = super(Python26Parser,
|
||||
self).reduce_is_invalid(rule, ast,
|
||||
tokens, first, last)
|
||||
if invalid:
|
||||
return invalid
|
||||
if rule == ('and', ('expr', 'jmp_false', 'expr', '\\e_come_from_opt')):
|
||||
# Test that jmp_false jumps to the end of "and"
|
||||
# or that it jumps to the same place as the end of "and"
|
||||
jmp_false = ast[1][0]
|
||||
jmp_target = jmp_false.offset + jmp_false.attr + 3
|
||||
return not (jmp_target == tokens[last].offset or
|
||||
tokens[last].pattr == jmp_false.pattr)
|
||||
return False
|
||||
class Python26ParserSingle(Python2Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
|
@@ -15,8 +15,6 @@ If we succeed in creating a parse tree, then we have a Python program
|
||||
that a later phase can turn into a sequence of ASCII text.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
|
||||
from uncompyle6.parsers.astnode import AST
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
@@ -461,7 +459,10 @@ class Python3Parser(PythonParser):
|
||||
"""Python 3.3 added a an addtional LOAD_CONST before MAKE_FUNCTION and
|
||||
this has an effect on many rules.
|
||||
"""
|
||||
new_rule = rule % (('LOAD_CONST ') * (1 if self.version >= 3.3 else 0))
|
||||
if self.version >= 3.3:
|
||||
new_rule = rule % (('LOAD_CONST ') * 1)
|
||||
else:
|
||||
new_rule = rule % (('LOAD_CONST ') * 0)
|
||||
self.add_unique_rule(new_rule, opname, attr, customize)
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
|
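The LOAD_CONST templating above works by %-substitution into a grammar-rule string. A small sketch with a made-up rule; the real rules live elsewhere in parse3.py:

rule = 'mkfunc ::= %sLOAD_CODE MAKE_FUNCTION_0'
print(rule % ('LOAD_CONST ' * 1))   # Python >= 3.3: mkfunc ::= LOAD_CONST LOAD_CODE MAKE_FUNCTION_0
print(rule % ('LOAD_CONST ' * 0))   # Python <  3.3: mkfunc ::= LOAD_CODE MAKE_FUNCTION_0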
@@ -2,7 +2,6 @@
|
||||
"""
|
||||
spark grammar differences over Python 3.1 for Python 3.0.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from uncompyle6.parsers.parse3 import Python3Parser
|
||||
|
@@ -2,7 +2,6 @@
|
||||
"""
|
||||
spark grammar differences over Python 3.2 for Python 3.1.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from uncompyle6.parsers.parse32 import Python32Parser
|
||||
|
@@ -2,8 +2,6 @@
|
||||
"""
|
||||
spark grammar differences over Python 3 for Python 3.2.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from uncompyle6.parsers.parse3 import Python3Parser
|
||||
|
||||
|
@@ -2,7 +2,6 @@
|
||||
"""
|
||||
spark grammar differences over Python 3.2 for Python 3.3.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from uncompyle6.parsers.parse32 import Python32Parser
|
||||
|
@@ -2,7 +2,6 @@
|
||||
"""
|
||||
spark grammar differences over Python 3.4 for Python 3.5.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
|
@@ -2,7 +2,6 @@
|
||||
"""
|
||||
spark grammar differences over Python 3.5 for Python 3.6.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
|
@@ -10,8 +10,6 @@ scanner/ingestion module. From here we call various version-specific
|
||||
scanners, e.g. for Python 2.7 or 3.4.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
from uncompyle6 import PYTHON3, IS_PYPY
|
||||
@@ -228,7 +226,10 @@ class Scanner(object):
|
||||
if op < self.opc.HAVE_ARGUMENT:
|
||||
return 1
|
||||
else:
|
||||
return 2 if self.version >= 3.6 else 3
|
||||
if self.version >= 3.6:
|
||||
return 2
|
||||
else:
|
||||
return 3
|
||||
|
||||
def remove_mid_line_ifs(self, ifs):
|
||||
"""
|
||||
|
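The size rule rewritten above follows the bytecode layout: an opcode with an operand is the opcode plus a 2-byte argument (3 bytes) before Python 3.6, and a fixed 2 bytes from 3.6 on, when wordcode was introduced. A standalone sketch of the same computation; the function name is illustrative, not the scanner API:

def instruction_size(op, have_argument_threshold, version):
    # ops below the threshold carry no operand bytes
    if op < have_argument_threshold:
        return 1
    # 3.6+ wordcode: fixed 2-byte instructions; earlier: opcode + 16-bit argument
    return 2 if version >= 3.6 else 3

print(instruction_size(100, 90, 2.7))   # prints 3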
@@ -13,13 +13,13 @@ import uncompyle6.scanners.scanner21 as scan
|
||||
from xdis.opcodes import opcode_15
|
||||
JUMP_OPs = opcode_15.JUMP_OPs
|
||||
|
||||
# We base this off of 2.2 instead of the other way around
|
||||
# We base this off of 2.1 instead of the other way around
|
||||
# because we cleaned things up this way.
|
||||
# The history is that 2.7 support is the cleanest,
|
||||
# then from that we got 2.6 and so on.
|
||||
class Scanner15(scan.Scanner21):
|
||||
def __init__(self, show_asm=False):
|
||||
scan.Scanner21.__init__(self, show_asm)
|
||||
scan.Scanner21.__init__(self, show_asm=False)
|
||||
self.opc = opcode_15
|
||||
self.opname = opcode_15.opname
|
||||
self.version = 1.5
|
||||
|
@@ -20,9 +20,13 @@ For example:
|
||||
Finally we save token information.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
from uncompyle6 import PYTHON_VERSION
|
||||
|
||||
if PYTHON_VERSION < 2.6:
|
||||
from xdis.namedtuple25 import namedtuple
|
||||
else:
|
||||
from collections import namedtuple
|
||||
|
||||
from collections import namedtuple
|
||||
from array import array
|
||||
|
||||
from uncompyle6.scanner import op_has_argument
|
||||
@@ -84,7 +88,9 @@ class Scanner2(scan.Scanner):
|
||||
cause specific rules for the specific number of arguments they take.
|
||||
"""
|
||||
|
||||
show_asm = self.show_asm if not show_asm else show_asm
|
||||
if not show_asm:
|
||||
show_asm = self.show_asm
|
||||
|
||||
# show_asm = 'after'
|
||||
if show_asm in ('both', 'before'):
|
||||
from xdis.bytecode import Bytecode
|
||||
@@ -478,7 +484,7 @@ class Scanner2(scan.Scanner):
|
||||
elif op in self.setup_ops:
|
||||
count_SETUP_ += 1
|
||||
|
||||
def detect_structure(self, offset, op):
|
||||
def detect_control_flow(self, offset, op):
|
||||
"""
|
||||
Detect type of block structures and their boundaries to fix optimized jumps
|
||||
in python2.3+
|
||||
@@ -676,6 +682,8 @@ class Scanner2(scan.Scanner):
|
||||
self.fixed_jumps[offset] = rtarget
|
||||
return
|
||||
|
||||
jump_if_offset = offset
|
||||
|
||||
start = offset+3
|
||||
pre = self.prev
|
||||
|
||||
@@ -698,6 +706,10 @@ class Scanner2(scan.Scanner):
|
||||
'end': pre[target]})
|
||||
return
|
||||
|
||||
# The op offset just before the target jump offset is important
|
||||
# in making a determination of what we have. Save that.
|
||||
pre_rtarget = pre[rtarget]
|
||||
|
||||
# Is it an "and" inside an "if" or "while" block
|
||||
if op == self.opc.PJIF:
|
||||
|
||||
@@ -708,22 +720,22 @@ class Scanner2(scan.Scanner):
|
||||
|
||||
# If we still have any offsets in set, start working on it
|
||||
if match:
|
||||
if code[pre[rtarget]] in self.jump_forward \
|
||||
and pre[rtarget] not in self.stmts \
|
||||
and self.restrict_to_parent(self.get_target(pre[rtarget]), parent) == rtarget:
|
||||
if code[pre[pre[rtarget]]] == self.opc.JUMP_ABSOLUTE \
|
||||
if code[pre_rtarget] in self.jump_forward \
|
||||
and pre_rtarget not in self.stmts \
|
||||
and self.restrict_to_parent(self.get_target(pre_rtarget), parent) == rtarget:
|
||||
if code[pre[pre_rtarget]] == self.opc.JUMP_ABSOLUTE \
|
||||
and self.remove_mid_line_ifs([offset]) \
|
||||
and target == self.get_target(pre[pre[rtarget]]) \
|
||||
and (pre[pre[rtarget]] not in self.stmts or self.get_target(pre[pre[rtarget]]) > pre[pre[rtarget]])\
|
||||
and 1 == len(self.remove_mid_line_ifs(self.rem_or(start, pre[pre[rtarget]], self.pop_jump_if, target))):
|
||||
and target == self.get_target(pre[pre_rtarget]) \
|
||||
and (pre[pre_rtarget] not in self.stmts or self.get_target(pre[pre_rtarget]) > pre[pre_rtarget])\
|
||||
and 1 == len(self.remove_mid_line_ifs(self.rem_or(start, pre[pre_rtarget], self.pop_jump_if, target))):
|
||||
pass
|
||||
elif code[pre[pre[rtarget]]] == self.opc.RETURN_VALUE \
|
||||
elif code[pre[pre_rtarget]] == self.opc.RETURN_VALUE \
|
||||
and self.remove_mid_line_ifs([offset]) \
|
||||
and 1 == (len(set(self.remove_mid_line_ifs(self.rem_or(start,
|
||||
pre[pre[rtarget]],
|
||||
pre[pre_rtarget],
|
||||
self.pop_jump_if, target)))
|
||||
| set(self.remove_mid_line_ifs(self.rem_or(start, pre[pre[rtarget]],
|
||||
(self.opc.PJIF, self.opc.PJIT, self.opc.JUMP_ABSOLUTE), pre[rtarget], True))))):
|
||||
| set(self.remove_mid_line_ifs(self.rem_or(start, pre[pre_rtarget],
|
||||
(self.opc.PJIF, self.opc.PJIT, self.opc.JUMP_ABSOLUTE), pre_rtarget, True))))):
|
||||
pass
|
||||
else:
|
||||
fix = None
|
||||
@@ -756,7 +768,7 @@ class Scanner2(scan.Scanner):
|
||||
else:
|
||||
assert_offset = offset + 3
|
||||
if (assert_offset) in self.load_asserts:
|
||||
if code[pre[rtarget]] == self.opc.RAISE_VARARGS:
|
||||
if code[pre_rtarget] == self.opc.RAISE_VARARGS:
|
||||
return
|
||||
self.load_asserts.remove(assert_offset)
|
||||
|
||||
@@ -765,7 +777,7 @@ class Scanner2(scan.Scanner):
|
||||
pass
|
||||
elif code[next] in self.jump_forward and target == self.get_target(next):
|
||||
if code[pre[next]] == self.opc.PJIF:
|
||||
if code[next] == self.opc.JUMP_FORWARD or target != rtarget or code[pre[pre[rtarget]]] not in (self.opc.JUMP_ABSOLUTE, self.opc.RETURN_VALUE):
|
||||
if code[next] == self.opc.JUMP_FORWARD or target != rtarget or code[pre[pre_rtarget]] not in (self.opc.JUMP_ABSOLUTE, self.opc.RETURN_VALUE):
|
||||
self.fixed_jumps[offset] = pre[next]
|
||||
return
|
||||
elif code[next] == self.opc.JUMP_ABSOLUTE and code[target] in self.jump_forward:
|
||||
@@ -782,17 +794,17 @@ class Scanner2(scan.Scanner):
|
||||
return
|
||||
|
||||
if self.version == 2.7:
|
||||
if code[pre[rtarget]] == self.opc.JUMP_ABSOLUTE and pre[rtarget] in self.stmts \
|
||||
and pre[rtarget] != offset and pre[pre[rtarget]] != offset:
|
||||
if code[pre_rtarget] == self.opc.JUMP_ABSOLUTE and pre_rtarget in self.stmts \
|
||||
and pre_rtarget != offset and pre[pre_rtarget] != offset:
|
||||
if code[rtarget] == self.opc.JUMP_ABSOLUTE and code[rtarget+3] == self.opc.POP_BLOCK:
|
||||
if code[pre[pre[rtarget]]] != self.opc.JUMP_ABSOLUTE:
|
||||
if code[pre[pre_rtarget]] != self.opc.JUMP_ABSOLUTE:
|
||||
pass
|
||||
elif self.get_target(pre[pre[rtarget]]) != target:
|
||||
elif self.get_target(pre[pre_rtarget]) != target:
|
||||
pass
|
||||
else:
|
||||
rtarget = pre[rtarget]
|
||||
rtarget = pre_rtarget
|
||||
else:
|
||||
rtarget = pre[rtarget]
|
||||
rtarget = pre_rtarget
|
||||
|
||||
# Does the "jump if" jump beyond a jump op?
|
||||
# That is, we have something like:
|
||||
@@ -808,7 +820,6 @@ class Scanner2(scan.Scanner):
|
||||
# There are other contexts we may need to consider
|
||||
# like whether the target is "END_FINALLY"
|
||||
# or if the condition jump is to a forward location
|
||||
pre_rtarget = pre[rtarget]
|
||||
code_pre_rtarget = code[pre_rtarget]
|
||||
|
||||
if code_pre_rtarget in self.jump_forward:
|
||||
@@ -828,20 +839,86 @@ class Scanner2(scan.Scanner):
|
||||
jump_target = self.get_target(next_offset, next_op)
|
||||
if jump_target in self.setup_loops:
|
||||
self.structs.append({'type': 'while-loop',
|
||||
'start': start - 3,
|
||||
'start': jump_if_offset,
|
||||
'end': jump_target})
|
||||
self.fixed_jumps[start-3] = jump_target
|
||||
self.fixed_jumps[jump_if_offset] = jump_target
|
||||
return
|
||||
|
||||
end = self.restrict_to_parent(if_end, parent)
|
||||
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start-3,
|
||||
'end': pre_rtarget})
|
||||
if_then_maybe = None
|
||||
|
||||
if 2.2 <= self.version <= 2.6:
|
||||
# Take the JUMP_IF target. In an "if/then", it will be
|
||||
# a POP_TOP instruction and the instruction before it
|
||||
# will be a JUMP_FORWARD to just after the POP_TOP.
|
||||
# For example:
|
||||
# Good:
|
||||
# 3 JUMP_IF_FALSE 33 'to 39'
|
||||
# ..
|
||||
# 36 JUMP_FORWARD 1 'to 40'
|
||||
# 39 POP_TOP
|
||||
# 40 ...
|
||||
# example:
|
||||
|
||||
# BAD (is an "and"):
|
||||
# 28 JUMP_IF_FALSE 4 'to 35'
|
||||
# ...
|
||||
# 32 JUMP_ABSOLUTE 40 'to 40' # should be 36 or there should
|
||||
# # be a COME_FROM at the pop top
|
||||
# # before 40 to 35
|
||||
# 35 POP_TOP
|
||||
# 36 ...
|
||||
# 39 POP_TOP
|
||||
# 39_0 COME_FROM 3
|
||||
# 40 ...
|
||||
|
||||
if self.opc.opname[code[jump_if_offset]].startswith('JUMP_IF'):
|
||||
jump_if_target = code[jump_if_offset+1]
|
||||
if self.opc.opname[code[jump_if_target + jump_if_offset + 3]] == 'POP_TOP':
|
||||
jump_inst = jump_if_target + jump_if_offset
|
||||
jump_offset = code[jump_inst+1]
|
||||
jump_op = self.opc.opname[code[jump_inst]]
|
||||
if (jump_op == 'JUMP_FORWARD' and jump_offset == 1):
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start-3,
|
||||
'end': pre_rtarget})
|
||||
|
||||
self.thens[start] = end
|
||||
elif jump_op == 'JUMP_ABSOLUTE':
|
||||
if_then_maybe = {'type': 'if-then',
|
||||
'start': start-3,
|
||||
'end': pre_rtarget}
|
||||
|
||||
elif self.version == 2.7:
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start-3,
|
||||
'end': pre_rtarget})
|
||||
|
||||
self.not_continue.add(pre_rtarget)
|
||||
|
||||
if rtarget < end:
|
||||
# We have an "else" block of some kind.
|
||||
# Is it associated with "if_then_maybe" seen above?
|
||||
# These will be linked in this funny way:
|
||||
|
||||
# 198 JUMP_IF_FALSE 18 'to 219'
|
||||
# 201 POP_TOP
|
||||
# ...
|
||||
# 216 JUMP_ABSOLUTE 256 'to 256'
|
||||
# 219 POP_TOP
|
||||
# ...
|
||||
# 252 JUMP_FORWARD 1 'to 256'
|
||||
# 255 POP_TOP
|
||||
# 256
|
||||
if if_then_maybe and jump_op == 'JUMP_ABSOLUTE':
|
||||
jump_target = self.get_target(jump_inst, code[jump_inst])
|
||||
if self.opc.opname[code[end]] == 'JUMP_FORWARD':
|
||||
end_target = self.get_target(end, code[end])
|
||||
if jump_target == end_target:
|
||||
self.structs.append(if_then_maybe)
|
||||
self.thens[start] = end
|
||||
|
||||
self.structs.append({'type': 'else',
|
||||
'start': rtarget,
|
||||
'end': end})
|
||||
@@ -850,6 +927,7 @@ class Scanner2(scan.Scanner):
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start,
|
||||
'end': rtarget})
|
||||
self.thens[start] = rtarget
|
||||
if self.version == 2.7 or code[pre_rtarget+1] != self.opc.JUMP_FORWARD:
|
||||
self.return_end_ifs.add(pre_rtarget)
|
||||
|
||||
@@ -883,11 +961,12 @@ class Scanner2(scan.Scanner):
|
||||
self.ignore_if = set()
|
||||
self.build_statement_indices()
|
||||
|
||||
# Containers filled by detect_structure()
|
||||
# Containers filled by detect_control_flow()
|
||||
self.not_continue = set()
|
||||
self.return_end_ifs = set()
|
||||
self.setup_loop_targets = {} # target given setup_loop offset
|
||||
self.setup_loops = {} # setup_loop offset given target
|
||||
self.thens = {} # JUMP_IF's that separate the 'then' part of an 'if'
|
||||
|
||||
targets = {}
|
||||
for offset in self.op_range(0, n):
|
||||
@@ -895,7 +974,7 @@ class Scanner2(scan.Scanner):
|
||||
|
||||
# Determine structures and fix jumps in Python versions
|
||||
# since 2.3
|
||||
self.detect_structure(offset, op)
|
||||
self.detect_control_flow(offset, op)
|
||||
|
||||
if op_has_argument(op, self.opc):
|
||||
label = self.fixed_jumps.get(offset)
|
||||
@@ -934,8 +1013,10 @@ class Scanner2(scan.Scanner):
|
||||
|
||||
# FIXME: rocky: I think we need something like this...
|
||||
if offset not in set(self.ignore_if) or self.version == 2.7:
|
||||
source = (self.setup_loops[label]
|
||||
if label in self.setup_loops else offset)
|
||||
if label in self.setup_loops:
|
||||
source = self.setup_loops[label]
|
||||
else:
|
||||
source = offset
|
||||
targets[label] = targets.get(label, []) + [source]
|
||||
pass
|
||||
|
||||
|
@@ -19,7 +19,7 @@ JUMP_OPs = opcode_21.JUMP_OPs
|
||||
# then from that we got 2.6 and so on.
|
||||
class Scanner21(scan.Scanner22):
|
||||
def __init__(self, show_asm=False):
|
||||
scan.Scanner22.__init__(self, show_asm)
|
||||
scan.Scanner22.__init__(self, show_asm=False)
|
||||
self.opc = opcode_21
|
||||
self.opname = opcode_21.opname
|
||||
self.version = 2.1
|
||||
|
@@ -19,7 +19,7 @@ JUMP_OPs = opcode_22.JUMP_OPs
|
||||
# then from that we got 2.6 and so on.
|
||||
class Scanner22(scan.Scanner23):
|
||||
def __init__(self, show_asm=False):
|
||||
scan.Scanner23.__init__(self, show_asm)
|
||||
scan.Scanner23.__init__(self, show_asm=False)
|
||||
self.opc = opcode_22
|
||||
self.opname = opcode_22.opname
|
||||
self.version = 2.2
|
||||
|
@@ -86,7 +86,9 @@ class Scanner26(scan.Scanner2):
|
||||
cause specific rules for the specific number of arguments they take.
|
||||
"""
|
||||
|
||||
show_asm = self.show_asm if not show_asm else show_asm
|
||||
if not show_asm:
|
||||
show_asm = self.show_asm
|
||||
|
||||
# show_asm = 'after'
|
||||
if show_asm in ('both', 'before'):
|
||||
from xdis.bytecode import Bytecode
|
||||
@@ -94,35 +96,32 @@ class Scanner26(scan.Scanner2):
|
||||
for instr in bytecode.get_instructions(co):
|
||||
print(instr._disassemble())
|
||||
|
||||
# from xdis.bytecode import Bytecode
|
||||
# bytecode = Bytecode(co, self.opc)
|
||||
# for instr in bytecode.get_instructions(co):
|
||||
# print(instr._disassemble())
|
||||
|
||||
# Container for tokens
|
||||
tokens = []
|
||||
|
||||
customize = {}
|
||||
if self.is_pypy:
|
||||
customize['PyPy'] = 1
|
||||
|
||||
Token = self.Token # shortcut
|
||||
|
||||
n = self.setup_code(co)
|
||||
codelen = self.setup_code(co)
|
||||
|
||||
self.build_lines_data(co, n)
|
||||
self.build_prev_op(n)
|
||||
self.build_lines_data(co, codelen)
|
||||
self.build_prev_op(codelen)
|
||||
|
||||
free, names, varnames = self.unmangle_code_names(co, classname)
|
||||
self.names = names
|
||||
|
||||
codelen = len(self.code)
|
||||
|
||||
# Scan for assertions. Later we will
|
||||
# turn 'LOAD_GLOBAL' to 'LOAD_ASSERT'.
|
||||
# 'LOAD_ASSERT' is used in assert statements.
|
||||
self.load_asserts = set()
|
||||
for i in self.op_range(0, n):
|
||||
# We need to detect the difference between
|
||||
# "raise AssertionError" and
|
||||
# "assert"
|
||||
for i in self.op_range(0, codelen):
|
||||
# We need to detect the difference between:
|
||||
# raise AssertionError
|
||||
# and
|
||||
# assert ...
|
||||
if (self.code[i] == self.opc.JUMP_IF_TRUE and
|
||||
i + 4 < codelen and
|
||||
self.code[i+3] == self.opc.POP_TOP and
|
||||
@@ -167,6 +166,11 @@ class Scanner26(scan.Scanner2):
|
||||
offset="%s_%d" % (offset, jump_idx),
|
||||
has_arg = True))
|
||||
jump_idx += 1
|
||||
elif offset in self.thens:
|
||||
tokens.append(Token(
|
||||
'THEN', None, self.thens[offset],
|
||||
offset="%s_0" % offset,
|
||||
has_arg = True))
|
||||
|
||||
has_arg = (op >= self.opc.HAVE_ARGUMENT)
|
||||
if has_arg:
|
||||
|
@@ -7,8 +7,6 @@ grammar parsing.
|
||||
"""
|
||||
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.scanners.scanner2 import Scanner2
|
||||
|
||||
from uncompyle6 import PYTHON3
|
||||
|
@@ -20,9 +20,13 @@ For example:
|
||||
Finally we save token information.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
from uncompyle6 import PYTHON_VERSION
|
||||
|
||||
if PYTHON_VERSION < 2.6:
|
||||
from xdis.namedtuple25 import namedtuple
|
||||
else:
|
||||
from collections import namedtuple
|
||||
|
||||
from collections import namedtuple
|
||||
from array import array
|
||||
|
||||
from uncompyle6.scanner import Scanner, op_has_argument
|
||||
@@ -147,7 +151,9 @@ class Scanner3(Scanner):
|
||||
cause specific rules for the specific number of arguments they take.
|
||||
"""
|
||||
|
||||
show_asm = self.show_asm if not show_asm else show_asm
|
||||
if not show_asm:
|
||||
show_asm = self.show_asm
|
||||
|
||||
# show_asm = 'after'
|
||||
if show_asm in ('both', 'before'):
|
||||
bytecode = Bytecode(co, self.opc)
|
||||
@@ -199,6 +205,7 @@ class Scanner3(Scanner):
|
||||
# Get jump targets
|
||||
# Format: {target offset: [jump offsets]}
|
||||
jump_targets = self.find_jump_targets(show_asm)
|
||||
last_op_was_break = False
|
||||
|
||||
for inst in bytecode:
|
||||
|
||||
@@ -321,10 +328,12 @@ class Scanner3(Scanner):
|
||||
if target <= inst.offset:
|
||||
next_opname = self.opname[self.code[inst.offset+3]]
|
||||
if (inst.offset in self.stmts and
|
||||
next_opname not in ('END_FINALLY', 'POP_BLOCK',
|
||||
(next_opname not in ('END_FINALLY', 'POP_BLOCK',
|
||||
# Python 3.0 only uses POP_TOP
|
||||
'POP_TOP')
|
||||
and inst.offset not in self.not_continue):
|
||||
and inst.offset not in self.not_continue) or
|
||||
(tokens[-1].type == 'RETURN_VALUE' and
|
||||
self.version < 3.5)):
|
||||
opname = 'CONTINUE'
|
||||
else:
|
||||
opname = 'JUMP_BACK'
|
||||
@@ -334,15 +343,21 @@ class Scanner3(Scanner):
|
||||
# There are other situations where we don't catch
|
||||
# CONTINUE as well.
|
||||
if tokens[-1].type == 'JUMP_BACK' and tokens[-1].attr <= argval:
|
||||
# intern is used because we are changing the *previous* token
|
||||
tokens[-1].type = intern('CONTINUE')
|
||||
|
||||
if tokens[-2].type == 'BREAK_LOOP':
|
||||
del tokens[-1]
|
||||
else:
|
||||
# intern is used because we are changing the *previous* token
|
||||
tokens[-1].type = intern('CONTINUE')
|
||||
if last_op_was_break and opname == 'CONTINUE':
|
||||
last_op_was_break = False
|
||||
continue
|
||||
elif op == self.opc.RETURN_VALUE:
|
||||
if inst.offset in self.return_end_ifs:
|
||||
opname = 'RETURN_END_IF'
|
||||
elif inst.offset in self.load_asserts:
|
||||
opname = 'LOAD_ASSERT'
|
||||
|
||||
last_op_was_break = opname == 'BREAK_LOOP'
|
||||
tokens.append(
|
||||
Token(
|
||||
type_ = opname,
|
||||
@@ -434,7 +449,7 @@ class Scanner3(Scanner):
|
||||
self.build_statement_indices()
|
||||
self.else_start = {}
|
||||
|
||||
# Containers filled by detect_structure()
|
||||
# Containers filled by detect_control_flow()
|
||||
self.not_continue = set()
|
||||
self.return_end_ifs = set()
|
||||
self.setup_loop_targets = {} # target given setup_loop offset
|
||||
@@ -446,7 +461,7 @@ class Scanner3(Scanner):
|
||||
|
||||
# Determine structures and fix jumps in Python versions
|
||||
# since 2.3
|
||||
self.detect_structure(offset, targets)
|
||||
self.detect_control_flow(offset, targets)
|
||||
|
||||
has_arg = (op >= op3.HAVE_ARGUMENT)
|
||||
if has_arg:
|
||||
@@ -573,7 +588,7 @@ class Scanner3(Scanner):
|
||||
|
||||
return target
|
||||
|
||||
def detect_structure(self, offset, targets):
|
||||
def detect_control_flow(self, offset, targets):
|
||||
"""
|
||||
Detect structures and their boundaries to fix optimized jumps
|
||||
in python2.3+
|
||||
@@ -600,7 +615,6 @@ class Scanner3(Scanner):
|
||||
parent = struct
|
||||
|
||||
if op == self.opc.SETUP_LOOP:
|
||||
|
||||
# We categorize loop types: 'for', 'while', 'while 1' with
|
||||
# possibly suffixes '-loop' and '-else'
|
||||
# Try to find the jump_back instruction of the loop.
|
||||
@@ -618,20 +632,30 @@ class Scanner3(Scanner):
|
||||
jump_back = self.last_instr(start, end, self.opc.JUMP_ABSOLUTE,
|
||||
next_line_byte, False)
|
||||
|
||||
if jump_back and jump_back != self.prev_op[end] and self.is_jump_forward(jump_back+3):
|
||||
if (code[self.prev_op[end]] == self.opc.RETURN_VALUE
|
||||
or (code[self.prev_op[end]] == self.opc.POP_BLOCK
|
||||
and code[self.prev_op[self.prev_op[end]]] == self.opc.RETURN_VALUE)):
|
||||
jump_forward_offset = jump_back+3
|
||||
return_val_offset1 = self.prev[self.prev[end]]
|
||||
|
||||
if (jump_back and jump_back != self.prev_op[end]
|
||||
and self.is_jump_forward(jump_forward_offset)):
|
||||
if (code[self.prev_op[end]] == self.opc.RETURN_VALUE or
|
||||
(code[self.prev_op[end]] == self.opc.POP_BLOCK
|
||||
and code[return_val_offset1] == self.opc.RETURN_VALUE)):
|
||||
jump_back = None
|
||||
if not jump_back: # loop suite ends in return. wtf right?
|
||||
if not jump_back:
|
||||
# loop suite ends in return
|
||||
jump_back = self.last_instr(start, end, self.opc.RETURN_VALUE) + 1
|
||||
if not jump_back:
|
||||
return
|
||||
|
||||
jump_back += 1
|
||||
if_offset = None
|
||||
if code[self.prev_op[next_line_byte]] not in POP_JUMP_TF:
|
||||
loop_type = 'for'
|
||||
else:
|
||||
if_offset = self.prev[next_line_byte]
|
||||
if if_offset:
|
||||
loop_type = 'while'
|
||||
self.ignore_if.add(self.prev_op[next_line_byte])
|
||||
self.ignore_if.add(if_offset)
|
||||
else:
|
||||
loop_type = 'for'
|
||||
target = next_line_byte
|
||||
end = jump_back + 3
|
||||
else:
|
||||
@@ -645,6 +669,7 @@ class Scanner3(Scanner):
|
||||
elif target < offset:
|
||||
self.fixed_jumps[offset] = jump_back+4
|
||||
end = jump_back+4
|
||||
|
||||
target = self.get_target(jump_back)
|
||||
|
||||
if code[target] in (self.opc.FOR_ITER, self.opc.GET_ITER):
|
||||
@@ -652,6 +677,7 @@ class Scanner3(Scanner):
|
||||
else:
|
||||
loop_type = 'while'
|
||||
test = self.prev_op[next_line_byte]
|
||||
|
||||
if test == offset:
|
||||
loop_type = 'while 1'
|
||||
elif self.code[test] in op3.hasjabs+op3.hasjrel:
|
||||
@@ -692,38 +718,40 @@ class Scanner3(Scanner):
|
||||
'end': prev_op[target]})
|
||||
return
|
||||
|
||||
# Is it an "and" inside an "if" block
|
||||
# The op offset just before the target jump offset is important
|
||||
# in making a determination of what we have. Save that.
|
||||
pre_rtarget = prev_op[rtarget]
|
||||
|
||||
# Is it an "and" inside an "if" or "while" block
|
||||
if op == self.opc.POP_JUMP_IF_FALSE:
|
||||
|
||||
# Search for another POP_JUMP_IF_FALSE targeting the same op,
|
||||
# in current statement, starting from current offset, and filter
|
||||
# everything inside inner 'or' jumps and midline ifs
match = self.rem_or(start, self.next_stmt[offset],
self.opc.POP_JUMP_IF_FALSE, target)
# We can't remove mid-line ifs because line structures have changed
# from restructBytecode().
# match = self.remove_mid_line_ifs(match)

# If we still have any offsets in set, start working on it
if match:
is_jump_forward = self.is_jump_forward(prev_op[rtarget])
if (is_jump_forward and prev_op[rtarget] not in self.stmts and
self.restrict_to_parent(self.get_target(prev_op[rtarget]), parent) == rtarget):
if (code[prev_op[prev_op[rtarget]]] == self.opc.JUMP_ABSOLUTE
is_jump_forward = self.is_jump_forward(pre_rtarget)
if (is_jump_forward and pre_rtarget not in self.stmts and
self.restrict_to_parent(self.get_target(pre_rtarget), parent) == rtarget):
if (code[prev_op[pre_rtarget]] == self.opc.JUMP_ABSOLUTE
and self.remove_mid_line_ifs([offset]) and
target == self.get_target(prev_op[prev_op[rtarget]]) and
(prev_op[prev_op[rtarget]] not in self.stmts or
self.get_target(prev_op[prev_op[rtarget]]) > prev_op[prev_op[rtarget]]) and
1 == len(self.remove_mid_line_ifs(self.rem_or(start, prev_op[prev_op[rtarget]], POP_JUMP_TF, target)))):
target == self.get_target(prev_op[pre_rtarget]) and
(prev_op[pre_rtarget] not in self.stmts or
self.get_target(prev_op[pre_rtarget]) > prev_op[pre_rtarget]) and
1 == len(self.remove_mid_line_ifs(self.rem_or(start, prev_op[pre_rtarget], POP_JUMP_TF, target)))):
pass
elif (code[prev_op[prev_op[rtarget]]] == self.opc.RETURN_VALUE
elif (code[prev_op[pre_rtarget]] == self.opc.RETURN_VALUE
and self.remove_mid_line_ifs([offset]) and
1 == (len(set(self.remove_mid_line_ifs(self.rem_or(start, prev_op[prev_op[rtarget]],
1 == (len(set(self.remove_mid_line_ifs(self.rem_or(start, prev_op[pre_rtarget],
POP_JUMP_TF, target))) |
set(self.remove_mid_line_ifs(self.rem_or(start, prev_op[prev_op[rtarget]],
set(self.remove_mid_line_ifs(self.rem_or(start, prev_op[pre_rtarget],
(self.opc.POP_JUMP_IF_FALSE,
self.opc.POP_JUMP_IF_TRUE,
self.opc.JUMP_ABSOLUTE),
prev_op[rtarget], True)))))):
pre_rtarget, True)))))):
pass
else:
fix = None
@@ -751,7 +779,7 @@ class Scanner3(Scanner):
if code[prev_op[next]] == self.opc.POP_JUMP_IF_FALSE:
if (code[next] == self.opc.JUMP_FORWARD
or target != rtarget
or code[prev_op[prev_op[rtarget]]] not in
or code[prev_op[pre_rtarget]] not in
(self.opc.JUMP_ABSOLUTE, self.opc.RETURN_VALUE)):
self.fixed_jumps[offset] = prev_op[next]
return
@@ -764,14 +792,14 @@ class Scanner3(Scanner):
if offset in self.ignore_if:
return

if (code[prev_op[rtarget]] == self.opc.JUMP_ABSOLUTE and
prev_op[rtarget] in self.stmts and
prev_op[rtarget] != offset and
prev_op[prev_op[rtarget]] != offset and
if (code[pre_rtarget] == self.opc.JUMP_ABSOLUTE and
pre_rtarget in self.stmts and
pre_rtarget != offset and
prev_op[pre_rtarget] != offset and
not (code[rtarget] == self.opc.JUMP_ABSOLUTE and
code[rtarget+3] == self.opc.POP_BLOCK and
code[prev_op[prev_op[rtarget]]] != self.opc.JUMP_ABSOLUTE)):
rtarget = prev_op[rtarget]
code[prev_op[pre_rtarget]] != self.opc.JUMP_ABSOLUTE)):
rtarget = pre_rtarget

# Does the "jump if" jump beyond a jump op?
# That is, we have something like:
@@ -787,12 +815,11 @@ class Scanner3(Scanner):
# There are other contexts we may need to consider
# like whether the target is "END_FINALLY"
# or if the condition jump is to a forward location
if self.is_jump_forward(prev_op[rtarget]):
rrtarget = prev_op[rtarget]
if_end = self.get_target(rrtarget)
if self.is_jump_forward(pre_rtarget):
if_end = self.get_target(pre_rtarget)

# If the jump target is back, we are looping
if (if_end < rrtarget and
if (if_end < pre_rtarget and
(code[prev_op[if_end]] == self.opc.SETUP_LOOP)):
if (if_end > start):
return
@@ -801,19 +828,25 @@ class Scanner3(Scanner):

self.structs.append({'type': 'if-then',
'start': start,
'end': prev_op[rtarget]})
self.not_continue.add(prev_op[rtarget])
'end': pre_rtarget})
self.not_continue.add(pre_rtarget)

if rtarget < end and (
code[rtarget] not in (self.opc.END_FINALLY,
self.opc.JUMP_ABSOLUTE) and
code[prev_op[rrtarget]] not in (self.opc.POP_EXCEPT,
code[prev_op[pre_rtarget]] not in (self.opc.POP_EXCEPT,
self.opc.END_FINALLY)):
self.structs.append({'type': 'else',
'start': rtarget,
'end': end})
self.else_start[rtarget] = end
elif code[prev_op[rtarget]] == self.opc.RETURN_VALUE:
elif self.is_jump_back(pre_rtarget):
if_end = rtarget
self.structs.append({'type': 'if-then',
'start': start,
'end': pre_rtarget})
self.not_continue.add(pre_rtarget)
elif code[pre_rtarget] == self.opc.RETURN_VALUE:
self.structs.append({'type': 'if-then',
'start': start,
'end': rtarget})
@@ -843,7 +876,7 @@ class Scanner3(Scanner):
return
pass
pass
self.return_end_ifs.add(prev_op[rtarget])
self.return_end_ifs.add(pre_rtarget)

elif op in self.jump_if_pop:
target = self.get_target(offset)
@@ -882,6 +915,16 @@ class Scanner3(Scanner):
pass
return

def is_jump_back(self, offset):
"""
Return True if the code at offset is some sort of jump back.
That is, it is a JUMP_ABSOLUTE whose target comes before the jump,
i.e. the jump goes backward.
"""
if self.code[offset] != self.opc.JUMP_ABSOLUTE:
return False
return offset > self.get_target(offset)
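For reference, a minimal standalone sketch of the same "backward absolute jump" test, written against the stdlib dis module rather than the scanner's own offset tables. It is only an illustration and assumes a CPython that still emits JUMP_ABSOLUTE (roughly 3.4 through 3.10):

from dis import get_instructions

def count_down(x):
    while x:
        x -= 1

def backward_jumps(func):
    # An absolute jump whose target offset is smaller than its own offset
    # is a jump back -- the closing jump of a loop, for instance.
    return [ins.offset for ins in get_instructions(func)
            if ins.opname == 'JUMP_ABSOLUTE' and ins.argval < ins.offset]

print(backward_jumps(count_down))   # expect one offset: the loop's closing jump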

def next_except_jump(self, start):
"""
Return the next jump that was generated by an except SomeException:

@@ -6,8 +6,6 @@ This sets up opcodes Python's 3.0 and calls a generalized
scanner routine for Python 3.
"""

from __future__ import print_function

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_30 as opc
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)

@@ -6,8 +6,6 @@ This sets up opcodes Python's 3.1 and calls a generalized
scanner routine for Python 3.
"""

from __future__ import print_function

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_31 as opc
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)

@@ -6,8 +6,6 @@ This sets up opcodes Python's 3.2 and calls a generalized
scanner routine for Python 3.
"""

from __future__ import print_function

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_32 as opc
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)

@@ -6,8 +6,6 @@ This sets up opcodes Python's 3.3 and calls a generalized
scanner routine for Python 3.
"""

from __future__ import print_function

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_33 as opc
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)

@@ -6,8 +6,6 @@ This sets up opcodes Python's 3.4 and calls a generalized
scanner routine for Python 3.
"""

from __future__ import print_function

from xdis.opcodes import opcode_34 as opc

# bytecode verification, verify(), uses JUMP_OPs from here

@@ -6,8 +6,6 @@ This sets up opcodes Python's 3.5 and calls a generalized
scanner routine for Python 3.
"""

from __future__ import print_function

from uncompyle6.scanners.scanner3 import Scanner3

# bytecode verification, verify(), uses JUMP_OPs from here

@@ -6,8 +6,6 @@ This sets up opcodes Python's 3.5 and calls a generalized
scanner routine for Python 3.
"""

from __future__ import print_function

from uncompyle6.scanners.scanner3 import Scanner3

# bytecode verification, verify(), uses JUMP_OPs from here

@@ -56,12 +56,18 @@ class Token:
return self.format(line_prefix='')

def format(self, line_prefix=''):
prefix = ('\n%s%4d ' % (line_prefix, self.linestart)
if self.linestart else (' ' * (6 + len(line_prefix))))
if self.linestart:
prefix = '\n%s%4d ' % (line_prefix, self.linestart)
else:
prefix = ' ' * (6 + len(line_prefix))
offset_opname = '%6s %-17s' % (self.offset, self.type)
if not self.has_arg:
return "%s%s" % (prefix, offset_opname)
argstr = "%6d " % self.attr if isinstance(self.attr, int) else (' '*7)

if isinstance(self.attr, int):
argstr = "%6d " % self.attr
else:
argstr = ' '*7
if self.pattr:
pattr = self.pattr
if self.opc:
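The rewrites in this hunk, and in several hunks below, all follow the same recipe: conditional expressions (x if cond else y) only exist from Python 2.5 on, so for the Python 2.4 branch each one is expanded into a plain if/else. A minimal sketch of the transformation, using a hypothetical helper:

def pick_prefix(linestart):
    # Python 2.5+ form (the spelling being replaced in these hunks):
    #     return 'head' if linestart else 'body'
    # Python 2.4-safe form, as used throughout this branch:
    if linestart:
        return 'head'
    return 'body'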

@@ -51,11 +51,9 @@ The node position 0 will be associated with "import".

# FIXME: DRY code with pysource

from __future__ import print_function

import re, sys

from uncompyle6 import PYTHON3, IS_PYPY
from uncompyle6 import PYTHON3, IS_PYPY, PYTHON_VERSION
from xdis.code import iscode
from uncompyle6.semantics import pysource
from uncompyle6 import parser
@@ -75,16 +73,18 @@ from uncompyle6.semantics.pysource import AST, INDENT_PER_LEVEL, NONE, PRECEDENC
from uncompyle6.semantics.make_function import find_all_globals, find_none

if PYTHON3:
from itertools import zip_longest
from io import StringIO
else:
from itertools import izip_longest as zip_longest
from StringIO import StringIO


from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG

from collections import namedtuple
if PYTHON_VERSION < 2.6:
from xdis.namedtuple25 import namedtuple
else:
from collections import namedtuple
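collections.namedtuple itself only appeared in Python 2.6, which is why the plain import above is replaced by a version check that falls back to the namedtuple25 backport bundled with xdis. The same fallback could also be written as a try/except (an alternative style for comparison, not what this branch uses):

try:
    from collections import namedtuple         # Python 2.6 and later
except ImportError:
    from xdis.namedtuple25 import namedtuple   # backport for Python <= 2.5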

NodeInfo = namedtuple("NodeInfo", "node start finish")
ExtractInfo = namedtuple("ExtractInfo",
"lineNo lineStartOffset markerLine selectedLine selectedText")
@@ -736,7 +736,10 @@ class FragmentsWalker(pysource.SourceWalker, object):
def n_genexpr(self, node):
start = len(self.f.getvalue())
self.write('(')
code_index = -6 if self.version > 3.2 else -5
if self.version > 3.2:
code_index = -6
else:
code_index = -5
self.comprehension_walk(node, iter_index=3, code_index=code_index)
self.write(')')
self.set_pos_info(node, start, len(self.f.getvalue()))
@@ -890,7 +893,10 @@ class FragmentsWalker(pysource.SourceWalker, object):
subclass = n.attr
break
pass
subclass_info = node if node == 'classdefdeco2' else node[0]
if node == 'classdefdeco2':
subclass_info = node
else:
subclass_info = node[0]
elif buildclass[1][0] == 'load_closure':
# Python 3 with closures not functions
load_closure = buildclass[1]
@@ -914,7 +920,10 @@ class FragmentsWalker(pysource.SourceWalker, object):
subclass = buildclass[1][0].attr
subclass_info = node[0]
else:
buildclass = node if (node == 'classdefdeco2') else node[0]
if node == 'classdefdeco2':
buildclass = node
else:
buildclass = node[0]
build_list = buildclass[1][0]
if hasattr(buildclass[-3][0], 'attr'):
subclass = buildclass[-3][0].attr
@@ -980,7 +989,9 @@ class FragmentsWalker(pysource.SourceWalker, object):
tokens.append(Token('LAMBDA_MARKER'))
try:
ast = parser.parse(self.p, tokens, customize)
except (parser.ParserError, AssertionError) as e:
except parser.ParserError(e):
raise ParserError(e, tokens)
except AssertionError(e):
raise ParserError(e, tokens)
maybe_show_ast(self.showast, ast)
return ast
@@ -1005,7 +1016,9 @@ class FragmentsWalker(pysource.SourceWalker, object):
# Build AST from disassembly.
try:
ast = parser.parse(self.p, tokens, customize)
except (parser.ParserError, AssertionError) as e:
except parser.ParserError(e):
raise ParserError(e, tokens)
except AssertionError(e):
raise ParserError(e, tokens)

maybe_show_ast(self.showast, ast)
@@ -1642,16 +1655,16 @@ class FragmentsWalker(pysource.SourceWalker, object):
code._customize,
isLambda = isLambda,
noneInNames = ('None' in code.co_names))
except ParserError as p:
except ParserError(p):
self.write(str(p))
self.ERROR = p
return

# build parameters

tup = [paramnames, defparams]
params = [build_param(ast, name, default) for
name, default in zip_longest(paramnames, defparams, fillvalue=None)]

name, default in map(lambda *tup:tup, *tup)]
params.reverse() # back to correct order
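itertools.izip_longest is a Python 2.6 addition, so the 2.4 branch leans on old map() semantics instead: when map() is given more than one sequence, Python 2 pads the shorter ones with None, which matches izip_longest(..., fillvalue=None). A tiny Python 2 illustration with made-up names:

# Python 2 only: map() pads the shorter sequence with None.
names = ['a', 'b', 'c']
defaults = [10]
pairs = map(lambda *tup: tup, names, defaults)
print(pairs)        # [('a', 10), ('b', None), ('c', None)]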

if 4 & code.co_flags: # flag 2 -> variable number of args

@@ -6,14 +6,9 @@ All the crazy things we have to do to handle Python functions
from xdis.code import iscode
from uncompyle6.scanner import Code
from uncompyle6.parsers.astnode import AST
from uncompyle6 import PYTHON3
from uncompyle6.semantics.parser_error import ParserError
from uncompyle6.semantics.helper import print_docstring

if PYTHON3:
from itertools import zip_longest
else:
from itertools import izip_longest as zip_longest

from uncompyle6.show import maybe_show_ast_param_default

@@ -122,7 +117,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
code._customize,
isLambda = isLambda,
noneInNames = ('None' in code.co_names))
except ParserError as p:
except ParserError, p:
self.write(str(p))
self.ERROR = p
return
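The `except ParserError as p` spelling needs Python 2.6 or later; Python 2.4 and 2.5 only accept the comma form, which is what this branch switches to. A minimal Python 2 sketch of the two spellings:

# Python 2.4/2.5 compatible -- and still valid in 2.6/2.7:
try:
    1 / 0
except ZeroDivisionError, e:
    print(str(e))

# Python 2.6+ (and the only form Python 3 accepts):
#     except ZeroDivisionError as e: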
@@ -156,7 +151,10 @@ def make_function3_annotate(self, node, isLambda, nested=1,
self.write(': %s' % value)
suffix = ', '

suffix = ', ' if i > 0 else ''
if i > 0:
suffix = ', '
else:
suffix = ''
for n in node:
if n == 'pos_arg':
self.write(suffix)
@@ -301,17 +299,21 @@ def make_function2(self, node, isLambda, nested=1, codeNode=None):
code._customize,
isLambda = isLambda,
noneInNames = ('None' in code.co_names))
except ParserError as p:
except ParserError, p:
self.write(str(p))
self.ERROR = p
return

kw_pairs = args_node.attr[1] if self.version >= 3.0 else 0
if self.version >= 3.0:
kw_pairs = args_node.attr[1]
else:
kw_pairs = 0
indent = self.indent

# build parameters
tup = [paramnames, defparams]
params = [build_param(ast, name, default) for
name, default in zip_longest(paramnames, defparams, fillvalue=None)]
name, default in map(lambda *tup:tup, *tup)]
params.reverse() # back to correct order

if 4 & code.co_flags: # flag 2 -> variable number of args
@@ -437,18 +439,22 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
code._customize,
isLambda = isLambda,
noneInNames = ('None' in code.co_names))
except ParserError as p:
except ParserError, p:
self.write(str(p))
self.ERROR = p
return

kw_pairs = args_node.attr[1] if self.version >= 3.0 else 0
if self.version >= 3.0:
kw_pairs = args_node.attr[1]
else:
kw_pairs = 0
indent = self.indent

# build parameters
if self.version != 3.2:
tup = [paramnames, defparams]
params = [build_param(ast, name, default) for
name, default in zip_longest(paramnames, defparams, fillvalue=None)]
name, default in map(lambda *tup:tup, *tup)]
params.reverse() # back to correct order

if 4 & code.co_flags: # flag 2 -> variable number of args
@@ -483,7 +489,10 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):

i = len(paramnames) - len(defparams)
self.write(", ".join(paramnames[:i]))
suffix = ', ' if i > 0 else ''
if i > 0:
suffix = ', '
else:
suffix = ''
for n in node:
if n == 'pos_arg':
self.write(suffix)

@@ -67,8 +67,6 @@ methods implement most of the below.
The '%' may optionally be followed by a number (C) in square brackets, which
makes the engine walk down to N[C] before evaluating the escape code.
"""
from __future__ import print_function

import sys, re

from uncompyle6 import PYTHON3
@@ -301,7 +299,7 @@ TABLE_DIRECT = {
'ifstmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
'iflaststmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
'iflaststmtl': ( '%|if %c:\n%+%c%-', 0, 1 ),
'testtrue': ( 'not %p', (0, 22) ),
'testtrue': ( 'not %p', (0, 22) ),

'ifelsestmt': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtc': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
@@ -589,7 +587,9 @@ class SourceWalker(GenericASTTraversal, object):
})
else:
TABLE_DIRECT.update({
'except_cond3': ( '%|except %c, %c:\n', 1, 6 ),
'except_cond3': ( '%|except %c, %c:\n', 1, 6 ),
'testtrue_then': ( 'not %p', (0, 22) ),

})
if 2.4 <= version <= 2.6:
TABLE_DIRECT.update({
@@ -622,7 +622,10 @@ class SourceWalker(GenericASTTraversal, object):
code = node[-3]

self.indentMore()
annotate_last = -4 if self.version == 3.1 else -5
if self.version == 3.1:
annotate_last = -4
else:
annotate_last = -5

# FIXME: handle and pass full annotate args
make_function3_annotate(self, node, isLambda=False,
@@ -1151,7 +1154,10 @@ class SourceWalker(GenericASTTraversal, object):
return
n = node[-1]
elif node[-1] == 'del_stmt':
n = node[-3] if node[-2] == 'JUMP_BACK' else node[-2]
if node[-2] == 'JUMP_BACK':
n = node[-3]
else:
n = node[-2]

assert n == 'list_iter'

@@ -1169,7 +1175,10 @@ class SourceWalker(GenericASTTraversal, object):
list_iter = node[-1]
else:
expr = n[1]
list_iter = node[-3] if node[-2] == 'JUMP_BACK' else node[-2]
if node[-2] == 'JUMP_BACK':
list_iter = node[-3]
else:
list_iter = node[-2]

assert expr == 'expr'
assert list_iter == 'list_iter'
@@ -1221,7 +1230,10 @@ class SourceWalker(GenericASTTraversal, object):
self.write( '[ ')

expr = n[0]
list_iter = node[-2] if self.is_pypy and node[-1] == 'JUMP_BACK' else node[-1]
if self.is_pypy and node[-1] == 'JUMP_BACK':
list_iter = node[-2]
else:
list_iter = node[-1]

assert expr == 'expr'
assert list_iter == 'list_iter'
@@ -1295,7 +1307,10 @@ class SourceWalker(GenericASTTraversal, object):
self.write(' for ')
self.preorder(ast[iter_index-1])
self.write(' in ')
iter_expr = node[2] if node[2] == 'expr' else node[-3]
if node[2] == 'expr':
iter_expr = node[2]
else:
iter_expr = node[-3]
assert iter_expr == 'expr'
self.preorder(iter_expr)
self.preorder(ast[iter_index])
@@ -1303,7 +1318,10 @@ class SourceWalker(GenericASTTraversal, object):

def n_genexpr(self, node):
self.write('(')
code_index = -6 if self.version > 3.2 else -5
if self.version > 3.2:
code_index = -6
else:
code_index = -5
self.comprehension_walk(node, iter_index=3, code_index=code_index)
self.write(')')
self.prune()
@@ -1537,7 +1555,10 @@ class SourceWalker(GenericASTTraversal, object):
break
pass
pass
subclass_info = node if node == 'classdefdeco2' else node[0]
if node == 'classdefdeco2':
subclass_info = node
else:
subclass_info = node[0]
elif buildclass[1][0] == 'load_closure':
# Python 3 with closures not functions
load_closure = buildclass[1]
@@ -1561,7 +1582,10 @@ class SourceWalker(GenericASTTraversal, object):
subclass = buildclass[1][0].attr
subclass_info = node[0]
else:
buildclass = node if (node == 'classdefdeco2') else node[0]
if node == 'classdefdeco2':
buildclass = node
else:
buildclass = node[0]
build_list = buildclass[1][0]
if hasattr(buildclass[-3][0], 'attr'):
subclass = buildclass[-3][0].attr
@@ -2198,7 +2222,9 @@ class SourceWalker(GenericASTTraversal, object):
tokens.append(Token('LAMBDA_MARKER'))
try:
ast = python_parser.parse(self.p, tokens, customize)
except (python_parser.ParserError, AssertionError) as e:
except python_parser.ParserError, e:
raise ParserError(e, tokens)
except AssertionError, e:
raise ParserError(e, tokens)
maybe_show_ast(self.showast, ast)
return ast
@@ -2225,7 +2251,9 @@ class SourceWalker(GenericASTTraversal, object):
# Build AST from disassembly.
try:
ast = python_parser.parse(self.p, tokens, customize)
except (python_parser.ParserError, AssertionError) as e:
except python_parser.ParserError, e:
raise ParserError(e, tokens)
except AssertionError, e:
raise ParserError(e, tokens)

maybe_show_ast(self.showast, ast)
@@ -2303,10 +2331,10 @@ def deparse_code(version, co, out=sys.stdout, showasm=None, showast=False,
if __name__ == '__main__':
def deparse_test(co):
"This is a docstring"
sys_version = sys.version_info.major + (sys.version_info.minor / 10.0)
sys_version = float(sys.version[0:3])
deparsed = deparse_code(sys_version, co, showasm='after', showast=True)
# deparsed = deparse_code(sys_version, co, showasm=None, showast=False,
# showgrammar=True)
print(deparsed.text)
return
deparse_test(deparse_test.__code__)
deparse_test(deparse_test.func_code)
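Two more 2.4-era details end this file: sys.version_info only grew named attributes such as major and minor in Python 2.7, and function objects only gained the __code__ alias around Python 2.6, so the branch falls back to slicing sys.version and to func_code. A small sketch of the version computation (it assumes a single-digit minor version, which holds for every 2.x release):

import sys

# Works back to Python 2.4: sys.version begins with e.g. "2.4.6 ...",
# so the first three characters are the major.minor pair.
sys_version = float(sys.version[0:3])
print(sys_version)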

@@ -12,7 +12,10 @@ def maybe_show_asm(showasm, tokens):
:param tokens: The asm tokens to show.
"""
if showasm:
stream = showasm if hasattr(showasm, 'write') else sys.stdout
if hasattr(showasm, 'write'):
stream = showasm
else:
stream = sys.stdout
for t in tokens:
stream.write(str(t))
stream.write('\n')
@@ -29,7 +32,10 @@ def maybe_show_ast(showast, ast):
:param ast: The ast to show.
"""
if showast:
stream = showast if hasattr(showast, 'write') else sys.stdout
if hasattr(showast, 'write'):
stream = showast
else:
stream = sys.stdout
stream.write(str(ast))
stream.write('\n')

@@ -48,7 +54,10 @@ def maybe_show_ast_param_default(showast, name, default):
:param default: The function parameter default.
"""
if showast:
stream = showast if hasattr(showast, 'write') else sys.stdout
if hasattr(showast, 'write'):
stream = showast
else:
stream = sys.stdout
stream.write('\n')
stream.write('--' + name)
stream.write('\n')

@@ -6,8 +6,6 @@
byte-code verification
"""

from __future__ import print_function

import dis, operator

import uncompyle6
@@ -406,7 +404,8 @@ def compare_code_with_srcfile(pyc_filename, src_filename, weak_verify=False):
return msg
try:
code_obj2 = load_file(src_filename)
except SyntaxError as e:
except SyntaxError, e:
return str(e).replace(src_filename, pyc_filename)
return str(e)
cmp_code_objects(version, is_pypy, code_obj1, code_obj2, ignore_code=weak_verify)
return None