Mirror of https://github.com/rocky/python-uncompyle6.git
Compare commits: 21 commits, release-2. ... release-2.
Author | SHA1 | Date
---|---|---
 | 4377354cf9 |
 | 6caa2c12fa |
 | 3153a955d4 |
 | 6f3a88d7e2 |
 | 109737cbef |
 | 05733c6171 |
 | 6765a2ea97 |
 | c85496a92d |
 | e4ba73adfb |
 | 7bf93980ce |
 | 8241a5e3a8 |
 | faac11ad8c |
 | fe04b97c6b |
 | 62f6220082 |
 | 11e6eff427 |
 | 2286aa5320 |
 | 72ac7eb27c |
 | a8c5f71cfe |
 | feec241da8 |
 | c5f359f9be |
 | bfe8357f52 |
.travis.yml
@@ -3,10 +3,11 @@ language: python
sudo: false

python:
- '2.6'
- '2.7'
- '3.4'
- '3.5'
- '2.7'
- '2.6'
- '3.4'
- '3.2'

install:
- pip install -r requirements.txt
98
ChangeLog
@@ -1,8 +1,100 @@
2016-05-05 rocky <rocky@gnu.org>

    * uncompyle6/version.py: Get ready for release 2.3.4

2016-05-05 rocky <rocky@gnu.org>

    * .travis.yml: Remove pypy3 add python 3.2 testing Reorder list for testing preference

2016-05-05 rocky <rocky@gnu.org>

    * .travis.yml: Remove pypy

2016-05-05 rocky <rocky@gnu.org>

    * Makefile, test/Makefile, uncompyle6/semantics/pysource.py: Fix up
      3.2 tests Remove pypy

2016-05-05 rocky <rocky@gnu.org>

    * .travis.yml: Try pypy and pypy3

2016-05-05 rocky <rocky@gnu.org>

    * test/simple_source/def/05_abc_class.py,
      test/simple_source/def/06_classbug.py, uncompyle6/parsers/parse3.py:
      Python 3.5 abc.py bug distilled

2016-05-05 rocky <rocky@gnu.org>

    * uncompyle6/scanners/dis35.py, uncompyle6/scanners/scanner35.py:
      Add cross-Python-portable 3.5 dis module

2016-05-04 rocky <rocky@gnu.org>

    * test/simple_source/stmts/05_with.py,
      uncompyle6/opcodes/opcode_35.py, uncompyle6/parser.py,
      uncompyle6/parsers/parse3.py, uncompyle6/scanners/scanner35.py:
      Handle 3.5 with [as] scanner35.py: Fix a small variable-name typo

2016-05-03 rocky <rocky@gnu.org>

    * : One more test

2016-05-03 rocky <rocky@gnu.org>

    * uncompyle6/scanners/scanner3.py,
      uncompyle6/scanners/scanner34.py, uncompyle6/scanners/scanner35.py:
      Don't repeat next_except_jump

2016-05-03 rocky <rb@dustyfeet.com>

    * __pkginfo__.py, requirements.txt: Wrong package name

2016-05-03 rocky <rocky@gnu.org>

    * __pkginfo__.py, requirements.txt, setup.py: More package
      administrivia

2016-05-03 rocky <rocky@gnu.org>

    * uncompyle6/scanner.py: Remove one more old-style Python class

2016-05-03 rocky <rocky@gnu.org>

    * uncompyle6/scanners/scanner27.py: DRY Python 2.7 scanner more

2016-05-03 rocky <rocky@gnu.org>

    * MANIFEST.in: Include LICENSE in package

2016-05-03 rocky <rocky@gnu.org>

    * ChangeLog, NEWS, uncompyle6/version.py: Get ready for release
      2.3.3

2016-05-02 rocky <rocky@gnu.org>

    * __pkginfo__.py, bin/pydisassemble, bin/uncompyle6, setup.py,
      uncompyle6/__init__.py, uncompyle6/version.py: Add -V | --version
      and simplify changing it
    * README.rst: Be more explicit that we need Python 2.6 or later

2016-05-02 rocky <rocky@gnu.org>

    * : commit feec241da88107b97bbdfbabeb3ae7131a7aa923 Author: rocky
      <rocky@gnu.org> Date: Mon May 2 21:20:17 2016 -0400

2016-05-02 rocky <rb@dustyfeet.com>

    * README.rst: Note relation to other uncompyle forks Add some other minor corrections and additions as well.

2016-05-02 rocky <rb@dustyfeet.com>

    * uncompyle6/__init__.py: Trivial spacing change

2016-05-02 rocky <rocky@gnu.org>

    * ChangeLog, NEWS, __pkginfo__.py, bin/pydisassemble,
      bin/uncompyle6, setup.py, uncompyle6/__init__.py,
      uncompyle6/version.py: Add -V | --version and simplify changing it

2016-05-01 rocky <rocky@gnu.org>
MANIFEST.in
@@ -1,6 +1,7 @@
include README.rst
include HISTORY.md
include ChangeLog
include HISTORY.md
include LICENSE
include __pkginfo__.py
recursive-include uncompyle6 *.py
include bin/uncompyle6
2
Makefile
@@ -28,7 +28,7 @@ check-2.7 check-3.3 check-3.4: pytest
	$(MAKE) -C test $@

#: Tests for Python 3.5 - pytest doesn't work here
check-3.5:
check-3.2 check-3.5:
	$(MAKE) -C test $@

#: Tests for Python 2.6 (doesn't have pytest)
13
NEWS
@@ -1,3 +1,16 @@
uncompyle6 2.3.4 2016-05-5

- More Python 3.5 parsing bugs addressed
- decompiling Python 3.5 from other Python versions works
- test from Python 3.2
- remove "__module__ = __name__" in 3.0 <= Python <= 3.2

uncompyle6 2.3.3 2016-05-3

- Fix bug in running uncompyle6 script on Python 3
- Speed up performance on deparsing long lists by grouping in chunks of 32 and 256 items
- DRY Python expressions between Python 2 and 3

uncompyle6 2.3.2 2016-05-1

- Add --version option to standalone scripts
21
README.rst
@@ -4,7 +4,7 @@ uncompyle6
==========

A native Python bytecode Disassembler, Decompiler, Fragment Decompiler
and bytecode library
and bytecode library. Follows in the tradition of decompyle, uncompyle, and uncompyle2.


Introduction
@@ -12,12 +12,17 @@ Introduction

*uncompyle6* translates Python bytecode back into equivalent Python
source code. It accepts bytecodes from Python version 2.5 to 3.4 or
so and has been tested on Python running versions 2.6, 2.7, 3.3,
3.4 and 3.5.
so. The code requires Python 2.6 or later and has been tested on Python
running versions 2.6, 2.7, 3.3, 3.4 and 3.5.

Why this?
---------

There were a number of decompyle, uncompile, uncompyle2, uncompyle3
forks around. All of them come basically from the same code base, and
almost all of them are not maintained very well. This code pulls these together
and addresses a number of open issues in those.

What makes this different from other CPython bytecode decompilers? Its
ability to deparse just fragments and give source-code information
around a given bytecode offset.
@@ -81,20 +86,22 @@ Run

./bin/uncompyle6 -h
./bin/pydisassemble -h

for usage help
for usage help.


Known Bugs/Restrictions
-----------------------

Python 2 deparsing is probably as solid as the various versions of
uncompyle2. Python 3 deparsing is okay but not as solid.
uncompyle2. Python 3 deparsing is okay but not as solid. Python 3.5 is missing some of the new opcodes added, but still often works.

See Also
--------

* https://github.com/zrax/pycdc
* https://code.google.com/p/unpyc3/
* https://github.com/zrax/pycdc : supports all versions of Python and is written in C++
* https://code.google.com/archive/p/unpyc3/ : supports Python 3.2 only

The above projects use a different decompiling technique than what is used here.

The HISTORY file.
__pkginfo__.py
@@ -12,7 +12,7 @@ copyright = """
Copyright (C) 2015, 2016 Rocky Bernstein <rb@dustyfeet.com>.
"""

classifiers = ['Development Status :: 3 - Alpha',
classifiers = ['Development Status :: 4 - Beta',
               'Intended Audience :: Developers',
               'Operating System :: OS Independent',
               'Programming Language :: Python',
@@ -31,7 +31,7 @@ classifiers = ['Development Status :: 3 - Alpha',
author = "Rocky Bernstein, Hartmut Goebel, John Aycock, and others"
author_email = "rb@dustyfeet.com"
ftp_url = None
install_requires = ['python-spark >= 1.1.0']
install_requires = ['spark-parser >= 1.1.1']
license = 'GPL'

license = 'MIT'
@@ -47,7 +47,7 @@ Extensions of generated files:
'.pyc_dis' '.pyo_dis' successfully decompiled (and verified if --verify)
+ '_unverified' successfully decompile but --verify failed
+ '_failed' decompile failed (contact author for enhancement)
""" % ((program,) * 6
""" % ((program,) * 5)

program = os.path.basename(__file__)
requirements.txt
@@ -1 +1 @@
spark_parser >= 1.1.0
spark-parser >= 1.1.1
8
setup.py
@@ -3,8 +3,8 @@
"""Setup script for the 'uncompyle6' distribution."""

from __pkginfo__ import \
    author, author_email, \
    license, long_description, \
    author, author_email, install_requires, \
    license, long_description, classifiers, \
    modname, packages, py_modules, scripts, \
    short_desc, web, zip_safe

@@ -16,9 +16,9 @@ exec(open('uncompyle6/version.py').read())
setup(
    author = author,
    author_email = author_email,
#   classifiers = classifiers,
    classifiers = classifiers,
    description = short_desc,
#   install_requires = install_requires,
    install_requires = install_requires,
    license = license,
    long_description = long_description,
    py_modules = py_modules,
test/Makefile
@@ -22,6 +22,10 @@ check:
#: Run working tests from Python 2.6 or 2.7
check-2.6 check-2.7: check-bytecode-sans-3.5 check-2.7-ok

#: Run working tests from Python 3.2
check-3.2: check-bytecode
	$(PYTHON) test_pythonlib.py --bytecode-3.2 --verify $(COMPILE)

#: Run working tests from Python 3.3
check-3.3: check-bytecode
	$(PYTHON) test_pythonlib.py --bytecode-3.3 --verify $(COMPILE)
BIN test/bytecode_2.7/05_long_list.pyc (new file; binary file not shown)
BIN test/bytecode_3.4/05_with.pyc (new file; binary file not shown)
BIN test/bytecode_3.5/05_abc_class.pyc (new file; binary file not shown)
BIN test/bytecode_3.5/05_long_list.pyc (new file; binary file not shown)
BIN test/bytecode_3.5/05_with.pyc (new file; binary file not shown)
14
test/simple_source/def/05_abc_class.py
Normal file
@@ -0,0 +1,14 @@
# Python3.5 bug from abc.py:
# stmt ::= LOAD_CLOSURE RETURN_VALUE RETURN_LAST
#
# And this gets ignored.

# Note this is similar to 06_classbug.py but not the same.
# classmethod -> object

class abstractclassmethod(classmethod):
    __isabstractmethod__ = True

    def __init__(self, callable):
        callable.__isabstractmethod__ = True
        super().__init__(callable)
test/simple_source/def/06_classbug.py
@@ -5,6 +5,9 @@
# LOAD_FAST '__locals__'
# STORE_LOCALS ''

# Note this is similar to 05_abc_class.py but not the same:
# object -> classmethod

class abstractclassmethod(object):
    """A Python 3.2 STORE_LOCALS bug
    """
3
test/simple_source/expression/05_long_list.py
Normal file
@@ -0,0 +1,3 @@
# Long lists pose a slowdown in uncompiling.
x = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
print(x)
2
test/simple_source/stmts/05_with.py
Normal file
@@ -0,0 +1,2 @@
with (sys) as f:
    print(f)
uncompyle6/__init__.py
@@ -38,7 +38,7 @@ PYTHON3 = (sys.version_info >= (3, 0))
# doesn't support version_major, and has a bug in
# floating point so we can't divide 26 by 10 and get
# 2.6
PYTHON_VERSION = sys.version_info[0]+ (sys.version_info[1] / 10.0)
PYTHON_VERSION = sys.version_info[0] + (sys.version_info[1] / 10.0)
PYTHON_VERSION_STR = "%s.%s" % (sys.version_info[0], sys.version_info[1])

sys.setrecursionlimit(5000)
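A quick sanity check of the float encoding computed above; nothing here is project-specific beyond the expression itself:

import sys

# Same expression as the PYTHON_VERSION line above: major + minor/10.
version_float = sys.version_info[0] + (sys.version_info[1] / 10.0)
print(version_float)          # e.g. 2.7 on CPython 2.7.x, 3.5 on 3.5.x
print(version_float >= 3.0)   # the scanners/parsers compare these floats, e.g. self.version >= 3.5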
@@ -118,14 +118,14 @@ def main(in_base, out_base, files, codes, outfile=None,
os.remove(outfile)
sys.stderr.write("\nLast file: %s " % (infile))
raise
except:
failed_files += 1
if outfile:
outstream.close()
os.rename(outfile, outfile + '_failed')
else:
sys.stderr.write("\n# %s" % sys.exc_info()[1])
sys.stderr.write("\n# Can't uncompile %s\n" % infile)
# except:
#     failed_files += 1
#     if outfile:
#         outstream.close()
#         os.rename(outfile, outfile + '_failed')
#     else:
#         sys.stderr.write("\n# %s" % sys.exc_info()[1])
#     sys.stderr.write("\n# Can't uncompile %s\n" % infile)
else: # uncompile successful
if outfile:
outstream.close()
uncompyle6/opcodes/opcode_35.py
@@ -37,6 +37,9 @@ rm_op(opname, opmap, 'STORE_LOCALS', 69)
def_op('YIELD_FROM', 72)
def_op('LOAD_CLASSDEREF', 148)

# These are removed since Python 3.4
rm_op(opname, opmap, 'WITH_CLEANUP', 81)

# These are new since Python 3.4
def_op('BINARY_MATRIX_MULTIPLY', 16)
def_op('INPLACE_MATRIX_MULTIPLY', 17)
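For readers unfamiliar with these opcode-table helpers, here is a minimal stand-alone sketch of what def_op and rm_op typically do, modeled on CPython's Lib/opcode.py; the real uncompyle6 helpers may differ in detail:

# Hypothetical minimal versions of the helpers used in the hunk above.
opname = ['<%r>' % op for op in range(256)]   # opcode number -> name
opmap = {}                                    # name -> opcode number

def def_op(name, op):
    # Register an opcode that exists in this Python version.
    opname[op] = name
    opmap[name] = op

def rm_op(opname, opmap, name, op):
    # Un-register an opcode that this Python version no longer has.
    if opmap.get(name) == op:
        del opmap[name]
        opname[op] = '<%r>' % op

# Mirror a couple of the 3.5 adjustments shown above:
def_op('WITH_CLEANUP', 81)                    # pretend we start from the 3.4 table
rm_op(opname, opmap, 'WITH_CLEANUP', 81)      # gone in 3.5
def_op('BINARY_MATRIX_MULTIPLY', 16)          # new in 3.5
print(opmap.get('BINARY_MATRIX_MULTIPLY'), 'WITH_CLEANUP' in opmap)   # 16 False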
@@ -237,6 +237,154 @@ class PythonParser(GenericASTBuilder):
|
||||
comp_ifnot ::= expr jmp_true comp_iter
|
||||
"""
|
||||
|
||||
def p_expr(self, args):
|
||||
'''
|
||||
expr ::= _mklambda
|
||||
expr ::= SET_LINENO
|
||||
expr ::= LOAD_FAST
|
||||
expr ::= LOAD_NAME
|
||||
expr ::= LOAD_CONST
|
||||
expr ::= LOAD_GLOBAL
|
||||
expr ::= LOAD_DEREF
|
||||
expr ::= load_attr
|
||||
expr ::= binary_expr
|
||||
expr ::= binary_expr_na
|
||||
expr ::= build_list
|
||||
expr ::= cmp
|
||||
expr ::= mapexpr
|
||||
expr ::= and
|
||||
expr ::= and2
|
||||
expr ::= or
|
||||
expr ::= unary_expr
|
||||
expr ::= call_function
|
||||
expr ::= unary_not
|
||||
expr ::= unary_convert
|
||||
expr ::= binary_subscr
|
||||
expr ::= binary_subscr2
|
||||
expr ::= load_attr
|
||||
expr ::= get_iter
|
||||
expr ::= slice0
|
||||
expr ::= slice1
|
||||
expr ::= slice2
|
||||
expr ::= slice3
|
||||
expr ::= buildslice2
|
||||
expr ::= buildslice3
|
||||
expr ::= yield
|
||||
|
||||
binary_expr ::= expr expr binary_op
|
||||
binary_op ::= BINARY_ADD
|
||||
binary_op ::= BINARY_MULTIPLY
|
||||
binary_op ::= BINARY_AND
|
||||
binary_op ::= BINARY_OR
|
||||
binary_op ::= BINARY_XOR
|
||||
binary_op ::= BINARY_SUBTRACT
|
||||
binary_op ::= BINARY_DIVIDE
|
||||
binary_op ::= BINARY_TRUE_DIVIDE
|
||||
binary_op ::= BINARY_FLOOR_DIVIDE
|
||||
binary_op ::= BINARY_MODULO
|
||||
binary_op ::= BINARY_LSHIFT
|
||||
binary_op ::= BINARY_RSHIFT
|
||||
binary_op ::= BINARY_POWER
|
||||
|
||||
unary_expr ::= expr unary_op
|
||||
unary_op ::= UNARY_POSITIVE
|
||||
unary_op ::= UNARY_NEGATIVE
|
||||
unary_op ::= UNARY_INVERT
|
||||
|
||||
unary_not ::= expr UNARY_NOT
|
||||
unary_convert ::= expr UNARY_CONVERT
|
||||
|
||||
binary_subscr ::= expr expr BINARY_SUBSCR
|
||||
binary_subscr2 ::= expr expr DUP_TOPX_2 BINARY_SUBSCR
|
||||
|
||||
load_attr ::= expr LOAD_ATTR
|
||||
get_iter ::= expr GET_ITER
|
||||
slice0 ::= expr SLICE+0
|
||||
slice0 ::= expr DUP_TOP SLICE+0
|
||||
slice1 ::= expr expr SLICE+1
|
||||
slice1 ::= expr expr DUP_TOPX_2 SLICE+1
|
||||
slice2 ::= expr expr SLICE+2
|
||||
slice2 ::= expr expr DUP_TOPX_2 SLICE+2
|
||||
slice3 ::= expr expr expr SLICE+3
|
||||
slice3 ::= expr expr expr DUP_TOPX_3 SLICE+3
|
||||
buildslice3 ::= expr expr expr BUILD_SLICE_3
|
||||
buildslice2 ::= expr expr BUILD_SLICE_2
|
||||
|
||||
yield ::= expr YIELD_VALUE
|
||||
|
||||
_mklambda ::= load_closure mklambda
|
||||
_mklambda ::= mklambda
|
||||
|
||||
or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
|
||||
or ::= expr jmp_true expr _come_from
|
||||
and ::= expr jmp_false expr _come_from
|
||||
and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
|
||||
and2 ::= _jump jmp_false COME_FROM expr COME_FROM
|
||||
|
||||
expr ::= conditional
|
||||
conditional ::= expr jmp_false expr JUMP_FORWARD expr COME_FROM
|
||||
conditional ::= expr jmp_false expr JUMP_ABSOLUTE expr
|
||||
expr ::= conditionalnot
|
||||
conditionalnot ::= expr jmp_true expr _jump expr COME_FROM
|
||||
|
||||
ret_expr ::= expr
|
||||
ret_expr ::= ret_and
|
||||
ret_expr ::= ret_or
|
||||
|
||||
ret_expr_or_cond ::= ret_expr
|
||||
ret_expr_or_cond ::= ret_cond
|
||||
ret_expr_or_cond ::= ret_cond_not
|
||||
|
||||
ret_and ::= expr JUMP_IF_FALSE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_or ::= expr JUMP_IF_TRUE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF ret_expr_or_cond
|
||||
ret_cond_not ::= expr POP_JUMP_IF_TRUE expr RETURN_END_IF ret_expr_or_cond
|
||||
|
||||
stmt ::= return_lambda
|
||||
stmt ::= conditional_lambda
|
||||
|
||||
return_lambda ::= ret_expr RETURN_VALUE LAMBDA_MARKER
|
||||
conditional_lambda ::= expr jmp_false return_if_stmt return_stmt LAMBDA_MARKER
|
||||
|
||||
cmp ::= cmp_list
|
||||
cmp ::= compare
|
||||
compare ::= expr expr COMPARE_OP
|
||||
cmp_list ::= expr cmp_list1 ROT_TWO POP_TOP
|
||||
_come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list1 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list1 _come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list2 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list2 _come_from
|
||||
cmp_list2 ::= expr COMPARE_OP JUMP_FORWARD
|
||||
cmp_list2 ::= expr COMPARE_OP RETURN_VALUE
|
||||
mapexpr ::= BUILD_MAP kvlist
|
||||
|
||||
kvlist ::= kvlist kv
|
||||
kvlist ::= kvlist kv2
|
||||
kvlist ::= kvlist kv3
|
||||
kvlist ::=
|
||||
|
||||
kv ::= DUP_TOP expr ROT_TWO expr STORE_SUBSCR
|
||||
kv2 ::= DUP_TOP expr expr ROT_THREE STORE_SUBSCR
|
||||
kv3 ::= expr expr STORE_MAP
|
||||
|
||||
exprlist ::= exprlist expr
|
||||
exprlist ::= expr
|
||||
|
||||
nullexprlist ::=
|
||||
|
||||
expr32 ::= expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr
|
||||
expr1024 ::= expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32
|
||||
'''
|
||||
|
||||
|
||||
def parse(p, tokens, customize):
|
||||
p.add_custom_rules(tokens, customize)
|
||||
@@ -299,6 +447,8 @@ def python_parser(version, co, out=sys.stdout, showasm=False,
|
||||
for t in tokens:
|
||||
print(t)
|
||||
|
||||
# For heavy grammar debugging
|
||||
# parser_debug = {'rules': True, 'transition': True, 'reduce' : True}
|
||||
p = get_python_parser(version, parser_debug)
|
||||
return parse(p, tokens, customize)
|
||||
|
||||
|
@@ -362,69 +362,10 @@ class Python2Parser(PythonParser):
|
||||
|
||||
'''
|
||||
|
||||
def p_expr(self, args):
|
||||
def p_expr2(self, args):
|
||||
'''
|
||||
expr ::= _mklambda
|
||||
expr ::= SET_LINENO
|
||||
expr ::= LOAD_FAST
|
||||
expr ::= LOAD_NAME
|
||||
expr ::= LOAD_CONST
|
||||
expr ::= LOAD_GLOBAL
|
||||
expr ::= LOAD_DEREF
|
||||
expr ::= LOAD_LOCALS
|
||||
expr ::= load_attr
|
||||
expr ::= binary_expr
|
||||
expr ::= binary_expr_na
|
||||
expr ::= build_list
|
||||
expr ::= cmp
|
||||
expr ::= mapexpr
|
||||
expr ::= and
|
||||
expr ::= and2
|
||||
expr ::= or
|
||||
expr ::= unary_expr
|
||||
expr ::= call_function
|
||||
expr ::= unary_not
|
||||
expr ::= unary_convert
|
||||
expr ::= binary_subscr
|
||||
expr ::= binary_subscr2
|
||||
expr ::= load_attr
|
||||
expr ::= get_iter
|
||||
expr ::= slice0
|
||||
expr ::= slice1
|
||||
expr ::= slice2
|
||||
expr ::= slice3
|
||||
expr ::= buildslice2
|
||||
expr ::= buildslice3
|
||||
expr ::= yield
|
||||
|
||||
binary_expr ::= expr expr binary_op
|
||||
binary_op ::= BINARY_ADD
|
||||
binary_op ::= BINARY_MULTIPLY
|
||||
binary_op ::= BINARY_AND
|
||||
binary_op ::= BINARY_OR
|
||||
binary_op ::= BINARY_XOR
|
||||
binary_op ::= BINARY_SUBTRACT
|
||||
binary_op ::= BINARY_DIVIDE
|
||||
binary_op ::= BINARY_TRUE_DIVIDE
|
||||
binary_op ::= BINARY_FLOOR_DIVIDE
|
||||
binary_op ::= BINARY_MODULO
|
||||
binary_op ::= BINARY_LSHIFT
|
||||
binary_op ::= BINARY_RSHIFT
|
||||
binary_op ::= BINARY_POWER
|
||||
|
||||
unary_expr ::= expr unary_op
|
||||
unary_op ::= UNARY_POSITIVE
|
||||
unary_op ::= UNARY_NEGATIVE
|
||||
unary_op ::= UNARY_INVERT
|
||||
|
||||
unary_not ::= expr UNARY_NOT
|
||||
unary_convert ::= expr UNARY_CONVERT
|
||||
|
||||
binary_subscr ::= expr expr BINARY_SUBSCR
|
||||
binary_subscr2 ::= expr expr DUP_TOPX_2 BINARY_SUBSCR
|
||||
|
||||
load_attr ::= expr LOAD_ATTR
|
||||
get_iter ::= expr GET_ITER
|
||||
slice0 ::= expr SLICE+0
|
||||
slice0 ::= expr DUP_TOP SLICE+0
|
||||
slice1 ::= expr expr SLICE+1
|
||||
@@ -433,79 +374,6 @@ class Python2Parser(PythonParser):
|
||||
slice2 ::= expr expr DUP_TOPX_2 SLICE+2
|
||||
slice3 ::= expr expr expr SLICE+3
|
||||
slice3 ::= expr expr expr DUP_TOPX_3 SLICE+3
|
||||
buildslice3 ::= expr expr expr BUILD_SLICE_3
|
||||
buildslice2 ::= expr expr BUILD_SLICE_2
|
||||
|
||||
yield ::= expr YIELD_VALUE
|
||||
|
||||
_mklambda ::= load_closure mklambda
|
||||
_mklambda ::= mklambda
|
||||
|
||||
or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
|
||||
or ::= expr jmp_true expr _come_from
|
||||
and ::= expr jmp_false expr _come_from
|
||||
and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
|
||||
and2 ::= _jump jmp_false COME_FROM expr COME_FROM
|
||||
|
||||
expr ::= conditional
|
||||
conditional ::= expr jmp_false expr JUMP_FORWARD expr COME_FROM
|
||||
conditional ::= expr jmp_false expr JUMP_ABSOLUTE expr
|
||||
expr ::= conditionalnot
|
||||
conditionalnot ::= expr jmp_true expr _jump expr COME_FROM
|
||||
|
||||
ret_expr ::= expr
|
||||
ret_expr ::= ret_and
|
||||
ret_expr ::= ret_or
|
||||
|
||||
ret_expr_or_cond ::= ret_expr
|
||||
ret_expr_or_cond ::= ret_cond
|
||||
ret_expr_or_cond ::= ret_cond_not
|
||||
|
||||
ret_and ::= expr JUMP_IF_FALSE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_or ::= expr JUMP_IF_TRUE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF ret_expr_or_cond
|
||||
ret_cond_not ::= expr POP_JUMP_IF_TRUE expr RETURN_END_IF ret_expr_or_cond
|
||||
|
||||
stmt ::= return_lambda
|
||||
stmt ::= conditional_lambda
|
||||
|
||||
return_lambda ::= ret_expr RETURN_VALUE LAMBDA_MARKER
|
||||
conditional_lambda ::= expr jmp_false return_if_stmt return_stmt LAMBDA_MARKER
|
||||
|
||||
cmp ::= cmp_list
|
||||
cmp ::= compare
|
||||
compare ::= expr expr COMPARE_OP
|
||||
cmp_list ::= expr cmp_list1 ROT_TWO POP_TOP
|
||||
_come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list1 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list1 _come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list2 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list2 _come_from
|
||||
cmp_list2 ::= expr COMPARE_OP JUMP_FORWARD
|
||||
cmp_list2 ::= expr COMPARE_OP RETURN_VALUE
|
||||
mapexpr ::= BUILD_MAP kvlist
|
||||
|
||||
kvlist ::= kvlist kv
|
||||
kvlist ::= kvlist kv2
|
||||
kvlist ::= kvlist kv3
|
||||
kvlist ::=
|
||||
|
||||
kv ::= DUP_TOP expr ROT_TWO expr STORE_SUBSCR
|
||||
kv2 ::= DUP_TOP expr expr ROT_THREE STORE_SUBSCR
|
||||
kv3 ::= expr expr STORE_MAP
|
||||
|
||||
exprlist ::= exprlist expr
|
||||
exprlist ::= expr
|
||||
|
||||
nullexprlist ::=
|
||||
'''
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
@@ -535,7 +403,8 @@ class Python2Parser(PythonParser):
|
||||
|
||||
op = k[:k.rfind('_')]
|
||||
if op in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
|
||||
rule = 'build_list ::= ' + 'expr '*v + k
|
||||
rule = ('build_list ::= ' + 'expr1024 '*(v//1024) +
|
||||
'expr32 '*((v//32)%32) + 'expr '*(v%32) + k)
|
||||
elif op in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
|
||||
rule = 'unpack ::= ' + k + ' designator'*v
|
||||
elif op == 'UNPACK_LIST':
|
||||
|
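The chunked rule above is what keeps grammar rules for long literals short: the element count is decomposed into expr1024 and expr32 groups plus a remainder instead of one expr per element, which is what the NEWS entry about speeding up deparsing of long lists refers to. A small sketch of the string it generates (build_list_rule is a hypothetical helper that just mirrors the expression above):

# Sketch of the grammar rule generated for a BUILD_LIST/TUPLE/SET with v elements.
def build_list_rule(v, k):
    return ('build_list ::= ' + 'expr1024 ' * (v // 1024) +
            'expr32 ' * ((v // 32) % 32) + 'expr ' * (v % 32) + k)

print(build_list_rule(3, 'BUILD_LIST_3'))
# build_list ::= expr expr expr BUILD_LIST_3
print(build_list_rule(70, 'BUILD_LIST_70'))
# build_list ::= expr32 expr32 expr expr expr expr expr expr BUILD_LIST_70
print(build_list_rule(1100, 'BUILD_LIST_1100'))
# one expr1024, two expr32, twelve expr, then BUILD_LIST_1100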
@@ -371,145 +371,10 @@ class Python3Parser(PythonParser):
|
||||
|
||||
'''
|
||||
|
||||
def p_expr(self, args):
|
||||
def p_expr3(self, args):
|
||||
'''
|
||||
expr ::= _mklambda
|
||||
expr ::= SET_LINENO
|
||||
expr ::= LOAD_FAST
|
||||
expr ::= LOAD_NAME
|
||||
expr ::= LOAD_CONST
|
||||
expr ::= LOAD_GLOBAL
|
||||
expr ::= LOAD_DEREF
|
||||
expr ::= LOAD_LOCALS
|
||||
expr ::= LOAD_CLASSNAME
|
||||
expr ::= load_attr
|
||||
expr ::= binary_expr
|
||||
expr ::= binary_expr_na
|
||||
expr ::= build_list
|
||||
expr ::= cmp
|
||||
expr ::= mapexpr
|
||||
expr ::= and
|
||||
expr ::= and2
|
||||
expr ::= or
|
||||
expr ::= unary_expr
|
||||
expr ::= call_function
|
||||
expr ::= unary_not
|
||||
expr ::= unary_convert
|
||||
expr ::= binary_subscr
|
||||
expr ::= binary_subscr2
|
||||
expr ::= load_attr
|
||||
expr ::= get_iter
|
||||
expr ::= slice0
|
||||
expr ::= slice1
|
||||
expr ::= slice2
|
||||
expr ::= slice3
|
||||
expr ::= buildslice2
|
||||
expr ::= buildslice3
|
||||
expr ::= yield
|
||||
|
||||
binary_expr ::= expr expr binary_op
|
||||
binary_op ::= BINARY_ADD
|
||||
binary_op ::= BINARY_MULTIPLY
|
||||
binary_op ::= BINARY_AND
|
||||
binary_op ::= BINARY_OR
|
||||
binary_op ::= BINARY_XOR
|
||||
binary_op ::= BINARY_SUBTRACT
|
||||
binary_op ::= BINARY_DIVIDE
|
||||
binary_op ::= BINARY_TRUE_DIVIDE
|
||||
binary_op ::= BINARY_FLOOR_DIVIDE
|
||||
binary_op ::= BINARY_MODULO
|
||||
binary_op ::= BINARY_LSHIFT
|
||||
binary_op ::= BINARY_RSHIFT
|
||||
binary_op ::= BINARY_POWER
|
||||
|
||||
unary_expr ::= expr unary_op
|
||||
unary_op ::= UNARY_POSITIVE
|
||||
unary_op ::= UNARY_NEGATIVE
|
||||
unary_op ::= UNARY_INVERT
|
||||
|
||||
unary_not ::= expr UNARY_NOT
|
||||
unary_convert ::= expr UNARY_CONVERT
|
||||
|
||||
binary_subscr ::= expr expr BINARY_SUBSCR
|
||||
binary_subscr2 ::= expr expr DUP_TOPX_2 BINARY_SUBSCR
|
||||
|
||||
load_attr ::= expr LOAD_ATTR
|
||||
get_iter ::= expr GET_ITER
|
||||
|
||||
# Python3 drops slice0..slice3
|
||||
buildslice3 ::= expr expr expr BUILD_SLICE_3
|
||||
buildslice2 ::= expr expr BUILD_SLICE_2
|
||||
|
||||
yield ::= expr YIELD_VALUE
|
||||
|
||||
_mklambda ::= load_closure mklambda
|
||||
_mklambda ::= mklambda
|
||||
|
||||
or ::= expr jmp_true expr _come_from
|
||||
or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
|
||||
and ::= expr jmp_false expr _come_from
|
||||
and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
|
||||
and2 ::= _jump jmp_false COME_FROM expr COME_FROM
|
||||
|
||||
expr ::= conditional
|
||||
conditional ::= expr jmp_false expr JUMP_FORWARD expr COME_FROM
|
||||
conditional ::= expr jmp_false expr JUMP_ABSOLUTE expr
|
||||
expr ::= conditionalnot
|
||||
conditionalnot ::= expr jmp_true expr _jump expr COME_FROM
|
||||
|
||||
ret_expr ::= expr
|
||||
ret_expr ::= ret_and
|
||||
ret_expr ::= ret_or
|
||||
|
||||
ret_expr_or_cond ::= ret_expr
|
||||
ret_expr_or_cond ::= ret_cond
|
||||
ret_expr_or_cond ::= ret_cond_not
|
||||
|
||||
ret_and ::= expr JUMP_IF_FALSE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_or ::= expr JUMP_IF_TRUE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF ret_expr_or_cond
|
||||
ret_cond_not ::= expr POP_JUMP_IF_TRUE expr RETURN_END_IF ret_expr_or_cond
|
||||
|
||||
stmt ::= return_lambda
|
||||
stmt ::= conditional_lambda
|
||||
|
||||
return_lambda ::= ret_expr RETURN_VALUE LAMBDA_MARKER
|
||||
conditional_lambda ::= expr jmp_false return_if_stmt return_stmt LAMBDA_MARKER
|
||||
|
||||
cmp ::= cmp_list
|
||||
cmp ::= compare
|
||||
compare ::= expr expr COMPARE_OP
|
||||
cmp_list ::= expr cmp_list1 ROT_TWO POP_TOP
|
||||
_come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list1 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list1 _come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list2 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list2 _come_from
|
||||
cmp_list2 ::= expr COMPARE_OP JUMP_FORWARD
|
||||
cmp_list2 ::= expr COMPARE_OP RETURN_VALUE
|
||||
mapexpr ::= BUILD_MAP kvlist
|
||||
|
||||
kvlist ::= kvlist kv
|
||||
kvlist ::= kvlist kv2
|
||||
kvlist ::= kvlist kv3
|
||||
kvlist ::=
|
||||
|
||||
kv ::= DUP_TOP expr ROT_TWO expr STORE_SUBSCR
|
||||
kv2 ::= DUP_TOP expr expr ROT_THREE STORE_SUBSCR
|
||||
kv3 ::= expr expr STORE_MAP
|
||||
|
||||
exprlist ::= exprlist expr
|
||||
exprlist ::= expr
|
||||
|
||||
nullexprlist ::=
|
||||
'''
|
||||
|
||||
@staticmethod
|
||||
@@ -613,7 +478,9 @@ class Python3Parser(PythonParser):
|
||||
elif opname == 'LOAD_BUILD_CLASS':
|
||||
self.custom_build_class_rule(opname, i, token, tokens, customize)
|
||||
elif opname_base in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
|
||||
rule = 'build_list ::= ' + 'expr ' * token.attr + opname
|
||||
v = token.attr
|
||||
rule = ('build_list ::= ' + 'expr1024 ' * int(v//1024) +
|
||||
'expr32 ' * int((v//32)%32) + 'expr '*(v%32) + opname)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
elif self.version >= 3.5 and opname_base == 'BUILD_MAP':
|
||||
kvlist_n = "kvlist_%s" % token.attr
|
||||
@@ -677,6 +544,18 @@ class Python35onParser(Python3Parser):
|
||||
"""
|
||||
# this optimization is only used in Python 3.5 and beyond
|
||||
_ifstmts_jump ::= c_stmts_opt
|
||||
|
||||
# Python 3.5+ has WITH_CLEANUP_START/FINISH
|
||||
withstmt ::= expr SETUP_WITH with_setup suite_stmts_opt
|
||||
POP_BLOCK LOAD_CONST COME_FROM
|
||||
WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
|
||||
|
||||
withasstmt ::= expr SETUP_WITH designator suite_stmts_opt
|
||||
POP_BLOCK LOAD_CONST COME_FROM
|
||||
WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
|
||||
|
||||
# Python 3.5+ classes seem to end with this:
|
||||
stmt ::= LOAD_CLOSURE RETURN_VALUE RETURN_LAST
|
||||
"""
|
||||
|
||||
class Python35onParserSingle(Python35onParser, PythonParserSingle):
|
||||
|
uncompyle6/scanner.py
@@ -33,7 +33,7 @@ from uncompyle6.opcodes import (opcode_25, opcode_26, opcode_27,
                                opcode_32, opcode_33, opcode_34, opcode_35)


class Code:
class Code(object):
    '''
    Class for representing code-objects.
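Context for the Code(object) change, per the ChangeLog entry about removing an old-style class; a minimal illustration, not project code:

class Old:              # old-style on Python 2, new-style on Python 3
    pass

class New(object):      # new-style on both Python 2 and Python 3
    pass

print(type(Old()))      # Python 2: <type 'instance'>; Python 3: <class '__main__.Old'>
print(type(New()))      # <class '__main__.New'> on both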
399
uncompyle6/scanners/dis35.py
Normal file
@@ -0,0 +1,399 @@
|
||||
# This is taken from the Python 3.5 dis module
|
||||
"""Disassembler of Python byte code into mnemonics."""
|
||||
|
||||
from dis import findlinestarts
|
||||
import types
|
||||
import collections
|
||||
import io
|
||||
|
||||
# This part is modified for cross-Python compatibility
|
||||
from uncompyle6.opcodes.opcode_35 import *
|
||||
from uncompyle6.opcodes.opcode_35 import opname
|
||||
|
||||
_have_code = (types.MethodType, types.FunctionType, types.CodeType, type)
|
||||
|
||||
def _try_compile(source, name):
|
||||
"""Attempts to compile the given source, first as an expression and
|
||||
then as a statement if the first approach fails.
|
||||
|
||||
Utility function to accept strings in functions that otherwise
|
||||
expect code objects
|
||||
"""
|
||||
try:
|
||||
c = compile(source, name, 'eval')
|
||||
except SyntaxError:
|
||||
c = compile(source, name, 'exec')
|
||||
return c
|
||||
|
||||
def dis(x=None):
|
||||
"""Disassemble classes, methods, functions, generators, or code.
|
||||
"""
|
||||
if x is None:
|
||||
distb()
|
||||
return
|
||||
if hasattr(x, '__func__'): # Method
|
||||
x = x.__func__
|
||||
if hasattr(x, '__code__'): # Function
|
||||
x = x.__code__
|
||||
if hasattr(x, 'gi_code'): # Generator
|
||||
x = x.gi_code
|
||||
if hasattr(x, '__dict__'): # Class or module
|
||||
items = sorted(x.__dict__.items())
|
||||
for name, x1 in items:
|
||||
if isinstance(x1, _have_code):
|
||||
print("Disassembly of %s:" % name, file)
|
||||
try:
|
||||
dis(x1, file)
|
||||
except TypeError as msg:
|
||||
print("Sorry:", msg)
|
||||
print(file)
|
||||
elif isinstance(x, (bytes, bytearray)): # Raw bytecode
|
||||
_disassemble_bytes(x, file)
|
||||
else:
|
||||
raise TypeError("don't know how to disassemble %s objects" %
|
||||
type(x).__name__)
|
||||
|
||||
# The inspect module interrogates this dictionary to build its
|
||||
# list of CO_* constants. It is also used by pretty_flags to
|
||||
# turn the co_flags field into a human readable list.
|
||||
COMPILER_FLAG_NAMES = {
|
||||
1: "OPTIMIZED",
|
||||
2: "NEWLOCALS",
|
||||
4: "VARARGS",
|
||||
8: "VARKEYWORDS",
|
||||
16: "NESTED",
|
||||
32: "GENERATOR",
|
||||
64: "NOFREE",
|
||||
128: "COROUTINE",
|
||||
256: "ITERABLE_COROUTINE",
|
||||
}
|
||||
|
||||
def pretty_flags(flags):
|
||||
"""Return pretty representation of code flags."""
|
||||
names = []
|
||||
for i in range(32):
|
||||
flag = 1<<i
|
||||
if flags & flag:
|
||||
names.append(COMPILER_FLAG_NAMES.get(flag, hex(flag)))
|
||||
flags ^= flag
|
||||
if not flags:
|
||||
break
|
||||
else:
|
||||
names.append(hex(flags))
|
||||
return ", ".join(names)
|
||||
|
||||
def _get_code_object(x):
|
||||
"""Helper to handle methods, functions, generators, strings and raw code objects"""
|
||||
if hasattr(x, '__func__'): # Method
|
||||
x = x.__func__
|
||||
if hasattr(x, '__code__'): # Function
|
||||
x = x.__code__
|
||||
if hasattr(x, 'gi_code'): # Generator
|
||||
x = x.gi_code
|
||||
if isinstance(x, str): # Source code
|
||||
x = _try_compile(x, "<disassembly>")
|
||||
if hasattr(x, 'co_code'): # Code object
|
||||
return x
|
||||
raise TypeError("don't know how to disassemble %s objects" %
|
||||
type(x).__name__)
|
||||
|
||||
def code_info(x):
|
||||
"""Formatted details of methods, functions, or code."""
|
||||
return _format_code_info(_get_code_object(x))
|
||||
|
||||
def _format_code_info(co):
|
||||
lines = []
|
||||
lines.append("Name: %s" % co.co_name)
|
||||
lines.append("Filename: %s" % co.co_filename)
|
||||
lines.append("Argument count: %s" % co.co_argcount)
|
||||
lines.append("Kw-only arguments: %s" % co.co_kwonlyargcount)
|
||||
lines.append("Number of locals: %s" % co.co_nlocals)
|
||||
lines.append("Stack size: %s" % co.co_stacksize)
|
||||
lines.append("Flags: %s" % pretty_flags(co.co_flags))
|
||||
if co.co_consts:
|
||||
lines.append("Constants:")
|
||||
for i_c in enumerate(co.co_consts):
|
||||
lines.append("%4d: %r" % i_c)
|
||||
if co.co_names:
|
||||
lines.append("Names:")
|
||||
for i_n in enumerate(co.co_names):
|
||||
lines.append("%4d: %s" % i_n)
|
||||
if co.co_varnames:
|
||||
lines.append("Variable names:")
|
||||
for i_n in enumerate(co.co_varnames):
|
||||
lines.append("%4d: %s" % i_n)
|
||||
if co.co_freevars:
|
||||
lines.append("Free variables:")
|
||||
for i_n in enumerate(co.co_freevars):
|
||||
lines.append("%4d: %s" % i_n)
|
||||
if co.co_cellvars:
|
||||
lines.append("Cell variables:")
|
||||
for i_n in enumerate(co.co_cellvars):
|
||||
lines.append("%4d: %s" % i_n)
|
||||
return "\n".join(lines)
|
||||
|
||||
def show_code(co):
|
||||
"""Print details of methods, functions, or code to *file*.
|
||||
|
||||
If *file* is not provided, the output is printed on stdout.
|
||||
"""
|
||||
print(code_info(co))
|
||||
|
||||
_Instruction = collections.namedtuple("_Instruction",
|
||||
"opname opcode arg argval argrepr offset starts_line is_jump_target")
|
||||
|
||||
class Instruction(_Instruction):
|
||||
"""Details for a bytecode operation
|
||||
|
||||
Defined fields:
|
||||
opname - human readable name for operation
|
||||
opcode - numeric code for operation
|
||||
arg - numeric argument to operation (if any), otherwise None
|
||||
argval - resolved arg value (if known), otherwise same as arg
|
||||
argrepr - human readable description of operation argument
|
||||
offset - start index of operation within bytecode sequence
|
||||
starts_line - line started by this opcode (if any), otherwise None
|
||||
is_jump_target - True if other code jumps to here, otherwise False
|
||||
"""
|
||||
|
||||
def _disassemble(self, lineno_width=3, mark_as_current=False):
|
||||
"""Format instruction details for inclusion in disassembly output
|
||||
|
||||
*lineno_width* sets the width of the line number field (0 omits it)
|
||||
*mark_as_current* inserts a '-->' marker arrow as part of the line
|
||||
"""
|
||||
fields = []
|
||||
# Column: Source code line number
|
||||
if lineno_width:
|
||||
if self.starts_line is not None:
|
||||
lineno_fmt = "%%%dd" % lineno_width
|
||||
fields.append(lineno_fmt % self.starts_line)
|
||||
else:
|
||||
fields.append(' ' * lineno_width)
|
||||
# Column: Current instruction indicator
|
||||
if mark_as_current:
|
||||
fields.append('-->')
|
||||
else:
|
||||
fields.append(' ')
|
||||
# Column: Jump target marker
|
||||
if self.is_jump_target:
|
||||
fields.append('>>')
|
||||
else:
|
||||
fields.append(' ')
|
||||
# Column: Instruction offset from start of code sequence
|
||||
fields.append(repr(self.offset).rjust(4))
|
||||
# Column: Opcode name
|
||||
fields.append(opname.ljust(20))
|
||||
# Column: Opcode argument
|
||||
if self.arg is not None:
|
||||
fields.append(repr(self.arg).rjust(5))
|
||||
# Column: Opcode argument details
|
||||
if self.argrepr:
|
||||
fields.append('(' + self.argrepr + ')')
|
||||
return ' '.join(fields).rstrip()
|
||||
|
||||
|
||||
def get_instructions(x, first_line=None):
|
||||
"""Iterator for the opcodes in methods, functions or code
|
||||
|
||||
Generates a series of Instruction named tuples giving the details of
|
||||
each operation in the supplied code.
|
||||
|
||||
If *first_line* is not None, it indicates the line number that should
|
||||
be reported for the first source line in the disassembled code.
|
||||
Otherwise, the source line information (if any) is taken directly from
|
||||
the disassembled code object.
|
||||
"""
|
||||
co = _get_code_object(x)
|
||||
cell_names = co.co_cellvars + co.co_freevars
|
||||
linestarts = dict(findlinestarts(co))
|
||||
if first_line is not None:
|
||||
line_offset = first_line - co.co_firstlineno
|
||||
else:
|
||||
line_offset = 0
|
||||
return _get_instructions_bytes(co.co_code, co.co_varnames, co.co_names,
|
||||
co.co_consts, cell_names, linestarts,
|
||||
line_offset)
|
||||
|
||||
def _get_const_info(const_index, const_list):
|
||||
"""Helper to get optional details about const references
|
||||
|
||||
Returns the dereferenced constant and its repr if the constant
|
||||
list is defined.
|
||||
Otherwise returns the constant index and its repr().
|
||||
"""
|
||||
argval = const_index
|
||||
if const_list is not None:
|
||||
argval = const_list[const_index]
|
||||
return argval, repr(argval)
|
||||
|
||||
def _get_name_info(name_index, name_list):
|
||||
"""Helper to get optional details about named references
|
||||
|
||||
Returns the dereferenced name as both value and repr if the name
|
||||
list is defined.
|
||||
Otherwise returns the name index and its repr().
|
||||
"""
|
||||
argval = name_index
|
||||
if name_list is not None:
|
||||
argval = name_list[name_index]
|
||||
argrepr = argval
|
||||
else:
|
||||
argrepr = repr(argval)
|
||||
return argval, argrepr
|
||||
|
||||
|
||||
def _get_instructions_bytes(code, varnames=None, names=None, constants=None,
|
||||
cells=None, linestarts=None, line_offset=0):
|
||||
"""Iterate over the instructions in a bytecode string.
|
||||
|
||||
Generates a sequence of Instruction namedtuples giving the details of each
|
||||
opcode. Additional information about the code's runtime environment
|
||||
(e.g. variable names, constants) can be specified using optional
|
||||
arguments.
|
||||
|
||||
"""
|
||||
labels = findlabels(code)
|
||||
extended_arg = 0
|
||||
starts_line = None
|
||||
# enumerate() is not an option, since we sometimes process
|
||||
# multiple elements on a single pass through the loop
|
||||
n = len(code)
|
||||
i = 0
|
||||
while i < n:
|
||||
op = code[i]
|
||||
if isinstance(op, str):
|
||||
op_num = ord(op)
|
||||
else:
|
||||
op_num = op
|
||||
|
||||
offset = i
|
||||
if linestarts is not None:
|
||||
starts_line = linestarts.get(i, None)
|
||||
if starts_line is not None:
|
||||
starts_line += line_offset
|
||||
is_jump_target = i in labels
|
||||
i = i+1
|
||||
arg = None
|
||||
argval = None
|
||||
argrepr = ''
|
||||
if op >= HAVE_ARGUMENT:
|
||||
if isinstance(code[i], str):
|
||||
arg = op_num + ord(code[i+1])*256 + extended_arg
|
||||
else:
|
||||
arg = code[i] + code[i+1]*256 + extended_arg
|
||||
extended_arg = 0
|
||||
i = i+2
|
||||
if op == EXTENDED_ARG:
|
||||
extended_arg = arg*65536
|
||||
# Set argval to the dereferenced value of the argument when
|
||||
# available, and argrepr to the string representation of argval.
|
||||
# _disassemble_bytes needs the string repr of the
|
||||
# raw name index for LOAD_GLOBAL, LOAD_CONST, etc.
|
||||
argval = arg
|
||||
if op in hasconst:
|
||||
argval, argrepr = _get_const_info(arg, constants)
|
||||
elif op in hasname:
|
||||
argval, argrepr = _get_name_info(arg, names)
|
||||
elif op in hasjrel:
|
||||
argval = i + arg
|
||||
argrepr = "to " + repr(argval)
|
||||
elif op in haslocal:
|
||||
argval, argrepr = _get_name_info(arg, varnames)
|
||||
elif op in hascompare:
|
||||
argval = cmp_op[arg]
|
||||
argrepr = argval
|
||||
elif op in hasfree:
|
||||
argval, argrepr = _get_name_info(arg, cells)
|
||||
elif op in hasnargs:
|
||||
argrepr = "%d positional, %d keyword pair" % (code[i-2], code[i-1])
|
||||
yield Instruction(opname[op_num], op,
|
||||
arg, argval, argrepr,
|
||||
offset, starts_line, is_jump_target)
|
||||
|
||||
def findlabels(code):
|
||||
"""Detect all offsets in a byte code which are jump targets.
|
||||
|
||||
Return the list of offsets.
|
||||
|
||||
"""
|
||||
labels = []
|
||||
# enumerate() is not an option, since we sometimes process
|
||||
# multiple elements on a single pass through the loop
|
||||
n = len(code)
|
||||
i = 0
|
||||
while i < n:
|
||||
op = code[i]
|
||||
i = i+1
|
||||
if op >= HAVE_ARGUMENT:
|
||||
arg = code[i] + code[i+1]*256
|
||||
i = i+2
|
||||
label = -1
|
||||
if op in hasjrel:
|
||||
label = i+arg
|
||||
elif op in hasjabs:
|
||||
label = arg
|
||||
if label >= 0:
|
||||
if label not in labels:
|
||||
labels.append(label)
|
||||
return labels
|
||||
|
||||
class Bytecode:
|
||||
"""The bytecode operations of a piece of code
|
||||
|
||||
Instantiate this with a function, method, string of code, or a code object
|
||||
(as returned by compile()).
|
||||
|
||||
Iterating over this yields the bytecode operations as Instruction instances.
|
||||
"""
|
||||
def __init__(self, x, first_line=None, current_offset=None):
|
||||
self.codeobj = co = _get_code_object(x)
|
||||
if first_line is None:
|
||||
self.first_line = co.co_firstlineno
|
||||
self._line_offset = 0
|
||||
else:
|
||||
self.first_line = first_line
|
||||
self._line_offset = first_line - co.co_firstlineno
|
||||
self._cell_names = co.co_cellvars + co.co_freevars
|
||||
self._linestarts = dict(findlinestarts(co))
|
||||
self._original_object = x
|
||||
self.current_offset = current_offset
|
||||
|
||||
def __iter__(self):
|
||||
co = self.codeobj
|
||||
return _get_instructions_bytes(co.co_code, co.co_varnames, co.co_names,
|
||||
co.co_consts, self._cell_names,
|
||||
self._linestarts,
|
||||
line_offset=self._line_offset)
|
||||
|
||||
def __repr__(self):
|
||||
return "{}({!r})".format(self.__class__.__name__,
|
||||
self._original_object)
|
||||
|
||||
@classmethod
|
||||
def from_traceback(cls, tb):
|
||||
""" Construct a Bytecode from the given traceback """
|
||||
while tb.tb_next:
|
||||
tb = tb.tb_next
|
||||
return cls(tb.tb_frame.f_code, current_offset=tb.tb_lasti)
|
||||
|
||||
def info(self):
|
||||
"""Return formatted information about the code object."""
|
||||
return _format_code_info(self.codeobj)
|
||||
|
||||
def dis(self):
|
||||
"""Return a formatted view of the bytecode operations."""
|
||||
co = self.codeobj
|
||||
if self.current_offset is not None:
|
||||
offset = self.current_offset
|
||||
else:
|
||||
offset = -1
|
||||
with io.StringIO() as output:
|
||||
_disassemble_bytes(co.co_code, varnames=co.co_varnames,
|
||||
names=co.co_names, constants=co.co_consts,
|
||||
cells=self._cell_names,
|
||||
linestarts=self._linestarts,
|
||||
line_offset=self._line_offset,
|
||||
file=output,
|
||||
lasti=offset)
|
||||
return output.getvalue()
|
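A small, hypothetical usage sketch of the Bytecode class defined above, mirroring the way scanner35.py drives it (the sample function is made up; opcode names are only meaningful when the code object really is Python 3.5 bytecode):

import uncompyle6.scanners.dis35 as dis35   # same import style as scanner35.py

def sample(a, b):        # made-up function, just to get a code object
    return a + b

# Bytecode accepts a function, method, code object, or source string and
# yields Instruction namedtuples (opname, opcode, arg, argval, argrepr,
# offset, starts_line, is_jump_target).
for inst in dis35.Bytecode(sample):
    print(inst.offset, inst.opname, inst.argrepr)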
@@ -214,12 +214,6 @@ class Scanner27(scan.Scanner):
|
||||
rv.append(Token(replace[offset], oparg, pattr, offset, linestart))
|
||||
return rv, customize
|
||||
|
||||
def op_size(self, op):
|
||||
if op < self.opc.HAVE_ARGUMENT:
|
||||
return 1
|
||||
else:
|
||||
return 3
|
||||
|
||||
def build_stmt_indices(self):
|
||||
code = self.code
|
||||
start = 0
|
||||
@@ -300,15 +294,6 @@ class Scanner27(scan.Scanner):
|
||||
i = s
|
||||
slist += [end] * (end-len(slist))
|
||||
|
||||
def remove_mid_line_ifs(self, ifs):
|
||||
filtered = []
|
||||
for i in ifs:
|
||||
if self.lines[i].l_no == self.lines[i+3].l_no:
|
||||
if self.code[self.prev[self.lines[i].next]] in (PJIT, PJIF):
|
||||
continue
|
||||
filtered.append(i)
|
||||
return filtered
|
||||
|
||||
def next_except_jump(self, start):
|
||||
'''
|
||||
Return the next jump that was generated by an except SomeException:
|
||||
|
@@ -555,6 +555,33 @@ class Scanner3(scan.Scanner):
|
||||
else:
|
||||
self.fixed_jumps[offset] = self.restrict_to_parent(target, parent)
|
||||
|
||||
def next_except_jump(self, start):
|
||||
"""
|
||||
Return the next jump that was generated by an except SomeException:
|
||||
construct in a try...except...else clause or None if not found.
|
||||
"""
|
||||
|
||||
if self.code[start] == DUP_TOP:
|
||||
except_match = self.first_instr(start, len(self.code), POP_JUMP_IF_FALSE)
|
||||
if except_match:
|
||||
jmp = self.prev_op[self.get_target(except_match)]
|
||||
self.ignore_if.add(except_match)
|
||||
self.not_continue.add(jmp)
|
||||
return jmp
|
||||
|
||||
count_END_FINALLY = 0
|
||||
count_SETUP_ = 0
|
||||
for i in self.op_range(start, len(self.code)):
|
||||
op = self.code[i]
|
||||
if op == END_FINALLY:
|
||||
if count_END_FINALLY == count_SETUP_:
|
||||
assert self.code[self.prev_op[i]] in (JUMP_ABSOLUTE, JUMP_FORWARD, RETURN_VALUE)
|
||||
self.not_continue.add(self.prev_op[i])
|
||||
return self.prev_op[i]
|
||||
count_END_FINALLY += 1
|
||||
elif op in (SETUP_EXCEPT, SETUP_WITH, SETUP_FINALLY):
|
||||
count_SETUP_ += 1
|
||||
|
||||
def rem_or(self, start, end, instr, target=None, include_beyond_target=False):
|
||||
"""
|
||||
Find offsets of all requested <instr> between <start> and <end>,
|
||||
|
@@ -298,33 +298,6 @@ class Scanner34(scan3.Scanner3):
|
||||
else:
|
||||
self.fixed_jumps[offset] = self.restrict_to_parent(target, parent)
|
||||
|
||||
def next_except_jump(self, start):
|
||||
"""
|
||||
Return the next jump that was generated by an except SomeException:
|
||||
construct in a try...except...else clause or None if not found.
|
||||
"""
|
||||
|
||||
if self.code[start] == DUP_TOP:
|
||||
except_match = self.first_instr(start, len(self.code), POP_JUMP_IF_FALSE)
|
||||
if except_match:
|
||||
jmp = self.prev_op[self.get_target(except_match)]
|
||||
self.ignore_if.add(except_match)
|
||||
self.not_continue.add(jmp)
|
||||
return jmp
|
||||
|
||||
count_END_FINALLY = 0
|
||||
count_SETUP_ = 0
|
||||
for i in self.op_range(start, len(self.code)):
|
||||
op = self.code[i]
|
||||
if op == END_FINALLY:
|
||||
if count_END_FINALLY == count_SETUP_:
|
||||
assert self.code[self.prev_op[i]] in (JUMP_ABSOLUTE, JUMP_FORWARD, RETURN_VALUE)
|
||||
self.not_continue.add(self.prev_op[i])
|
||||
return self.prev_op[i]
|
||||
count_END_FINALLY += 1
|
||||
elif op in (SETUP_EXCEPT, SETUP_WITH, SETUP_FINALLY):
|
||||
count_SETUP_ += 1
|
||||
|
||||
if __name__ == "__main__":
|
||||
co = inspect.currentframe().f_code
|
||||
tokens, customize = Scanner34(3.4).disassemble(co)
|
||||
|
@@ -10,16 +10,14 @@ for later use in deparsing.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import dis, inspect
|
||||
import inspect
|
||||
from array import array
|
||||
import uncompyle6.scanners.scanner3 as scan3
|
||||
import uncompyle6.scanners.dis35 as dis35
|
||||
|
||||
from uncompyle6.code import iscode
|
||||
from uncompyle6.scanner import Token
|
||||
|
||||
# Get all the opcodes into globals
|
||||
globals().update(dis.opmap)
|
||||
|
||||
import uncompyle6.opcodes.opcode_35
|
||||
# verify uses JUMP_OPs from here
|
||||
JUMP_OPs = uncompyle6.opcodes.opcode_35.JUMP_OPs
|
||||
@@ -42,7 +40,7 @@ class Scanner35(scan3.Scanner3):
|
||||
# Get jump targets
|
||||
# Format: {target offset: [jump offsets]}
|
||||
jump_targets = self.find_jump_targets()
|
||||
bytecode = dis.Bytecode(co)
|
||||
bytecode = dis35.Bytecode(co)
|
||||
|
||||
# self.lines contains (block,addrLastInstr)
|
||||
if classname:
|
||||
@@ -127,7 +125,7 @@ class Scanner35(scan3.Scanner3):
|
||||
if target < inst.offset:
|
||||
if (inst.offset in self.stmts and
|
||||
self.code[inst.offset+3] not in (END_FINALLY, POP_BLOCK)
|
||||
and offset not in self.not_continue):
|
||||
and inst.offset not in self.not_continue):
|
||||
opname = 'CONTINUE'
|
||||
else:
|
||||
opname = 'JUMP_BACK'
|
||||
@@ -288,33 +286,6 @@ class Scanner35(scan3.Scanner3):
|
||||
else:
|
||||
self.fixed_jumps[offset] = self.restrict_to_parent(target, parent)
|
||||
|
||||
def next_except_jump(self, start):
|
||||
"""
|
||||
Return the next jump that was generated by an except SomeException:
|
||||
construct in a try...except...else clause or None if not found.
|
||||
"""
|
||||
|
||||
if self.code[start] == DUP_TOP:
|
||||
except_match = self.first_instr(start, len(self.code), POP_JUMP_IF_FALSE)
|
||||
if except_match:
|
||||
jmp = self.prev_op[self.get_target(except_match)]
|
||||
self.ignore_if.add(except_match)
|
||||
self.not_continue.add(jmp)
|
||||
return jmp
|
||||
|
||||
count_END_FINALLY = 0
|
||||
count_SETUP_ = 0
|
||||
for i in self.op_range(start, len(self.code)):
|
||||
op = self.code[i]
|
||||
if op == END_FINALLY:
|
||||
if count_END_FINALLY == count_SETUP_:
|
||||
assert self.code[self.prev_op[i]] in (JUMP_ABSOLUTE, JUMP_FORWARD, RETURN_VALUE)
|
||||
self.not_continue.add(self.prev_op[i])
|
||||
return self.prev_op[i]
|
||||
count_END_FINALLY += 1
|
||||
elif op in (SETUP_EXCEPT, SETUP_WITH, SETUP_FINALLY):
|
||||
count_SETUP_ += 1
|
||||
|
||||
if __name__ == "__main__":
|
||||
co = inspect.currentframe().f_code
|
||||
tokens, customize = Scanner35(3.5).disassemble(co)
|
||||
|
@@ -973,13 +973,29 @@ class FragmentsWalker(pysource.SourceWalker, object):
        else:
            raise RuntimeError('Internal Error: n_build_list expects list or tuple')

        flat_elems = []
        for elem in node:
            if elem == 'expr1024':
                for subelem in elem:
                    for subsubelem in subelem:
                        flat_elems.append(subsubelem)
            elif elem == 'expr32':
                for subelem in elem:
                    flat_elems.append(subelem)
            else:
                flat_elems.append(elem)

        self.indentMore(INDENT_PER_LEVEL)
        if len(node) > 3:
            line_separator = ',\n' + self.indent
        else:
            line_separator = ', '
        sep = INDENT_PER_LEVEL[:-1]
        for elem in node:

        # FIXME:
        # if flat_elems > some_number, then group
        # do automatic wrapping
        for elem in flat_elems:
            if (elem == 'ROT_THREE'):
                continue
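The loop above undoes the expr1024/expr32 grouping before the list elements are written out. A stand-alone sketch of the same flattening, using ('kind', children) tuples in place of real parse-tree nodes (all names here are illustrative only):

def flatten(node_children):
    flat_elems = []
    for elem in node_children:
        kind, children = elem
        if kind == 'expr1024':          # 32 expr32 nodes, 1024 exprs total
            for expr32 in children:
                flat_elems.extend(expr32[1])
        elif kind == 'expr32':          # 32 expr nodes
            flat_elems.extend(children)
        else:                           # a bare expr
            flat_elems.append(elem)
    return flat_elems

expr = ('expr', [])
expr32 = ('expr32', [expr] * 32)
children = [('expr1024', [expr32] * 32), expr32, expr, expr]
print(len(flatten(children)))   # 1024 + 32 + 2 = 1058 flat elements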
uncompyle6/semantics/pysource.py
@@ -1,4 +1,4 @@
# Copyright (c) 2015 by Rocky Bernstein
# Copyright (c) 2015, 2016 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 1999 John Aycock
@@ -1195,33 +1195,49 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
"""
|
||||
p = self.prec
|
||||
self.prec = 100
|
||||
lastnode = node.pop().type
|
||||
if lastnode.startswith('BUILD_LIST'):
|
||||
lastnode = node.pop()
|
||||
lastnodetype = lastnode.type
|
||||
if lastnodetype.startswith('BUILD_LIST'):
|
||||
self.write('['); endchar = ']'
|
||||
elif lastnode.startswith('BUILD_TUPLE'):
|
||||
elif lastnodetype.startswith('BUILD_TUPLE'):
|
||||
self.write('('); endchar = ')'
|
||||
elif lastnode.startswith('BUILD_SET'):
|
||||
elif lastnodetype.startswith('BUILD_SET'):
|
||||
self.write('{'); endchar = '}'
|
||||
elif lastnode.startswith('ROT_TWO'):
|
||||
elif lastnodetype.startswith('ROT_TWO'):
|
||||
self.write('('); endchar = ')'
|
||||
else:
|
||||
raise 'Internal Error: n_build_list expects list or tuple'
|
||||
|
||||
flat_elems = []
|
||||
for elem in node:
|
||||
if elem == 'expr1024':
|
||||
for subelem in elem:
|
||||
for subsubelem in subelem:
|
||||
flat_elems.append(subsubelem)
|
||||
elif elem == 'expr32':
|
||||
for subelem in elem:
|
||||
flat_elems.append(subelem)
|
||||
else:
|
||||
flat_elems.append(elem)
|
||||
|
||||
self.indentMore(INDENT_PER_LEVEL)
|
||||
if len(node) > 3:
|
||||
if lastnode.attr > 3:
|
||||
line_separator = ',\n' + self.indent
|
||||
else:
|
||||
line_separator = ', '
|
||||
sep = INDENT_PER_LEVEL[:-1]
|
||||
for elem in node:
|
||||
if (elem == 'ROT_THREE'):
|
||||
continue
|
||||
|
||||
# FIXME:
|
||||
# if flat_elems > some_number, then group
|
||||
# do automatic wrapping
|
||||
for elem in flat_elems:
|
||||
if elem == 'ROT_THREE':
|
||||
continue
|
||||
assert elem == 'expr'
|
||||
value = self.traverse(elem)
|
||||
self.write(sep, value)
|
||||
sep = line_separator
|
||||
if len(node) == 1 and lastnode.startswith('BUILD_TUPLE'):
|
||||
if lastnode.attr == 1 and lastnodetype.startswith('BUILD_TUPLE'):
|
||||
self.write(',')
|
||||
self.write(endchar)
|
||||
self.indentLess(INDENT_PER_LEVEL)
|
||||
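The switch from len(node) to lastnode.attr in the hunk above matters because, once elements are grouped into expr32/expr1024 nodes, the child count no longer equals the element count, while the BUILD_* opcode argument still does; that argument also drives the trailing comma for one-element tuples. A quick check with CPython's standard dis module (not uncompyle6):

import dis

def one_tuple(x):
    return (x,)          # must be deparsed with the trailing comma

for inst in dis.get_instructions(one_tuple):
    if inst.opname == 'BUILD_TUPLE':
        print(inst.opname, inst.argval)   # BUILD_TUPLE 1 -> write "(x,)"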
@@ -1542,8 +1558,13 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
code._tokens = None # save memory
|
||||
assert ast == 'stmts'
|
||||
|
||||
if ast[0][0] == NAME_MODULE:
|
||||
if self.hide_internal: del ast[0]
|
||||
try:
|
||||
if ast[0][0] == NAME_MODULE:
|
||||
if self.hide_internal: del ast[0]
|
||||
elif ast[1][0] == NAME_MODULE:
|
||||
if self.hide_internal: del ast[1]
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
|
||||
qualname = '.'.join(self.classes)
|
||||
@@ -1563,9 +1584,9 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
if (ast[0][0] == ASSIGN_DOC_STRING(code.co_consts[0])):
|
||||
i = 0
|
||||
do_doc = True
|
||||
elif (len(ast) > 2 and 3.0 <= self.version <= 3.2 and
|
||||
ast[2][0] == ASSIGN_DOC_STRING(code.co_consts[0])):
|
||||
i = 2
|
||||
elif (len(ast) > 1 and 3.0 <= self.version <= 3.2 and
|
||||
ast[1][0] == ASSIGN_DOC_STRING(code.co_consts[0])):
|
||||
i = 1
|
||||
do_doc = True
|
||||
if do_doc and self.hide_internal:
|
||||
self.print_docstring(indent, code.co_consts[0])
|
||||
|
uncompyle6/version.py
@@ -1,3 +1,3 @@
# This file is suitable for sourcing inside bash as
# well as importing into Python
VERSION='2.3.2'
VERSION='2.3.4'