python-uncompyle6 (mirror of https://github.com/rocky/python-uncompyle6.git)
Python3: remove "return None" at end of main for uncompyle. Fix up verify for Python3. First automated Python 3.4 tests via "makecheck-3.4" in test directory.
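Background for the first part of this commit (an illustration, not part of the diff): CPython emits an implicit "return None" at the end of module-level code, so the decompiler's token stream for the main routine always ends in LOAD_CONST None / RETURN_VALUE even though a return statement is not legal at module scope. A minimal sketch, valid for the 2.7/3.4-era interpreters this project targets:

    import dis
    code = compile("x = 1", "<example>", "exec")
    dis.dis(code)   # the last two instructions are LOAD_CONST (None) and RETURN_VALUE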
@@ -248,7 +248,7 @@ def main(in_base, out_base, files, codes, outfile=None,
         try:
             uncompyle_file(infile, outstream, showasm, showast)
             tot_files += 1
-        except FileNotFoundError as e:
+        except ValueError as e:
             sys.stderr.write("\n# %s" % e)
             failed_files += 1
         except KeyboardInterrupt:
@@ -561,9 +561,17 @@ class Traverser(walker.Walker, object):
                 print(repr(ast))
             return ast

+        # The bytecode for the end of the main routine has a
+        # "return None". However you can't issue a "return" statement in
+        # main. In the other build_ast routine we eliminate the
+        # return statement instructions before parsing.
+        # But here we want to keep these instructions at the expense of
+        # a fully runnable Python program because we
+        # may be queried about the role of one of those instructions.
+
         if len(tokens) >= 2 and not noneInNames:
-            if tokens[-1] == Token('RETURN_VALUE'):
-                if tokens[-2] != Token('LOAD_CONST'):
+            if tokens[-1].type == 'RETURN_VALUE':
+                if tokens[-2].type != 'LOAD_CONST':
                     tokens.append(Token('RETURN_LAST'))
         if len(tokens) == 0:
             return
@@ -28,8 +28,8 @@ def check_object_path(path):
         path = importlib.util.cache_from_source(path)
         return path
     if not path.endswith(".pyc") and not path.endswith(".pyo"):
-        raise FileNotFoundError("path %s must point to a .py or .pyc file" %
-                                path)
+        raise ValueError("path %s must point to a .py or .pyc file" %
+                         path)
     return path

 def disco(version, co, out=None):
@@ -134,7 +134,7 @@ def disassemble_files(in_base, out_base, files, outfile=None):
         else: # uncompyle successfull
             if outfile:
                 outstream.close()
-            if not outfile: print('\n# okay decompyling', infile)
+            if not outfile: print('\n# okay disassembling', infile)
             sys.stdout.flush()

     if outfile:
@@ -42,6 +42,10 @@ def jabs_op(name, op):
     def_op(name, op)
     hasjabs.append(op)

+def updateGlobal():
+    # JUMP_OPs are used in verification
+    globals().update({'JUMP_OPs': map(lambda op: opname[op], hasjrel + hasjabs)})
+
 # Instruction opcodes for compiled code
 # Blank lines correspond to available opcodes

@@ -189,4 +193,5 @@ hasfree.append(148)
 def_op('EXTENDED_ARG', 144)
 EXTENDED_ARG = 144

+updateGlobal()
 del def_op, name_op, jrel_op, jabs_op
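The updateGlobal() helper added above publishes JUMP_OPs, the list of jump-opcode names that verify.py consults. A rough stand-alone equivalent using the stdlib opcode module (an assumption for illustration; the project derives it from its own per-version opcode table):

    import opcode
    JUMP_OPs = map(lambda op: opcode.opname[op], opcode.hasjrel + opcode.hasjabs)
    # Under Python 3, map() returns a lazy iterator rather than a list, which is
    # why the verify hunk further down writes list(scan.JUMP_OPs) + ['JUMP_BACK'].
    print(list(JUMP_OPs))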
@@ -44,6 +44,8 @@ class _State:
         self.T, self.complete, self.items = [], [], items
         self.stateno = stateno

+# DEFAULT_DEBUG = {'rules': True, 'transition': False}
+DEFAULT_DEBUG = {'rules': False, 'transition': False}
 class GenericParser:
     '''
     An Earley parser, as per J. Earley, "An Efficient Context-Free
@@ -56,7 +58,7 @@ class GenericParser:
     Parsing", unpublished paper, 2001.
     '''

-    def __init__(self, start, debug=False):
+    def __init__(self, start, debug=DEFAULT_DEBUG):
         self.rules = {}
         self.rule2func = {}
         self.rule2name = {}
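With this change the parser's debug parameter is a dict of feature flags rather than a bool. A hypothetical subclass showing how the flags would be passed through (the grammar start symbol here is made up for illustration):

    class MyParser(GenericParser):
        def __init__(self):
            # enable rule tracing but not state-transition tracing
            GenericParser.__init__(self, 'expr', debug={'rules': True, 'transition': False})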
@@ -401,10 +403,11 @@ class GenericParser:
         return rv

     def gotoT(self, state, t):
-        if self.debug: print("Terminal", t)
+        if self.debug['rules']: print("Terminal", t, state)
         return [self.goto(state, t)]

     def gotoST(self, state, st):
+        if self.debug['transition']: print("GotoST", st, state)
         rv = []
         for t in self.states[state].T:
             if st == t:
@@ -567,7 +570,7 @@ class GenericParser:
         return self.rule2func[self.new2old[rule]](attr)

     def buildTree(self, nt, item, tokens, k):
-        if self.debug:
+        if self.debug['rules']:
             print("NT", nt)
         state, parent = item

@@ -17,7 +17,7 @@ import dis, inspect
 from collections import namedtuple
 from array import array

-from uncompyle6.opcodes.opcode_27 import *
+from uncompyle6.opcodes.opcode_27 import * # NOQA
 import uncompyle6.scanner as scan

 class Scanner27(scan.Scanner):
@@ -40,7 +40,7 @@ class Scanner27(scan.Scanner):
         self.code = array('B', co.co_code[:n])

         self.prev = [0]
-        # mapping adresses of instru & arg
+        # mapping addresses of instruction & argument
         for i in self.op_range(0, n):
             op = self.code[i]
             self.prev.append(i)
@@ -21,9 +21,14 @@ from collections import namedtuple
 from uncompyle6 import PYTHON_VERSION
 from uncompyle6.scanner import Token, L65536

+import uncompyle6.opcodes.opcode_34
+# Get all the opcodes into globals
+JUMP_OPs = uncompyle6.opcodes.opcode_34.JUMP_OPs
+globals().update(dis.opmap)
+
 from uncompyle6.opcodes.opcode_34 import *


 import uncompyle6.scanner as scan


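The globals().update(dis.opmap) line added above gives the scanner module a top-level name for every opcode number. Roughly, and only as an illustration of what dis.opmap holds:

    import dis
    print(dis.opmap['LOAD_CONST'], dis.opmap['RETURN_VALUE'])
    # After globals().update(dis.opmap), the module can refer to opcodes by name,
    # e.g. comparisons such as self.code[i] == RETURN_VALUE.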
@@ -14,13 +14,14 @@ import uncompyle6.scanner as scanner
 # FIXME: DRY
 if (sys.version_info >= (3, 0)):
     truediv = operator.truediv

     def cmp(a, b):
         return (a > b) - (a < b)
     from functools import reduce
 else:
     truediv = operator.div


+def code_equal(a, b):
+    return a.co_code == b.co_code

 BIN_OP_FUNCS = {
     'BINARY_POWER': operator.pow,
     'BINARY_MULTIPLY': operator.mul,
@@ -151,7 +152,7 @@ def cmp_code_objects(version, code_obj1, code_obj2, name=''):
         name = '%s.%s' % (name, code_obj1.co_name)
         if name == '.?': name = '__main__'

-    if isinstance(code_obj1, object) and cmp(code_obj1, code_obj2):
+    if isinstance(code_obj1, object) and code_equal(code_obj1, code_obj2):
         # use the new style code-classes' __cmp__ method, which
         # should be faster and more sophisticated
         # if this compare fails, we use the old routine to
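Background on the cmp() to code_equal() switch (not from the commit itself): Python 3 removes the cmp() builtin, and code objects no longer support ordering comparisons, so the verifier now checks the raw bytecode for equality instead. A minimal sketch:

    a = compile("x = 1", "<a>", "exec")
    b = compile("x = 1", "<b>", "exec")
    print(a.co_code == b.co_code)   # True: identical bytecode despite different filenames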
@@ -186,9 +187,9 @@ def cmp_code_objects(version, code_obj1, code_obj2, name=''):
     elif version == 3.4:
         import uncompyle6.scanners.scanner34 as scan
         scanner = scan.Scanner34()
     scanner.setShowAsm( showasm=False )

     global JUMP_OPs
-    JUMP_OPs = scan.JUMP_OPs + ['JUMP_BACK']
+    JUMP_OPs = list(scan.JUMP_OPs) + ['JUMP_BACK']

     # use changed Token class
     # we (re)set this here to save exception handling,
@@ -227,7 +228,7 @@ def cmp_code_objects(version, code_obj1, code_obj2, name=''):
             raise CmpErrorCode(name, tokens1[idx1].offset, tokens1[idx1],
                                tokens2[idx2], tokens1, tokens2)

-        if tokens1[i1] != tokens2[i2]:
+        if tokens1[i1].type != tokens2[i2].type:
             if tokens1[i1].type == 'LOAD_CONST' == tokens2[i2].type:
                 i = 1
                 while tokens1[i1+i].type == 'LOAD_CONST':
@@ -1406,9 +1406,13 @@ class Walker(GenericASTTraversal, object):
                 self.print_(repr(ast))
             return ast

+        # The bytecode for the end of the main routine has a
+        # "return None". However you can't issue a "return" statement in
+        # main. So as the old cigarette slogan goes: I'd rather switch (the token stream)
+        # than fight (with the grammar to not emit "return None").
         if len(tokens) >= 2 and not noneInNames:
-            if tokens[-1] == Token('RETURN_VALUE'):
-                if tokens[-2] == Token('LOAD_CONST'):
+            if tokens[-1].type == 'RETURN_VALUE':
+                if tokens[-2].type == 'LOAD_CONST':
                     del tokens[-2:]
                 else:
                     tokens.append(Token('RETURN_LAST'))