python-uncompyle6, mirror of https://github.com/rocky/python-uncompyle6.git
Fix * in multi-target assignment
test/bytecode_3.4/04_multi_assign.pyc (new binary file)
Binary file not shown.
test/simple_source/stmts/04_multi_assign.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+# Bug from Python 3.4 ftplib
+cls_or_self, *rest = args
+a, *b, c = args
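For reference, both shapes in the new test file compile to the UNPACK_EX opcode on Python 3. A quick, illustrative way to see that bytecode (the wrapper function demo is not part of the test; exact offsets vary by CPython version):

import dis

def demo(args):
    # Same two starred-target shapes exercised by 04_multi_assign.py
    cls_or_self, *rest = args
    a, *b, c = args
    return cls_or_self, rest, a, b, c

dis.dis(demo)
# Expect UNPACK_EX opcodes; the oparg packs the number of targets
# before the starred name (low byte) and after it (high byte).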
@@ -445,10 +445,6 @@ class Python3Parser(PythonParser):
         setcomp ::= {expr}^n LOAD_SETCOMP MAKE_CLOSURE
                     GET_ITER CALL_FUNCTION_1
 
-        dictcomp ::= LOAD_DICTCOMP MAKE_FUNCTION_0 expr
-                     GET_ITER CALL_FUNCTION_1
-
-
         # build_class (see load_build_class)
 
         build_list ::= {expr}^n BUILD_LIST_n
@@ -456,9 +452,10 @@ class Python3Parser(PythonParser):
 
         load_closure ::= {LOAD_CLOSURE}^n BUILD_TUPLE_n
 
-        unpack_list ::= UNPACK_LIST {expr}^n
-        unpack ::= UNPACK_TUPLE {expr}^n
-        unpack ::= UNPACK_SEQEUENCE {expr}^n
+        unpack_list ::= UNPACK_LIST_n {expr}^n
+        unpack ::= UNPACK_TUPLE_n {expr}^n
+        unpack ::= UNPACK_SEQEUENCE_n {expr}^n
+        unpack_ex ::= UNPACK_EX_b_a {expr}^(a+b)
 
         mkfunc ::= {pos_arg}^n LOAD_CONST MAKE_FUNCTION_n
         mklambda ::= {pos_arg}^n LOAD_LAMBDA MAKE_FUNCTION_n
@@ -524,6 +521,10 @@ class Python3Parser(PythonParser):
                 self.add_unique_rule(rule, opname, token.attr, customize)
                 rule = "mapexpr ::= %s %s" % (opname, kvlist_n)
                 self.add_unique_rule(rule, opname, token.attr, customize)
+            elif opname_base in ('UNPACK_EX'):
+                before_count, after_count = token.attr
+                rule = 'unpack ::= ' + opname + ' designator' * (before_count + after_count + 1)
+                self.add_unique_rule(rule, opname, token.attr, customize)
             elif opname_base in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
                 rule = 'unpack ::= ' + opname + ' designator' * token.attr
                 self.add_unique_rule(rule, opname, token.attr, customize)
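To make the customization concrete: for a, *b, c = args the scanner (see the hunks below) renames the opcode to UNPACK_EX_1+1 and sets token.attr to (1, 1), so the branch added above emits a rule with 1 + 1 + 1 designators. A stand-alone sketch of just that string construction; the variable names here are illustrative, not the parser's API:

# Mirror of the rule-building arithmetic added in the parser hunk above.
opname = 'UNPACK_EX_1+1'          # customized opcode name for `a, *b, c = ...`
before_count, after_count = 1, 1  # token.attr as set by the scanner

rule = 'unpack ::= ' + opname + ' designator' * (before_count + after_count + 1)
print(rule)
# unpack ::= UNPACK_EX_1+1 designator designator designator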
@@ -91,6 +91,7 @@ class Scanner3(scan.Scanner):
 
         for inst in bytecode:
 
+            argval = inst.argval
             if inst.offset in jump_targets:
                 jump_idx = 0
                 for jump_offset in jump_targets[inst.offset]:
@@ -153,6 +154,14 @@ class Scanner3(scan.Scanner):
                ):
                 pos_args = inst.argval
                 opname = '%s_%d' % (opname, pos_args)
+            elif opname == 'UNPACK_EX':
+                # FIXME: try with scanner and parser by
+                # changing inst.argval
+                before_args = inst.argval & 0xFF
+                after_args = (inst.argval >> 8) & 0xff
+                pattr = "%d before vararg, %d after" % (before_args, after_args)
+                argval = (before_args, after_args)
+                opname = '%s_%d+%d' % (opname, before_args, after_args)
             elif opname == 'JUMP_ABSOLUTE':
                 pattr = inst.argval
                 target = self.get_target(inst.offset)
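The before/after split added here follows CPython's UNPACK_EX oparg encoding: the low byte is the number of targets before the starred name, the high byte the number after. A minimal check of that decode, assuming the same encoding; the value 257 is just the worked example for a, *b, c:

# For `a, *b, c = args` CPython emits UNPACK_EX with oparg 257 (0x0101):
# 1 target before the starred name, 1 after.
argval = 257

before_args = argval & 0xFF          # low byte: count before *name
after_args = (argval >> 8) & 0xFF    # high byte: count after *name

assert (before_args, after_args) == (1, 1)
print("%d before vararg, %d after" % (before_args, after_args))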
@@ -171,7 +180,7 @@ class Scanner3(scan.Scanner):
             tokens.append(
                 Token(
                     type_ = opname,
-                    attr = inst.argval,
+                    attr = argval,
                     pattr = pattr,
                     offset = inst.offset,
                     linestart = inst.starts_line,
@@ -308,7 +317,7 @@ class Scanner3(scan.Scanner):
 
         designator_ops = set([
             STORE_FAST, STORE_NAME, STORE_GLOBAL, STORE_DEREF, STORE_ATTR,
-            STORE_SUBSCR, UNPACK_SEQUENCE, JUMP_ABSOLUTE
+            STORE_SUBSCR, UNPACK_SEQUENCE, JUMP_ABSOLUTE, UNPACK_EX
         ])
 
         # Compose preliminary list of indices with statements,
@@ -1432,6 +1432,20 @@ class SourceWalker(GenericASTTraversal, object):
         self.prune()
 
     def n_unpack(self, node):
+        if node[0].type.startswith('UNPACK_EX'):
+            # Python 3+
+            before_count, after_count = node[0].attr
+            for i in range(before_count+1):
+                self.preorder(node[i])
+                if i != 0:
+                    self.write(', ')
+            self.write('*')
+            for i in range(1, after_count+2):
+                self.preorder(node[before_count+i])
+                if i != after_count + 1:
+                    self.write(', ')
+            self.prune()
+            return
         for n in node[1:]:
             if n[0].type == 'unpack':
                 n[0].type = 'unpack_w_parens'
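The new n_unpack branch walks the target subtrees in two runs, printing the * between them. A stand-alone sketch of the same comma/star placement over plain strings; the targets list is a hypothetical stand-in for the node's designator children, which in the real tree sit after the UNPACK_EX token at index 0, and visiting that token is modeled here as printing nothing:

def format_unpack_ex(targets, before_count, after_count):
    """Rebuild 'a, *b, c' from its target names, mimicking the loops above."""
    out = []
    # children[0] is the UNPACK_EX token in the real AST; model it with None.
    children = [None] + list(targets)
    for i in range(before_count + 1):
        if children[i] is not None:
            out.append(children[i])
        if i != 0:
            out.append(', ')
    out.append('*')
    for i in range(1, after_count + 2):
        out.append(children[before_count + i])
        if i != after_count + 1:
            out.append(', ')
    return ''.join(out)

print(format_unpack_ex(['a', 'b', 'c'], before_count=1, after_count=1))
# a, *b, c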