mirror of https://github.com/rocky/python-uncompyle6.git (synced 2025-08-04 09:22:40 +08:00)
Merge branch 'master' into python-2.4
.circleci/config.yml (new file, 89 lines)
@@ -0,0 +1,89 @@
# This configuration was automatically generated from a CircleCI 1.0 config.
# It should include any build commands you had along with commands that CircleCI
# inferred from your project structure. We strongly recommend you read all the
# comments in this file to understand the structure of CircleCI 2.0, as the idiom
# for configuration has changed substantially in 2.0 to allow arbitrary jobs rather
# than the prescribed lifecycle of 1.0. In general, we recommend using this generated
# configuration as a reference rather than using it in production, though in most
# cases it should duplicate the execution of your original 1.0 config.
version: 2
jobs:
  build:
    working_directory: ~/rocky/python-uncompyle6
    parallelism: 1
    shell: /bin/bash --login
    # CircleCI 2.0 does not support environment variables that refer to each other the same way as 1.0 did.
    # If any of these refer to each other, rewrite them so that they don't or see https://circleci.com/docs/2.0/env-vars/#interpolating-environment-variables-to-set-other-environment-variables .
    environment:
      CIRCLE_ARTIFACTS: /tmp/circleci-artifacts
      CIRCLE_TEST_REPORTS: /tmp/circleci-test-results
      COMPILE: --compile
    # In CircleCI 1.0 we used a pre-configured image with a large number of languages and other packages.
    # In CircleCI 2.0 you can now specify your own image, or use one of our pre-configured images.
    # The following configuration line tells CircleCI to use the specified docker image as the runtime environment for your job.
    # We have selected a pre-built image that mirrors the build environment we use on
    # the 1.0 platform, but we recommend you choose an image more tailored to the needs
    # of each job. For more information on choosing an image (or alternatively using a
    # VM instead of a container) see https://circleci.com/docs/2.0/executor-types/
    # To see the list of pre-built images that CircleCI provides for most common languages see
    # https://circleci.com/docs/2.0/circleci-images/
    docker:
    - image: circleci/build-image:ubuntu-14.04-XXL-upstart-1189-5614f37
      command: /sbin/init
    steps:
    # Machine Setup
    # If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each
    # The following `checkout` command checks out your code to your working directory. In 1.0 we did this implicitly. In 2.0 you can choose where in the course of a job your code should be checked out.
    - checkout
    # Prepare for artifact and test results collection equivalent to how it was done on 1.0.
    # In many cases you can simplify this from what is generated here.
    # 'See docs on artifact collection here https://circleci.com/docs/2.0/artifacts/'
    - run: mkdir -p $CIRCLE_ARTIFACTS $CIRCLE_TEST_REPORTS
    # This is based on your 1.0 configuration file or project settings
    - run:
        working_directory: ~/rocky/python-uncompyle6
        command: pyenv install 2.7.13 && pyenv local 2.7.13 && pyenv rehash && pip install virtualenv && pip install nose && pip install pep8 && pyenv rehash
    # Dependencies
    # This would typically go in either a build or a build-and-test job when using workflows
    # Restore the dependency cache
    - restore_cache:
        keys:
        # This branch if available
        - v1-dep-{{ .Branch }}-
        # Default branch if not
        - v1-dep-master-
        # Any branch if there are none on the default branch - this should be unnecessary if you have your default branch configured correctly
        - v1-dep-
    # This is based on your 1.0 configuration file or project settings
    - run: pip install --upgrade setuptools
    - run: pip install -e .
    - run: pip install pytest==3.2.5 hypothesis
    # Save dependency cache
    - save_cache:
        key: v1-dep-{{ .Branch }}-{{ epoch }}
        paths:
        # This is a broad list of cache paths to include many possible development environments
        # You can probably delete some of these entries
        - vendor/bundle
        - ~/virtualenvs
        - ~/.m2
        - ~/.ivy2
        - ~/.bundle
        - ~/.go_workspace
        - ~/.gradle
        - ~/.cache/bower
    # Test
    # This would typically be a build job when using workflows, possibly combined with build
    # This is based on your 1.0 configuration file or project settings
    - run: python ./setup.py develop && make check-2.7
    - run: cd ./test/stdlib && pyenv local 2.7.13 && bash ./runtests.sh 'test_[p-z]*.py'
    # Teardown
    # If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each
    # Save test results
    - store_test_results:
        path: /tmp/circleci-test-results
    # Save artifacts
    - store_artifacts:
        path: /tmp/circleci-artifacts
    - store_artifacts:
        path: /tmp/circleci-test-results
test/bytecode_3.0_run/06_listcomp.pyc (new binary file, not shown)
test/simple_source/bug30/06_listcomp.py (new file, 24 lines)
@@ -0,0 +1,24 @@
# Python 3.0 comprehensions can produce different code from
# all other Python versions. Thanks, Python!

# This code is RUNNABLE!

# Adapted from 3.0 ast.py; uses comprehension implemented via CLOSURE
def _format(node):
    return [(a, int(b)) for a, b in node.items()]

x = {'a': '1', 'b': '2'}
assert [('a', 1), ('b', 2)] == _format(x)

# Adapted from 3.0 cmd.py; uses "if" comprehension
def monthrange(ary, dotext):
    return [a[3:] for a in ary if a.startswith(dotext)]

ary = ["Monday", "Twoday", "Monmonth"]
assert ['day', 'month'] == monthrange(ary, "Mon")

# From 3.0 cmd.py; uses "if not" comprehension
def columnize(l):
    return [i for i in range(len(l))
            if not isinstance(l[i], str)]
assert [0, 2] == columnize([1, 'a', 2])
test/simple_source/bug31/06_listcomp.py (new file, 17 lines)
@@ -0,0 +1,17 @@
# Python 3.0 comprehensions can produce different code from
# all other Python versions. Thanks, Python!

# Adapted from 3.0 ast.py
# This code is RUNNABLE!
def _format(node):
    return [(a, int(b)) for a, b in node.items()]

# Adapted from 3.0 cmd.py
def monthrange(ary, dotext):
    return [a[3:] for a in ary if a.startswith(dotext)]

x = {'a': '1', 'b': '2'}
assert [('a', 1), ('b', 2)] == _format(x)

ary = ["Monday", "Twoday", "Monmonth"]
assert ['day', 'month'] == monthrange(ary, "Mon")
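The listcomp test sources above are flagged as RUNNABLE, and the commit also checks in matching Python 3.0 bytecode under test/bytecode_3.0_run. A quick manual check of either file is to byte-compile it and feed the result back through the decompiler. A minimal sketch, assuming the uncompyle6 command-line tool installed by this project is on the PATH (the paths below are only illustrative):

import py_compile
import subprocess

SRC = "test/simple_source/bug31/06_listcomp.py"   # file added in this commit
PYC = "/tmp/06_listcomp.pyc"                      # illustrative scratch location

# Byte-compile the test source, then decompile it again; uncompyle6 prints
# the recovered source to stdout, where it can be eyeballed or diffed.
py_compile.compile(SRC, cfile=PYC)
subprocess.run(["uncompyle6", PYC], check=True)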
@@ -110,8 +110,8 @@ def do_tests(src_dir, patterns, target_dir, start_with=None,
     files = [file for file in files if not 'site-packages' in file]
     files = [file for file in files if not 'test' in file]
     if len(files) > max_files:
-        # print("Numer of files %d - truncating to last 200" % len(files))
-        print("Numer of files %d - truncating to first %s" %
+        # print("Number of files %d - truncating to last 200" % len(files))
+        print("Number of files %d - truncating to first %s" %
               (len(files), max_files))
         files = files[:max_files]
@@ -32,7 +32,13 @@ class Python30Parser(Python31Parser):
     # Used to keep index order the same in semantic actions
     jb_pop_top ::= JUMP_BACK POP_TOP

     while1stmt ::= SETUP_LOOP l_stmts COME_FROM_LOOP
+    whileelsestmt ::= SETUP_LOOP testexpr l_stmts
+                      jb_pop_top POP_BLOCK
+                      else_suitel COME_FROM_LOOP
+    # while1elsestmt ::= SETUP_LOOP l_stmts
+    #                    jb_pop_top POP_BLOCK
+    #                    else_suitel COME_FROM_LOOP

     else_suitel ::= l_stmts COME_FROM_LOOP JUMP_BACK
@@ -52,14 +58,27 @@ class Python30Parser(Python31Parser):
         LOAD_FAST FOR_ITER store comp_iter
         JUMP_BACK POP_TOP JUMP_BACK RETURN_VALUE RETURN_LAST

     list_comp_header ::= BUILD_LIST_0 DUP_TOP STORE_FAST
     list_comp ::= list_comp_header
         LOAD_FAST FOR_ITER store comp_iter
         JUMP_BACK

+    set_comp_header ::= BUILD_SET_0 DUP_TOP STORE_FAST
+    set_comp ::= set_comp_header
+        LOAD_FAST FOR_ITER store comp_iter
+        JUMP_BACK
+
+    dict_comp_header ::= BUILD_MAP_0 DUP_TOP STORE_FAST
+    dict_comp ::= dict_comp_header
+        LOAD_FAST FOR_ITER store dict_comp_iter
+        JUMP_BACK
+
+    dict_comp_iter ::= expr expr ROT_TWO expr STORE_SUBSCR

     # JUMP_IF_TRUE POP_TOP as a replacement
     comp_if ::= expr jmp_false comp_iter
+    comp_if ::= expr jmp_false comp_iter JUMP_BACK POP_TOP
+    comp_if_not ::= expr jmp_true comp_iter JUMP_BACK POP_TOP
     comp_iter ::= expr expr SET_ADD
     comp_iter ::= expr expr LIST_APPEND
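For orientation, the set_comp, dict_comp and dict_comp_iter rules added above describe the bytecode Python 3.0 emits for ordinary set and dict comprehensions (SET_ADD for the set case, ROT_TWO/STORE_SUBSCR for the dict case). At the source level these are just the usual forms; a small illustrative example in the style of the new test files, not taken from the commit itself:

x = {'a': '1', 'b': '2'}

# set comprehension: the set_comp / SET_ADD shape
assert {1, 2} == {int(v) for v in x.values()}

# dict comprehension: the dict_comp / dict_comp_iter (STORE_SUBSCR) shape
assert {'a': 1, 'b': 2} == {k: int(v) for k, v in x.items()}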
@@ -70,6 +89,7 @@ class Python30Parser(Python31Parser):
         _jump POP_TOP
     jump_except ::= JUMP_FORWARD POP_TOP
     or ::= expr jmp_false expr jmp_true expr
+    or ::= expr jmp_true expr

     ################################################################################
     # In many ways 3.0 is like 2.6. One similarity is there is no JUMP_IF_TRUE and
@@ -88,7 +108,7 @@ class Python30Parser(Python31Parser):
     return_if_stmt ::= ret_expr RETURN_END_IF POP_TOP
     and ::= expr jmp_false expr come_from_opt
-    whilestmt ::= SETUP_LOOP testexpr l_stmts_opt
+    whilestmt ::= SETUP_LOOP testexpr l_stmts_opt come_from_opt
         JUMP_BACK POP_TOP POP_BLOCK COME_FROM_LOOP
     whilestmt ::= SETUP_LOOP testexpr returns
         POP_TOP POP_BLOCK COME_FROM_LOOP
@@ -99,6 +119,7 @@ class Python30Parser(Python31Parser):
         jmp_false compare_chained1 _come_froms
     compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP
         jmp_false compare_chained2 _come_froms
+    compare_chained2 ::= expr COMPARE_OP RETURN_END_IF
     """

     def customize_grammar_rules(self, tokens, customize):
@@ -117,13 +138,14 @@ class Python30Parser(Python31Parser):
     assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1
     return_if_lambda ::= RETURN_END_IF_LAMBDA
     except_suite ::= c_stmts POP_EXCEPT jump_except
+    whileelsestmt ::= SETUP_LOOP testexpr l_stmts JUMP_BACK POP_BLOCK
+                      else_suitel COME_FROM_LOOP

     # No JUMP_IF_FALSE_OR_POP
     compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP
                          compare_chained1 COME_FROM
     compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP
                          compare_chained2 COME_FROM

     """)

     return
@@ -666,11 +666,14 @@ class Scanner3(Scanner):
                     and code[return_val_offset1] == self.opc.RETURN_VALUE)):
                 jump_back = None
         if not jump_back:
+            # loop suite ends in return
             jump_back = self.last_instr(start, end, self.opc.RETURN_VALUE)
             if not jump_back:
                 return

-        jump_back += 2 # FIXME ???
+        jb_inst = self.get_inst(jump_back)
+        jump_back = self.next_offset(jb_inst.opcode, jump_back)

         if_offset = None
         if code[self.prev_op[next_line_byte]] not in self.pop_jump_tf:
             if_offset = self.prev[next_line_byte]
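The replacement of the hard-coded "jump_back += 2" above avoids guessing how wide the jump instruction is; the following offset is now derived from the instruction itself. The same idea in isolation, as a small sketch built on the instruction_size helper this diff already imports from xdis (offset_after is a hypothetical name, not part of the codebase):

from xdis.bytecode import instruction_size

def offset_after(code, offset, opc):
    # Instruction widths differ by opcode (and by Python version), so the
    # width is looked up from the opcode table instead of assuming "+2"/"+4".
    return offset + instruction_size(code[offset], opc)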
@@ -703,18 +706,15 @@ class Scanner3(Scanner):
                 loop_type = 'for'
             else:
                 loop_type = 'while'
-                if next_line_byte < len(code):
-                    test_inst = self.insts[self.offset2inst_index[next_line_byte]-1]
-                    if test_inst.offset == offset:
-                        loop_type = 'while 1'
-                    elif test_inst.opcode in self.opc.JUMP_OPs:
-                        self.ignore_if.add(test_inst.offset)
-                        test_target = self.get_target(test_inst.offset)
-                        if test_target > (jump_back+3):
-                            jump_back = test_target
-                        pass
-                    pass
-                pass
+                test = self.prev_op[next_line_byte]
+
+                if test == offset:
+                    loop_type = 'while 1'
+                elif self.code[test] in self.opc.JUMP_OPs:
+                    self.ignore_if.add(test)
+                    test_target = self.get_target(test)
+                    if test_target > (jump_back+3):
+                        jump_back = test_target
             self.not_continue.add(jump_back)
             self.loops.append(target)
             self.structs.append({'type': loop_type + '-loop',
@@ -8,7 +8,7 @@ scanner routine for Python 3.
 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_30 as opc
-from xdis.bytecode import instruction_size, next_offset
+from xdis.bytecode import instruction_size
 import xdis

 JUMP_TF = frozenset([opc.JUMP_IF_FALSE, opc.JUMP_IF_TRUE])
@@ -29,12 +29,12 @@ class Scanner30(Scanner3):
         """

         code = self.code
-        op = code[offset]
+        op = self.insts[inst_index].opcode

         # Detect parent structure
         parent = self.structs[0]
         start = parent['start']
         end = parent['end']

         # Pick inner-most parent for our offset
         for struct in self.structs:
@@ -42,8 +42,8 @@ class Scanner30(Scanner3):
             current_end = struct['end']
             if ((current_start <= offset < current_end)
                 and (current_start >= start and current_end <= end)):
                 start = current_start
                 end = current_end
                 parent = struct

         if op == self.opc.SETUP_LOOP:
@@ -55,7 +55,6 @@ class Scanner30(Scanner3):
             start += instruction_size(op, self.opc)
             target = self.get_target(offset)
             end = self.restrict_to_parent(target, parent)
-            self.setup_loop_targets[offset] = target
             self.setup_loops[target] = offset

             if target != end:
@@ -66,7 +65,7 @@ class Scanner30(Scanner3):
                                          next_line_byte, False)

             if jump_back:
-                jump_forward_offset = next_offset(code[jump_back], self.opc, jump_back)
+                jump_forward_offset = xdis.next_offset(code[jump_back], self.opc, jump_back)
             else:
                 jump_forward_offset = None
@@ -84,7 +83,9 @@ class Scanner30(Scanner3):
             if not jump_back:
                 return

-            jump_back += 2
+            jb_inst = self.get_inst(jump_back)
+            jump_back = self.next_offset(jb_inst.opcode, jump_back)
+
             if_offset = None
             if code[self.prev_op[next_line_byte]] not in JUMP_TF:
                 if_offset = self.prev[next_line_byte]
@@ -96,18 +97,22 @@ class Scanner30(Scanner3):
                 target = next_line_byte
                 end = jump_back + 3
             else:
-                if self.get_target(jump_back, 0) >= next_line_byte:
+                if self.get_target(jump_back) >= next_line_byte:
                     jump_back = self.last_instr(start, end, self.opc.JUMP_ABSOLUTE, start, False)
-                if end > jump_back+4 and self.is_jump_forward(end):
-                    if self.is_jump_forward(jump_back+4):
+                jb_inst = self.get_inst(jump_back)
+
+                jb_next_offset = self.next_offset(jb_inst.opcode, jump_back)
+                if end > jb_next_offset and self.is_jump_forward(end):
+                    if self.is_jump_forward(jb_next_offset):
                         if self.get_target(jump_back+4) == self.get_target(end):
                             self.fixed_jumps[offset] = jump_back+4
-                            end = jump_back+4
+                            end = jb_next_offset
                 elif target < offset:
                     self.fixed_jumps[offset] = jump_back+4
-                    end = jump_back+4
+                    end = jb_next_offset

-                target = self.get_target(jump_back, 0)
+                target = self.get_target(jump_back)

                 if code[target] in (self.opc.FOR_ITER, self.opc.GET_ITER):
                     loop_type = 'for'
@@ -128,6 +133,9 @@ class Scanner30(Scanner3):
                                  'start': target,
                                  'end': jump_back})
             after_jump_offset = xdis.next_offset(code[jump_back], self.opc, jump_back)
+            if (self.get_inst(after_jump_offset).opname == 'POP_TOP'):
+                after_jump_offset = xdis.next_offset(code[after_jump_offset], self.opc,
+                                                     after_jump_offset)
             if after_jump_offset != end:
                 self.structs.append({'type': loop_type + '-else',
                                      'start': after_jump_offset,
@@ -321,6 +329,9 @@ class Scanner30(Scanner3):
             next_op = rtarget
             if code[next_op] == self.opc.POP_TOP:
                 next_op = rtarget
+                for block in self.structs:
+                    if block['type'] == 'while-loop' and block['end'] == next_op:
+                        return
                 next_op += instruction_size(self.code[next_op], self.opc)
                 if code[next_op] == self.opc.POP_BLOCK:
                     return
@@ -1051,10 +1051,15 @@ class SourceWalker(GenericASTTraversal, object):
             self.prec=100
             ast = ast[0]

+        # Pick out important parts of the comprehension:
+        # * the variable we iterate over: "store"
+        # * the results we accumulate: "n"
+
+        is_30_dict_comp = False
         store = None
         n = ast[iter_index]
-        if ast in ['set_comp_func', 'dict_comp_func',
-                   'list_comp', 'set_comp_func_header']:
+        if ast in ('set_comp_func', 'dict_comp_func',
+                   'list_comp', 'set_comp_func_header'):
             for k in ast:
                 if k == 'comp_iter':
                     n = k
@@ -1063,6 +1068,21 @@ class SourceWalker(GenericASTTraversal, object):
                     pass
                 pass
             pass
+        elif ast in ('dict_comp', 'set_comp'):
+            assert self.version == 3.0
+            for k in ast:
+                if k in ('dict_comp_header', 'set_comp_header'):
+                    n = k
+                elif k == 'store':
+                    store = k
+                elif k == 'dict_comp_iter':
+                    is_30_dict_comp = True
+                    n = (k[3], k[1])
+                    pass
+                elif k == 'comp_iter':
+                    n = k[1]
+                    pass
+                pass
         else:
             assert n == 'list_iter', n
@@ -1115,7 +1135,12 @@ class SourceWalker(GenericASTTraversal, object):
         # Another approach might be to be able to pass in the source name
         # for the dummy argument.

-        self.preorder(n[0])
+        if is_30_dict_comp:
+            self.preorder(n[0])
+            self.write(': ')
+            self.preorder(n[1])
+        else:
+            self.preorder(n[0])
         self.write(' for ')
         if comp_store:
             self.preorder(comp_store)
@@ -1178,6 +1203,26 @@ class SourceWalker(GenericASTTraversal, object):
         stores = [ast[3]]
         assert ast[4] == 'comp_iter'
         n = ast[4]
+        # Find the list comprehension body. It is the inner-most
+        # node that is not comp_.. .
+        while n == 'comp_iter':
+            if n[0] == 'comp_for':
+                n = n[0]
+                stores.append(n[2])
+                n = n[3]
+            elif n[0] in ('comp_if', 'comp_if_not'):
+                n = n[0]
+                # FIXME: just a guess
+                if n[0].kind == 'expr':
+                    list_ifs.append(n)
+                else:
+                    list_ifs.append([1])
+                n = n[2]
+                pass
+            else:
+                break
+            pass
+
         # Skip over n[0] which is something like: _[1]
         self.preorder(n[1])
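The "while n == 'comp_iter'" walk added in the last hunk gathers one extra store for each nested comp_for and records a list_ifs entry for each comp_if / comp_if_not before the body is emitted. The kind of source that exercises that walk looks like the following (illustrative only, in the spirit of the new test files):

# Two "for" clauses and an "if" filter: the inner for contributes a second
# store, and the isinstance() test becomes a list_ifs entry.
matrix = [[1, 'a'], [2, 'b']]
flat = [x for row in matrix for x in row if not isinstance(x, str)]
assert flat == [1, 2]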