Merge branch 'master' into python-2.4

rocky committed 2018-04-01 13:48:16 -04:00
53 changed files with 382 additions and 115 deletions


@@ -18,21 +18,20 @@ def test_grammar():
right_recursive, dup_rhs) = p.check_sets()
# We have custom rules that create the below
expect_lhs = set(['expr1024', 'pos_arg', 'get_iter', 'attribute'])
expect_lhs = set(['pos_arg', 'get_iter', 'attribute'])
unused_rhs = set(['list', 'mkfunc',
unused_rhs = set(['list', 'mkfunc', 'dict',
'mklambda',
'unpack',])
expect_right_recursive = set([('designList',
('store', 'DUP_TOP', 'designList'))])
expect_lhs.add('kvlist')
expect_lhs.add('kv3')
if PYTHON3:
expect_lhs.add('load_genexpr')
expect_lhs.add('kvlist')
expect_lhs.add('kv3')
unused_rhs = unused_rhs.union(set("""
except_pop_except generator_exp
dict
""".split()))
if PYTHON_VERSION >= 3.0:
expect_lhs.add("annotate_arg")


@@ -139,7 +139,7 @@ grammar-coverage-2.6:
grammar-coverage-2.7:
-rm $(COVER_DIR)/spark-grammar-2.7.cover || true
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-2.7.cover $(PYTHON) test_pythonlib.py --bytecode-2.7
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-2.7.cover $(PYTHON) test_pyenvlib.py --2.7.14 --max=400
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-2.7.cover $(PYTHON) test_pyenvlib.py --2.7.14 --max=600
#: Get grammar coverage for Python 3.0
grammar-coverage-3.0:
@@ -182,7 +182,7 @@ grammar-coverage-3.5:
grammar-coverage-3.6:
rm $(COVER_DIR)/spark-grammar-3.6.cover || /bin/true
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-3.6.cover $(PYTHON) test_pythonlib.py --bytecode-3.6
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-3.6.cover $(PYTHON) test_pyenvlib.py --3.6.4 --max=800
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-3.6.cover $(PYTHON) test_pyenvlib.py --3.6.4 --max=280
#: Check deparsing Python 2.6
check-bytecode-2.6:

(13 binary files not shown)

test/grammar-cover/.gitignore (new file)

@@ -0,0 +1 @@
/.python-version


@@ -1,5 +1,5 @@
#!/bin/bash
for VERS in 2{4,5,6,7} 3{2,3,4,5} ; do
GRAMMAR_TXT=grammar-${VERS}.txt
spark-parser-coverage --max-count 900 --path spark-grammar-${VERS}.cover > $GRAMMAR_TXT
spark-parser-coverage --max-count 3000 --path spark-grammar-${VERS}.cover > $GRAMMAR_TXT
done


@@ -1,2 +1,2 @@
#!/bin/bash
$SHELL ./grammar.sh 2.4 2.5 2.6 2.7 3.3 3.4 3.5 3.6
$SHELL ./grammar.sh 2.4 2.5 2.6 2.7 3.2 3.3 3.4 3.5 3.6


@@ -1,7 +1,7 @@
#!/bin/bash
# Remake Python grammar statistics
typeset -A ALL_VERS=([2.4]=2.4.6 [2.5]=2.5.6 [2.6]=2.6.9 [2.7]=2.7.14 [3.3]=3.3.6 [3.4]=3.4.8 [3.5]=3.5.5 [3.6]=3.6.4)
typeset -A ALL_VERS=([2.4]=2.4.6 [2.5]=2.5.6 [2.6]=2.6.9 [2.7]=2.7.14 [3.2]=3.2.6 [3.3]=3.3.6 [3.4]=3.4.8 [3.5]=3.5.5 [3.6]=3.6.4)
if (( $# == 0 )); then
echo 1>&2 "usage: $0 two-digit-version"
@@ -21,6 +21,7 @@ while [[ -n $1 ]] ; do
fi
tmpdir=$workdir/../../tmp/grammar-cover
COVER_FILE=${tmpdir}/spark-grammar-${SHORT_VERSION}.cover
[[ -d $tmpdir ]] || mkdir $tmpdir
cd $workdir/../..
if [[ $SHORT_VERSION > 2.5 ]] ; then
@@ -31,10 +32,13 @@ while [[ -n $1 ]] ; do
GRAMMAR_TXT=$tmpdir/grammar-${SHORT_VERSION}.txt
pyenv local ${LONG_VERSION}
cd ./test
if [[ -r $COVER_FILE ]]; then
rm $COVER_FILE
fi
if [[ -r $GRAMMAR_TXT ]]; then
GRAMMAR_SAVE_TXT=${tmpdir}/grammar-${SHORT_VERSION}-save.txt
cp $GRAMMAR_TXT $GRAMMAR_SAVE_TXT
fi
make grammar-coverage-${SHORT_VERSION};
spark-parser-coverage --path ${tmpdir}/spark-grammar-${SHORT_VERSION}.cover > $GRAMMAR_TXT
spark-parser-coverage --max-count=3000 --path $COVER_FILE > $GRAMMAR_TXT
done


@@ -0,0 +1,13 @@
#!/bin/bash
USER=${USER:-rocky}
EMAIL=${EMAIL:-rb@dustyfeet.com}
SUBJECT_PREFIX="grammar cover testing for"
LOGFILE=/tmp/grammar-cover-$$.log
/bin/bash ./grammar-all.sh >$LOGFILE 2>&1
rc=$?
if ((rc == 0)); then
tail -v $LOGFILE | mail -s "$SUBJECT_PREFIX ok" ${USER}@localhost
else
tail -v $LOGFILE | mail -s "$SUBJECT_PREFIX not ok" ${USER}@localhost
tail -v $LOGFILE | mail -s "$SUBJECT_PREFIX not ok" $EMAIL
fi


@@ -0,0 +1,18 @@
# Bug found in 2.4 test_math.py
# Bug was turning last try/except/else into try/else
import math
def test_exceptions():
try:
x = math.exp(-1000000000)
except:
raise RuntimeError
x = 1
try:
x = math.sqrt(-1.0)
except ValueError:
return x
else:
raise RuntimeError
test_exceptions()


@@ -1,5 +1,5 @@
# From 2.7 test_itertools.py
# Bug was in 2.7 decompiling like the commented out
# Bug was in 2.7 decompiling the target assignment
# code below
from itertools import izip_longest
for args in [


@@ -0,0 +1,13 @@
# From 3.6.4 pathlib.py
# Bug was handling "continue" as last statement of "if"
# RUNNABLE!
def parse_parts(it, parts):
for part in it:
if not part:
continue
parts = 1
return parts
assert parse_parts([], 5) == 5
assert parse_parts([True], 6) == 1
assert parse_parts([False], 6) == 6


@@ -1,9 +1,19 @@
# Python 3.5+ PEP 448 - Additional Unpacking Generalizations for dictionaries
{**{}}
{**{'a': 1, 'b': 2}}
## {**{'x': 1}, **{'y': 2}}
# RUNNABLE!
b = {**{}}
assert b == {}
c = {**{'a': 1, 'b': 2}}
assert c == {'a': 1, 'b': 2}
d = {**{'x': 1}, **{'y': 2}}
assert d == {'x': 1, 'y': 2}
# {'c': 1, {'d': 2}, **{'e': 3}}
[*[]]
{**{0:0 for a in b}}
## {**{}, **{}}
## {**{}, **{}, **{}}
assert {0: 0} == {**{0:0 for a in c}}
# FIXME: assert deparsing is incorrect for:
# {**{}, **{}}
# assert {} == {**{}, **{}}
# {**{}, **{}, **{}}
# assert {} == {**{}, **{}, **{}}


@@ -0,0 +1,23 @@
# From python 3.5.5 telnetlib
# The bug is that the end of a "then" jumps back
# to the loop, which can look like a "continue"
# rather than like a then/else break.
def process_rawq(self, cmd, cmd2):
while self.rawq:
if self.iacseq:
if cmd:
pass
elif cmd2:
if self.option_callback:
self.option = 2
else:
self.option = 3
# From python 3.5.5 telnetlib
def listener(data):
while 1:
if data:
data = 1
else:
data = 2


@@ -47,3 +47,14 @@ def __init__(self, cnf={}):
def Value(self, fn, typecode_or_type, *args, lock=True):
return fn(typecode_or_type, *args, lock=lock,
ctx=self.get_context())
# From 3.6.4 heapq.py
def merge(*iterables, key=None, reverse=False):
return
def __call__(self, *args, **kwds):
pass
# From 3.6.4 shutil
def unpack_archive(func, filename, dict, format_info, extract_dir=None):
func(filename, extract_dir, **dict(format_info[2]))


@@ -0,0 +1,12 @@
# From 3.6.4 pdb.py
# Bug was not having a semantic action for "except_return" tree
def do_commands(self, arg):
if not arg:
bnum = 1
else:
try:
bnum = int(arg)
except:
self.error("Usage:")
return
self.commands_bnum = bnum


@@ -0,0 +1,13 @@
# From 3.6.4 configparser.py
# Bug in 3.6 was handling "else" with a compound
# "if": there is no POP_BLOCK and
# there are several COME_FROMs before the else.
def _read(self, fp, a, value, f):
for line in fp:
for prefix in a:
fp()
if (value and fp and
prefix > 5):
f()
else:
f()


@@ -0,0 +1,18 @@
# From 3.6.4 test_argparse.py
# Bug was in parsing ** args
import argparse
def test_namespace_starkwargs_notidentifier(self):
ns = argparse.Namespace(**{'"': 'quote'})
string = """Namespace(**{'"': 'quote'})"""
assert ns == string
def test_namespace_kwargs_and_starkwargs_notidentifier(self):
ns = argparse.Namespace(a=1, **{'"': 'quote'})
string = """Namespace(a=1, **{'"': 'quote'})"""
assert ns == string
def test_namespace(self):
ns = argparse.Namespace(foo=42, bar='spam')
string = "Namespace(bar='spam', foo=42)"
assert ns == string


@@ -0,0 +1,16 @@
# Adapted from Python 3.3 idlelib/PyParse.py
# Bug is a "continue" flowing back to the "while", messing up the
# determination that it is inside an "if".
# RUNNABLE!
def _study1(i, n, ch):
while i == 3:
i = 4
if ch:
i = 10
assert i < 5
continue
if n:
return n
assert _study1(3, 4, False) == 4


@@ -1,7 +1,5 @@
# RUNNABLE!
# But if it miscompiles one of the tests may loop forever
# Tests:
# 2.7:
# assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1
@@ -35,11 +33,3 @@ def getpreferredencoding(do_setlocale=True):
assert not do_setlocale
getpreferredencoding(False)
# From python 3.3 idlelib/PyParse.py
def _study1(i):
while i:
assert i
continue
_study1(False)

test/stdlib/.gitignore (new file)

@@ -0,0 +1 @@
/.python-version


@@ -176,6 +176,7 @@ def main(in_base, out_base, files, codes, outfile=None,
for filename in files:
infile = os.path.join(in_base, filename)
# print("XXX", infile)
if not os.path.exists(infile):
sys.stderr.write("File '%s' doesn't exist. Skipped\n"
% infile)


@@ -565,9 +565,6 @@ class PythonParser(GenericASTBuilder):
# Positional arguments in make_function
pos_arg ::= expr
expr32 ::= expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr
expr1024 ::= expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32
'''
def p_store(self, args):


@@ -107,11 +107,8 @@ class Python2Parser(PythonParser):
_mklambda ::= load_closure mklambda
kwarg ::= LOAD_CONST expr
kvlist ::= kvlist kv3
kv3 ::= expr expr STORE_MAP
dict ::= BUILD_MAP kvlist
classdef ::= buildclass store
buildclass ::= LOAD_CONST expr mkfunc


@@ -73,7 +73,6 @@ class Python25Parser(Python26Parser):
classdefdeco1 ::= expr classdefdeco2 CALL_FUNCTION_1
classdefdeco2 ::= LOAD_CONST expr mkfunc CALL_FUNCTION_0 BUILD_CLASS
kv3 ::= expr expr STORE_MAP
kvlist ::= kvlist kv3
ret_cond ::= expr jmp_false_then expr RETURN_END_IF POP_TOP ret_expr_or_cond
return_if_lambda ::= RETURN_END_IF_LAMBDA POP_TOP
return_if_stmt ::= ret_expr RETURN_END_IF POP_TOP


@@ -261,6 +261,9 @@ class Python26Parser(Python2Parser):
def p_misc26(self, args):
"""
dict ::= BUILD_MAP kvlist
kvlist ::= kvlist kv3
conditional ::= expr jmp_false expr jf_cf_pop expr come_from_opt
and ::= expr JUMP_IF_FALSE POP_TOP expr JUMP_IF_FALSE POP_TOP


@@ -38,8 +38,6 @@ class Python27Parser(Python2Parser):
comp_for ::= expr for_iter store comp_iter JUMP_BACK
comp_iter ::= comp_if
comp_iter ::= comp_if_not
comp_if_not ::= expr jmp_true comp_iter
comp_iter ::= comp_body
dict_comp_body ::= expr expr MAP_ADD


@@ -505,7 +505,8 @@ class Python3Parser(PythonParser):
self.add_unique_rule(rule, token.kind, uniq_param, customize)
if possible_class_decorator:
if next_token == 'CALL_FUNCTION' and next_token.attr == 1:
if (next_token == 'CALL_FUNCTION' and next_token.attr == 1
and args_pos > 1):
rule = ('classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d'
% (('expr ' * (args_pos-1)), opname, args_pos))
self.add_unique_rule(rule, token.kind, uniq_param, customize)
@@ -571,6 +572,7 @@ class Python3Parser(PythonParser):
# This is used in parse36.py as well as here
self.seen_LOAD_DICTCOMP = False
self.seen_LOAD_SETCOMP = False
# Loop over instructions adding custom grammar rules based on
@@ -621,9 +623,7 @@ class Python3Parser(PythonParser):
self.addRule(rule, nop_func)
elif opname.startswith('BUILD_LIST_UNPACK'):
v = token.attr
rule = ('build_list_unpack ::= ' + 'expr1024 ' * int(v//1024) +
'expr32 ' * int((v//32) % 32) +
'expr ' * (v % 32) + opname)
rule = 'build_list_unpack ::= %s%s' % ('expr ' * v, opname)
self.addRule(rule, nop_func)
rule = 'expr ::= build_list_unpack'
self.addRule(rule, nop_func)
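For illustration, a minimal sketch (with hypothetical values for `v` and `opname`) of the rule string the simplified formatting above builds, now that the expr1024/expr32 groupings are gone:

    # Hypothetical values; BUILD_LIST_UNPACK_3 stands in for token.kind
    v = 3
    opname = 'BUILD_LIST_UNPACK_3'
    rule = 'build_list_unpack ::= %s%s' % ('expr ' * v, opname)
    assert rule == 'build_list_unpack ::= expr expr expr BUILD_LIST_UNPACK_3'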
@@ -642,19 +642,27 @@ class Python3Parser(PythonParser):
self.add_unique_rule(rule, 'kvlist_n', 1, customize)
rule = "dict ::= BUILD_MAP_n kvlist_n"
elif self.version >= 3.5:
if opname != 'BUILD_MAP_WITH_CALL':
if opname == 'BUILD_MAP_UNPACK':
if not opname.startswith('BUILD_MAP_WITH_CALL'):
# FIXME: Use the attr
# so this doesn't run into exponential parsing time.
if opname.startswith('BUILD_MAP_UNPACK'):
# FIXME: start here
# rule = "%s ::= %s %s" % (kvlist_n, 'expr ' * (token.attr*2), opname)
rule = kvlist_n + ' ::= ' + 'expr ' * (token.attr*2)
self.add_unique_rule(rule, opname, token.attr, customize)
rule = 'dict_entry ::= ' + 'expr ' * (token.attr*2)
self.add_unique_rule(rule, opname, token.attr, customize)
rule = 'dict ::= ' + 'dict_entry ' * token.attr
self.add_unique_rule(rule, opname, token.attr, customize)
rule = ('unmap_dict ::= ' +
('dict ' * token.attr) +
'BUILD_MAP_UNPACK')
rule = 'dict ::= %s' % ('dict_entry ' * token.attr)
self.addRule(rule, nop_func)
# FIXME: really we need a combination of dict_entry-like things.
# It just so happens the most common case is not to mix
# dictionary comprehensions with dictionary elements.
if self.seen_LOAD_DICTCOMP:
rule = 'dict ::= %s%s' % ('dict_comp ' * token.attr, opname)
self.addRule(rule, nop_func)
rule = 'unmap_dict ::= %s%s' % (('dict ' * token.attr), opname)
else:
rule = "%s ::= %s %s" % (kvlist_n, 'expr ' * (token.attr*2), opname)
self.add_unique_rule(rule, opname, token.attr, customize)
@@ -666,9 +674,7 @@ class Python3Parser(PythonParser):
self.add_unique_rule(rule, opname, token.attr, customize)
elif opname.startswith('BUILD_MAP_UNPACK_WITH_CALL'):
v = token.attr
rule = ('build_map_unpack_with_call ::= ' + 'expr1024 ' * int(v//1024) +
'expr32 ' * int((v//32) % 32) +
'expr ' * (v % 32) + opname)
rule = 'build_map_unpack_with_call ::= %s%s' % ('expr ' * v, opname)
self.addRule(rule, nop_func)
elif opname.startswith('BUILD_TUPLE_UNPACK_WITH_CALL'):
v = token.attr
@@ -691,9 +697,7 @@ class Python3Parser(PythonParser):
self.add_unique_rule(rule, opname, token.attr, customize)
if not is_LOAD_CLOSURE or v == 0:
collection = opname_base[opname_base.find('_')+1:].lower()
rule = (('%s ::= ' % collection) + 'expr1024 ' * int(v//1024) +
'expr32 ' * int((v//32) % 32) +
'expr ' * (v % 32) + opname)
rule = '%s ::= %s%s' % (collection, 'expr ' * v, opname)
self.add_unique_rules([
'expr ::= %s' % collection,
rule], customize)
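As a sketch with hypothetical values, the simplified collection rule above expands like this (the real `opname` and `v` come from the token being customized):

    # Hypothetical opcode name, base name, and argument count
    opname, opname_base, v = 'BUILD_LIST_2', 'BUILD_LIST', 2
    collection = opname_base[opname_base.find('_')+1:].lower()
    rule = '%s ::= %s%s' % (collection, 'expr ' * v, opname)
    assert rule == 'list ::= expr expr BUILD_LIST_2'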
@@ -721,9 +725,12 @@ class Python3Parser(PythonParser):
rule = """
dict_comp ::= LOAD_DICTCOMP LOAD_CONST MAKE_FUNCTION_0 expr
GET_ITER CALL_FUNCTION_1
classdefdeco1 ::= expr classdefdeco1 CALL_FUNCTION_1
classdefdeco1 ::= expr classdefdeco2 CALL_FUNCTION_1
"""
if self.version < 3.5:
rule += """
classdefdeco1 ::= expr classdefdeco1 CALL_FUNCTION_1
"""
self.addRule(rule, nop_func)
self.custom_classfunc_rule(opname, token, customize,
@@ -809,6 +816,7 @@ class Python3Parser(PythonParser):
elif opname == 'LOAD_LISTCOMP':
self.add_unique_rule("expr ::= listcomp", opname, token.attr, customize)
elif opname == 'LOAD_SETCOMP':
self.seen_LOAD_SETCOMP = True
# Should this be generalized and put under MAKE_FUNCTION?
if has_get_iter_call_function1:
self.addRule("expr ::= set_comp", nop_func)
@@ -1026,6 +1034,7 @@ class Python3Parser(PythonParser):
rule = ('mkfunc ::= %s%sexpr %s' %
(kwargs, 'pos_arg ' * args_pos, opname))
self.add_unique_rule(rule, opname, token.attr, customize)
if opname.startswith('MAKE_FUNCTION_A'):
if self.version >= 3.6:
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST LOAD_CONST %s' %


@@ -32,7 +32,7 @@ class Python35Parser(Python34Parser):
# ...
# the end of the if will jump back to the loop and there will be a COME_FROM
# after the jump
l_stmts ::= lastl_stmt COME_FROM l_stmts
l_stmts ::= lastl_stmt come_froms l_stmts
# Python 3.5+ Await statement
expr ::= await_expr
@@ -101,7 +101,20 @@ class Python35Parser(Python34Parser):
return_if_stmt ::= ret_expr RETURN_END_IF POP_BLOCK
jb_else ::= JUMP_BACK ELSE
ifelsestmtc ::= testexpr c_stmts_opt JUMP_FORWARD else_suitec
ifelsestmtl ::= testexpr c_stmts_opt jb_else else_suitel
# 3.5 has jump optimization which can route the end of an
# "if/then" back to a loop just before an else.
jump_absolute_else ::= jb_else
jump_absolute_else ::= CONTINUE ELSE
# Our hacky "ELSE" determination doesn't do a good job and really
# determine the start of an "else". It could also be the end of an
# "if-then" which ends in a "continue". Perhaps with real control-flow
# analysis we'll sort this out. Or call "ELSE" something more appropriate.
_ifstmts_jump ::= c_stmts_opt ELSE
# ifstmt ::= testexpr c_stmts_opt


@@ -48,8 +48,9 @@ class Python36Parser(Python35Parser):
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt
JUMP_BACK come_froms POP_BLOCK COME_FROM_LOOP
# This might be valid in < 3.6
# A COME_FROM is dropped off because of JUMP-to-JUMP optimization
and ::= expr jmp_false expr
and ::= expr jmp_false expr jmp_false
jf_cf ::= JUMP_FORWARD COME_FROM
conditional ::= expr jmp_false expr jf_cf expr COME_FROM
@@ -59,6 +60,9 @@ class Python36Parser(Python35Parser):
except_suite ::= c_stmts_opt COME_FROM POP_EXCEPT jump_except COME_FROM
jb_cfs ::= JUMP_BACK come_froms
ifelsestmtl ::= testexpr c_stmts_opt jb_cfs else_suitel
# In 3.6+, a sequence of statements ending in a RETURN can cause
# JUMP_FORWARD END_FINALLY to be omitted from the try middle.
@@ -102,14 +106,23 @@ class Python36Parser(Python35Parser):
fstring_single ::= expr FORMAT_VALUE
"""
self.add_unique_doc_rules(rules_str, customize)
elif opname == 'MAKE_FUNCTION_8' and self.seen_LOAD_DICTCOMP:
# Is there something general going on here?
rule = """
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_CONST
MAKE_FUNCTION_8 expr
GET_ITER CALL_FUNCTION_1
"""
self.addRule(rule, nop_func)
elif opname == 'MAKE_FUNCTION_8':
if self.seen_LOAD_DICTCOMP:
# Is there something general going on here?
rule = """
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_CONST
MAKE_FUNCTION_8 expr
GET_ITER CALL_FUNCTION_1
"""
self.addRule(rule, nop_func)
elif self.seen_LOAD_SETCOMP:
rule = """
set_comp ::= load_closure LOAD_SETCOMP LOAD_CONST
MAKE_FUNCTION_8 expr
GET_ITER CALL_FUNCTION_1
"""
self.addRule(rule, nop_func)
elif opname == 'BEFORE_ASYNC_WITH':
rules_str = """
stmt ::= async_with_stmt
@@ -200,11 +213,9 @@ class Python36Parser(Python35Parser):
self.add_unique_rule('expr ::= async_call', token.kind, uniq_param, customize)
if opname.startswith('CALL_FUNCTION_KW'):
self.addRule("expr ::= call_kw", nop_func)
self.addRule("expr ::= call_kw36", nop_func)
values = 'expr ' * token.attr
rule = 'call_kw ::= expr kwargs_36 %s' % token.kind
self.addRule(rule, nop_func)
rule = 'kwargs_36 ::= %s LOAD_CONST' % values
rule = "call_kw36 ::= expr %s LOAD_CONST %s" % (values, opname)
self.add_unique_rule(rule, token.kind, token.attr, customize)
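A small sketch (hypothetical attr and opname) of the call_kw36 rule string built above:

    # Hypothetical: two keyword values, opcode CALL_FUNCTION_KW_3
    token_attr = 2
    opname = 'CALL_FUNCTION_KW_3'
    values = 'expr ' * token_attr
    rule = "call_kw36 ::= expr %s LOAD_CONST %s" % (values, opname)
    assert rule.split() == ['call_kw36', '::=', 'expr', 'expr', 'expr',
                            'LOAD_CONST', 'CALL_FUNCTION_KW_3']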
elif opname == 'CALL_FUNCTION_EX_KW':
self.addRule("""expr ::= call_ex_kw


@@ -242,17 +242,57 @@ class Scanner(object):
pass
return result_offset
def all_instr(self, start, end, instr, target=None, include_beyond_target=False):
def inst_matches(self, start, end, instr, target=None, include_beyond_target=False):
"""
Find all <instr> in the block from start to end.
<instr> is any python bytecode instruction or a list of opcodes
If <instr> is an opcode with a target (like a jump), a target
Find all `instr` in the block from start to end.
`instr` is a Python opcode or a list of opcodes
If `instr` is an opcode with a target (like a jump), a target
destination can be specified which must match precisely.
Return a list with indexes to them or [] if none found.
"""
try:
None in instr
except:
instr = [instr]
# FIXME: this is broken on 3.6+. Revise to use instructions self.insts
first = self.offset2inst_index[start]
result = []
for inst in self.insts[first:]:
if inst.opcode in instr:
if target is None:
result.append(inst.offset)
else:
t = self.get_target(inst.offset)
if include_beyond_target and t >= target:
result.append(inst.offset)
elif t == target:
result.append(inst.offset)
pass
pass
pass
if inst.offset >= end:
break
pass
# FIXME: put in a test
# check = self.all_instr(start, end, instr, target, include_beyond_target)
# assert result == check
return result
# FIXME: this is broken on 3.6+. Replace remaining (2.x-based) calls
# with inst_matches
def all_instr(self, start, end, instr, target=None, include_beyond_target=False):
"""
Find all `instr` in the block from start to end.
`instr` is any Python opcode or a list of opcodes
If `instr` is an opcode with a target (like a jump), a target
destination can be specified which must match precisely.
Return a list with indexes to them or [] if none found.
"""
code = self.code
assert(start >= 0 and end <= len(code))
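As a rough, self-contained sketch of what the new inst_matches above does (synthetic instructions and made-up opcode numbers, not the real scanner objects):

    from collections import namedtuple

    Inst = namedtuple('Inst', 'opcode offset')
    # Toy instruction stream; opcode numbers are illustrative only.
    insts = [Inst(100, 0), Inst(114, 3), Inst(100, 6), Inst(114, 9)]
    wanted = frozenset([114])   # e.g. a jump opcode, hypothetically
    # Collect offsets of instructions whose opcode is in the wanted set,
    # much as inst_matches walks self.insts.
    matches = [inst.offset for inst in insts if inst.opcode in wanted]
    assert matches == [3, 9]

The real method additionally bounds the walk by start/end offsets and can filter on a jump target.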


@@ -420,7 +420,7 @@ class Scanner2(Scanner):
(self.opc.PJIT, self.opc.JUMP_FORWARD),
(self.opc.PJIT, self.opc.JUMP_ABSOLUTE)])
prelim = self.all_instr(start, end, self.stmt_opcodes)
prelim = self.all_instr(start, end, self.statement_opcodes)
stmts = self.stmts = set(prelim)
pass_stmts = set()


@@ -40,7 +40,7 @@ JUMP_OPS = opcode_26.JUMP_OPS
class Scanner26(scan.Scanner2):
def __init__(self, show_asm=False):
super(Scanner26, self).__init__(2.6, show_asm)
self.stmt_opcodes = frozenset([
self.statement_opcodes = frozenset([
self.opc.SETUP_LOOP, self.opc.BREAK_LOOP,
self.opc.SETUP_FINALLY, self.opc.END_FINALLY,
self.opc.SETUP_EXCEPT, self.opc.POP_BLOCK,


@@ -23,7 +23,7 @@ class Scanner27(Scanner2):
super(Scanner27, self).__init__(2.7, show_asm, is_pypy)
# opcodes that start statements
self.stmt_opcodes = frozenset([
self.statement_opcodes = frozenset([
self.opc.SETUP_LOOP, self.opc.BREAK_LOOP,
self.opc.SETUP_FINALLY, self.opc.END_FINALLY,
self.opc.SETUP_EXCEPT,


@@ -434,6 +434,7 @@ class Scanner3(Scanner):
.opname == 'FOR_ITER'
and self.insts[i+1].opname == 'JUMP_FORWARD')
if (is_continue or
(inst.offset in self.stmts and
(self.version != 3.0 or (hasattr(inst, 'linestart'))) and
@@ -616,7 +617,7 @@ class Scanner3(Scanner):
# Compose preliminary list of indices with statements,
# using plain statement opcodes
prelim = self.all_instr(start, end, self.statement_opcodes)
prelim = self.inst_matches(start, end, self.statement_opcodes)
# Initialize final container with statements with
# preliminary data
@@ -879,11 +880,12 @@ class Scanner3(Scanner):
pass
else:
fix = None
jump_ifs = self.all_instr(start, self.next_stmt[offset],
self.opc.POP_JUMP_IF_FALSE)
jump_ifs = self.inst_matches(start, self.next_stmt[offset],
self.opc.POP_JUMP_IF_FALSE)
last_jump_good = True
for j in jump_ifs:
if target == self.get_target(j):
# FIXME: remove magic number
if self.lines[j].next == j + 3 and last_jump_good:
fix = j
break
@@ -916,7 +918,8 @@ class Scanner3(Scanner):
if offset in self.ignore_if:
return
if (code[pre_rtarget] == self.opc.JUMP_ABSOLUTE and
rtarget_is_ja = code[pre_rtarget] == self.opc.JUMP_ABSOLUTE
if ( rtarget_is_ja and
pre_rtarget in self.stmts and
pre_rtarget != offset and
prev_op[pre_rtarget] != offset and
@@ -936,10 +939,13 @@ class Scanner3(Scanner):
# or a conditional assignment like:
# x = 1 if x else 2
#
# For 3.5, in addition to the JUMP_FORWARD above we could have
# JUMP_BACK or CONTINUE
#
# There are other contexts we may need to consider
# like whether the target is "END_FINALLY"
# or if the condition jump is to a forward location
if self.is_jump_forward(pre_rtarget):
if self.is_jump_forward(pre_rtarget) or (rtarget_is_ja and self.version >= 3.5):
if_end = self.get_target(pre_rtarget)
# If the jump target is back, we are looping
@@ -1141,13 +1147,14 @@ class Scanner3(Scanner):
assert(start>=0 and end<=len(self.code) and start <= end)
# Find all offsets of requested instructions
instr_offsets = self.all_instr(start, end, instr, target, include_beyond_target)
instr_offsets = self.inst_matches(start, end, instr, target,
include_beyond_target)
# Get all POP_JUMP_IF_TRUE (or) offsets
if self.version == 3.0:
jump_true_op = self.opc.JUMP_IF_TRUE
else:
jump_true_op = self.opc.POP_JUMP_IF_TRUE
pjit_offsets = self.all_instr(start, end, jump_true_op)
pjit_offsets = self.inst_matches(start, end, jump_true_op)
filtered = []
for pjit_offset in pjit_offsets:
pjit_tgt = self.get_target(pjit_offset) - 3


@@ -193,11 +193,12 @@ class Scanner30(Scanner3):
pass
else:
fix = None
jump_ifs = self.all_instr(start, self.next_stmt[offset],
opc.JUMP_IF_FALSE)
jump_ifs = self.inst_matches(start, self.next_stmt[offset],
opc.JUMP_IF_FALSE)
last_jump_good = True
for j in jump_ifs:
if target == self.get_target(j):
# FIXME: remove magic number
if self.lines[j].next == j + 3 and last_jump_good:
fix = j
break


@@ -28,6 +28,7 @@ from xdis.code import iscode
from uncompyle6.parsers.astnode import AST
from uncompyle6.scanners.tok import Token
from uncompyle6.semantics.helper import flatten_list
from spark_parser.ast import GenericASTTraversalPruningException
def customize_for_version(self, is_pypy, version):
if is_pypy:
@@ -407,8 +408,13 @@ def customize_for_version(self, is_pypy, version):
# Value 100 is important; it is exactly
# module/function precedence.
PRECEDENCE['call_kw'] = 100
PRECEDENCE['call_ex'] = 100
PRECEDENCE['call_kw'] = 100
PRECEDENCE['call_kw36'] = 100
PRECEDENCE['call_ex'] = 100
PRECEDENCE['call_ex_kw'] = 100
PRECEDENCE['call_ex_kw2'] = 100
PRECEDENCE['call_ex_kw3'] = 100
PRECEDENCE['call_ex_kw4'] = 100
TABLE_DIRECT.update({
'tryfinally36': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n',
@@ -420,6 +426,7 @@ def customize_for_version(self, is_pypy, version):
'fstring_multi': ( "f'''%c'''", 0),
'func_args36': ( "%c(**", 0),
'try_except36': ( '%|try:\n%+%c%-%c\n\n', 1, 2 ),
'except_return': ( '%|except:\n%+%c%-', 3 ),
'unpack_list': ( '*%c', (0, 'list') ),
'call_ex' : (
'%c(%p)',
@@ -567,7 +574,8 @@ def customize_for_version(self, is_pypy, version):
assert call_function_ex == 'CALL_FUNCTION_EX_KW'
# FIXME: decide if the test below should be on kwargs == 'dict'
if (call_function_ex.attr & 1 and
(not isinstance(kwargs, Token) and kwargs != 'attribute')):
(not isinstance(kwargs, Token) and kwargs != 'attribute')
and not kwargs[0].kind.startswith('kvlist')):
self.call36_dict(kwargs)
else:
self.write('**')
@@ -670,7 +678,11 @@ def customize_for_version(self, is_pypy, version):
pass
pass
else:
assert False, "Don't know how to untangle dictionary"
self.write("**")
try:
self.default(node)
except GenericASTTraversalPruningException:
pass
self.prec = p
self.indent_less(INDENT_PER_LEVEL)
@@ -730,40 +742,43 @@ def customize_for_version(self, is_pypy, version):
# return
# self.n_kwargs_only_36 = kwargs_only_36
def kwargs_36(node):
self.write('(')
keys = node[-1].attr
def n_call_kw36(node):
self.template_engine(("%c(", 0), node)
keys = node[-2].attr
num_kwargs = len(keys)
num_posargs = len(node) - (num_kwargs + 1)
num_posargs = len(node) - (num_kwargs + 2)
n = len(node)
assert n >= len(keys)+1, \
'not enough parameters keyword-tuple values'
# try:
# assert n >= len(keys)+1, \
# 'not enough parameters keyword-tuple values'
# except:
# from trepan.api import debug; debug()
sep = ''
# FIXME: adjust output for line breaks?
for i in range(num_posargs):
line_number = self.line_number
for i in range(1, num_posargs):
self.write(sep)
self.preorder(node[i])
sep = ', '
if line_number != self.line_number:
sep = ",\n" + self.indent + " "
else:
sep = ", "
line_number = self.line_number
i = num_posargs
j = 0
# FIXME: adjust output for line breaks?
while i < n-1:
while i < n-2:
self.write(sep)
self.write(keys[j] + '=')
self.preorder(node[i])
sep=', '
if line_number != self.line_number:
sep = ",\n" + self.indent + " "
else:
sep = ", "
i += 1
j += 1
self.write(')')
self.prune()
return
self.n_kwargs_36 = kwargs_36
self.n_call_kw36 = n_call_kw36
def starred(node):
l = len(node)


@@ -532,8 +532,10 @@ def make_function3(self, node, is_lambda, nested=1, codeNode=None):
else:
default, kw_args, annotate, closure = args_node.attr
if default:
assert node[0] == 'expr', "expecting mkfunc default node to be an expr"
expr_node = node[0]
if node[0] == 'pos_arg':
expr_node = expr_node[0]
assert expr_node == 'expr', "expecting mkfunc default node to be an expr"
if (expr_node[0] == 'LOAD_CONST' and
isinstance(expr_node[0].attr, tuple)):
defparams = [repr(a) for a in expr_node[0].attr]
@@ -541,7 +543,21 @@ def make_function3(self, node, is_lambda, nested=1, codeNode=None):
defparams = [self.traverse(n, indent='') for n in expr_node[0][:-1]]
else:
defparams = []
# FIXME: handle kw, annotate and closure
i = -4
kw_pairs = 0
if closure:
# FIXME: fill in
i -= 1
if annotate:
# FIXME: fill in
i -= 1
if kw_args:
kw_node = node[i]
if kw_node == 'expr':
kw_node = kw_node[0]
if kw_node == 'dict':
kw_pairs = kw_node[-1].attr
pass
if 3.0 <= self.version <= 3.2:
@@ -580,10 +596,10 @@ def make_function3(self, node, is_lambda, nested=1, codeNode=None):
return
if self.version >= 3.0:
kw_pairs = args_node.attr[1]
if self.version < 3.6:
kw_pairs = args_node.attr[1]
else:
kw_pairs = 0
indent = self.indent
# build parameters
params = []


@@ -1696,7 +1696,8 @@ class SourceWalker(GenericASTTraversal, object):
self.indent_more(INDENT_PER_LEVEL)
sep = INDENT_PER_LEVEL[:-1]
self.write('{')
if node[0] != 'dict_entry':
self.write('{')
line_number = self.line_number
if self.version >= 3.0 and not self.is_pypy:
@@ -1720,7 +1721,7 @@ class SourceWalker(GenericASTTraversal, object):
self.write(name, ': ')
value = self.traverse(l[i+1], indent=self.indent+(len(name)+2)*' ')
self.write(value)
sep = ","
sep = ", "
if line_number != self.line_number:
sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
line_number = self.line_number
@@ -1747,7 +1748,7 @@ class SourceWalker(GenericASTTraversal, object):
self.write(name, ': ')
value = self.traverse(l[i], indent=self.indent+(len(name)+2)*' ')
self.write(value)
sep = ","
sep = ", "
if line_number != self.line_number:
sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
line_number = self.line_number
@@ -1767,7 +1768,7 @@ class SourceWalker(GenericASTTraversal, object):
line_number = self.line_number
self.write(':')
self.write(self.traverse(value[0]))
sep = ","
sep = ", "
if line_number != self.line_number:
sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
line_number = self.line_number
@@ -1778,6 +1779,22 @@ class SourceWalker(GenericASTTraversal, object):
if sep.startswith(",\n"):
self.write(sep[1:])
pass
elif node[0].kind.startswith('dict_entry'):
assert self.version >= 3.5
template = ("%C", (0, len(node[0]), ", **"))
self.template_engine(template, node[0])
sep = ''
elif (node[-1].kind.startswith('BUILD_MAP_UNPACK')
or node[-1].kind.startswith('dict_entry')):
assert self.version >= 3.5
# FIXME: I think we can intermingle dict_comp's with other
# dictionary kinds of things. The most common case, though, is
# a sequence of dict_comp's
kwargs = node[-1].attr
template = ("**%C", (0, kwargs, ", **"))
self.template_engine(template, node)
sep = ''
pass
else:
# Python 2 style kvlist. Find beginning of kvlist.
@@ -1833,7 +1850,7 @@ class SourceWalker(GenericASTTraversal, object):
value = self.traverse(kv[0], indent=self.indent+(len(name)+2)*' ')
pass
self.write(value)
sep = ","
sep = ", "
if line_number != self.line_number:
sep += "\n" + self.indent + " "
line_number = self.line_number
@@ -1842,7 +1859,8 @@ class SourceWalker(GenericASTTraversal, object):
pass
if sep.startswith(",\n"):
self.write(sep[1:])
self.write('}')
if node[0] != 'dict_entry':
self.write('}')
self.indent_less(INDENT_PER_LEVEL)
self.prec = p
self.prune()