diff --git a/NEWS b/NEWS
index 624d9543..2c6ae2a8 100644
--- a/NEWS
+++ b/NEWS
@@ -1,3 +1,10 @@
+uncompyle6 2.13.0 2017-10-10
+
+- Fixes in deparsing lambda expressions
+- Improve table-semantics descriptions
+- Document hacky customize arg count better (until we can remove it)
+- Update to use xdis 3.7.0 or greater
+
 uncompyle6 2.12.0 2017-09-26
 
 - Use xdis 3.6.0 or greater now
diff --git a/__pkginfo__.py b/__pkginfo__.py
index 222ba24f..de914c13 100644
--- a/__pkginfo__.py
+++ b/__pkginfo__.py
@@ -39,7 +39,7 @@ entry_points = {
         'pydisassemble=uncompyle6.bin.pydisassemble:main',
     ]}
 ftp_url = None
-install_requires = ['spark-parser >= 1.6.1, < 1.7.0',
+install_requires = ['spark-parser >= 1.7.0, < 1.8.0',
                     'xdis >= 3.6.0, < 3.7.0']
 license = 'MIT'
 mailing_list = 'python-debugger@googlegroups.com'
diff --git a/pytest/test_grammar.py b/pytest/test_grammar.py
index 241d064d..b92e163d 100644
--- a/pytest/test_grammar.py
+++ b/pytest/test_grammar.py
@@ -11,7 +11,7 @@ def test_grammar():
     remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
     remain_tokens = set(remain_tokens) - opcode_set
     assert remain_tokens == set([]), \
-        "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dumpGrammar())
+        "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dump_grammar())
 
     p = get_python_parser(PYTHON_VERSION, is_pypy=IS_PYPY)
     lhs, rhs, tokens, right_recursive = p.checkSets()
diff --git a/uncompyle6/parser.py b/uncompyle6/parser.py
index 9d771162..2fe57af5 100644
--- a/uncompyle6/parser.py
+++ b/uncompyle6/parser.py
@@ -89,17 +89,14 @@ class PythonParser(GenericASTBuilder):
         for i in dir(self):
             setattr(self, i, None)
 
-    def debug_reduce(self, rule, tokens, parent, i):
+    def debug_reduce(self, rule, tokens, parent, last_token_pos):
         """Customized format and print for our kind of tokens
         which gets called in debugging grammar reduce rules
         """
         def fix(c):
             s = str(c)
-            i = s.find('_')
-            if i == -1:
-                return s
-            else:
-                return s[:i]
+            last_token_pos = s.find('_')
+            return s if last_token_pos == -1 else s[:last_token_pos]
 
         prefix = ''
         if parent and tokens:
@@ -111,13 +108,13 @@ class PythonParser(GenericASTBuilder):
             if hasattr(p_token, 'offset'):
                 prefix += "%3s" % fix(p_token.offset)
                 if len(rule[1]) > 1:
-                    prefix += '-%-3s ' % fix(tokens[i-1].offset)
+                    prefix += '-%-3s ' % fix(tokens[last_token_pos-1].offset)
                 else:
                     prefix += '     '
         else:
             prefix = '          '
 
-        print("%s%s ::= %s" % (prefix, rule[0], ' '.join(rule[1])))
+        print("%s%s ::= %s (%d)" % (prefix, rule[0], ' '.join(rule[1]), last_token_pos))
 
     def error(self, instructions, index):
         # Find the last line boundary
@@ -138,7 +135,7 @@ class PythonParser(GenericASTBuilder):
         raise ParserError(err_token, err_token.offset)
 
     def typestring(self, token):
-        return token.type
+        return token.kind
 
     def nonterminal(self, nt, args):
         if nt in self.collect and len(args) > 1:
@@ -737,7 +734,7 @@ def get_python_parser(
     else:
         p = parse3.Python3ParserSingle(debug_parser)
     p.version = version
-    # p.dumpGrammar() # debug
+    # p.dump_grammar() # debug
     return p
 
 class PythonParserSingle(PythonParser):
diff --git a/uncompyle6/parsers/parse15.py b/uncompyle6/parsers/parse15.py
index fb2e8b14..b1f51c5a 100644
--- a/uncompyle6/parsers/parse15.py
+++ b/uncompyle6/parsers/parse15.py
@@ -29,8 +29,8 @@ class Python15ParserSingle(Python21Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python15Parser()
-    p.checkGrammar()
-    p.dumpGrammar()
+    p.check_grammar()
+    p.dump_grammar()
 
 # local variables:
 # tab-width: 4
diff --git a/uncompyle6/parsers/parse2.py b/uncompyle6/parsers/parse2.py
index cc4fbc37..0e8f571d 100644
--- a/uncompyle6/parsers/parse2.py
+++ b/uncompyle6/parsers/parse2.py
@@ -417,4 +417,4 @@ class Python2ParserSingle(Python2Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python2Parser()
-    p.checkGrammar()
+    p.check_grammar()
diff --git a/uncompyle6/parsers/parse21.py b/uncompyle6/parsers/parse21.py
index ed26af82..7e52626a 100644
--- a/uncompyle6/parsers/parse21.py
+++ b/uncompyle6/parsers/parse21.py
@@ -33,8 +33,8 @@ class Python21ParserSingle(Python22Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python21Parser()
-    p.checkGrammar()
-    p.dumpGrammar()
+    p.check_grammar()
+    p.dump_grammar()
 
 # local variables:
 # tab-width: 4
diff --git a/uncompyle6/parsers/parse22.py b/uncompyle6/parsers/parse22.py
index 4ea7039e..ae4ec4c7 100644
--- a/uncompyle6/parsers/parse22.py
+++ b/uncompyle6/parsers/parse22.py
@@ -26,8 +26,8 @@ class Python22ParserSingle(Python23Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python22Parser()
-    p.checkGrammar()
-    p.dumpGrammar()
+    p.check_grammar()
+    p.dump_grammar()
 
 # local variables:
 # tab-width: 4
diff --git a/uncompyle6/parsers/parse23.py b/uncompyle6/parsers/parse23.py
index b2072e16..abbd41c7 100644
--- a/uncompyle6/parsers/parse23.py
+++ b/uncompyle6/parsers/parse23.py
@@ -67,8 +67,8 @@ class Python23ParserSingle(Python23Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python23Parser()
-    p.checkGrammar()
-    p.dumpGrammar()
+    p.check_grammar()
+    p.dump_grammar()
 
 # local variables:
 # tab-width: 4
diff --git a/uncompyle6/parsers/parse24.py b/uncompyle6/parsers/parse24.py
index cfba72dd..e406d11e 100644
--- a/uncompyle6/parsers/parse24.py
+++ b/uncompyle6/parsers/parse24.py
@@ -72,4 +72,4 @@ class Python24ParserSingle(Python24Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python24Parser()
-    p.checkGrammar()
+    p.check_grammar()
diff --git a/uncompyle6/parsers/parse25.py b/uncompyle6/parsers/parse25.py
index 8111ff93..93dd342b 100644
--- a/uncompyle6/parsers/parse25.py
+++ b/uncompyle6/parsers/parse25.py
@@ -60,4 +60,4 @@ class Python25ParserSingle(Python26Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python25Parser()
-    p.checkGrammar()
+    p.check_grammar()
diff --git a/uncompyle6/parsers/parse26.py b/uncompyle6/parsers/parse26.py
index 79ae264d..c68efb8c 100755
--- a/uncompyle6/parsers/parse26.py
+++ b/uncompyle6/parsers/parse26.py
@@ -276,7 +276,7 @@ class Python26ParserSingle(Python2Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python26Parser()
-    p.checkGrammar()
+    p.check_grammar()
     from uncompyle6 import PYTHON_VERSION, IS_PYPY
     if PYTHON_VERSION == 2.6:
         lhs, rhs, tokens, right_recursive = p.checkSets()
diff --git a/uncompyle6/parsers/parse27.py b/uncompyle6/parsers/parse27.py
index 614b30da..953ba5ac 100644
--- a/uncompyle6/parsers/parse27.py
+++ b/uncompyle6/parsers/parse27.py
@@ -129,7 +129,7 @@ class Python27ParserSingle(Python27Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python27Parser()
-    p.checkGrammar()
+    p.check_grammar()
     from uncompyle6 import PYTHON_VERSION, IS_PYPY
     if PYTHON_VERSION == 2.7:
         lhs, rhs, tokens, right_recursive = p.checkSets()
@@ -148,4 +148,5 @@ if __name__ == '__main__':
                                  for t in remain_tokens])
         remain_tokens = set(remain_tokens) - opcode_set
         print(remain_tokens)
-        # p.dumpGrammar()
+        p.check_grammar()
+        p.dump_grammar()
diff --git a/uncompyle6/parsers/parse3.py b/uncompyle6/parsers/parse3.py
index ed5e6c34..a338f6cc 100644
--- a/uncompyle6/parsers/parse3.py
+++ b/uncompyle6/parsers/parse3.py
@@ -433,7 +433,7 @@ class Python3Parser(PythonParser):
     @staticmethod
     def call_fn_name(token):
         """Customize CALL_FUNCTION to add the number of positional arguments"""
-        return '%s_%i' % (token.type, token.attr)
+        return '%s_%i' % (token.kind, token.attr)
 
     def custom_build_class_rule(self, opname, i, token, tokens, customize):
         '''
@@ -449,16 +449,16 @@ class Python3Parser(PythonParser):
         # FIXME: I bet this can be simplified
         # look for next MAKE_FUNCTION
         for i in range(i+1, len(tokens)):
-            if tokens[i].type.startswith('MAKE_FUNCTION'):
+            if tokens[i].kind.startswith('MAKE_FUNCTION'):
                 break
-            elif tokens[i].type.startswith('MAKE_CLOSURE'):
+            elif tokens[i].kind.startswith('MAKE_CLOSURE'):
                 break
             pass
         assert i < len(tokens), "build_class needs to find MAKE_FUNCTION or MAKE_CLOSURE"
-        assert tokens[i+1].type == 'LOAD_CONST', \
+        assert tokens[i+1].kind == 'LOAD_CONST', \
               "build_class expecting CONST after MAKE_FUNCTION/MAKE_CLOSURE"
         for i in range(i, len(tokens)):
-            if tokens[i].type == 'CALL_FUNCTION':
+            if tokens[i].kind == 'CALL_FUNCTION':
                 call_fn_tok = tokens[i]
                 break
         assert call_fn_tok, "build_class custom rule needs to find CALL_FUNCTION"
@@ -499,7 +499,7 @@ class Python3Parser(PythonParser):
         # Yes, this computation based on instruction name is a little bit hoaky.
         nak = ( len(opname)-len('CALL_FUNCTION') ) // 3
 
-        token.type = self.call_fn_name(token)
+        token.kind = self.call_fn_name(token)
         uniq_param = args_kw + args_pos
         if self.version == 3.5 and opname.startswith('CALL_FUNCTION_VAR'):
             # Python 3.5 changes the stack position of *args. KW args come
@@ -511,33 +511,33 @@ class Python3Parser(PythonParser):
                 kw = ''
             rule = ('call_function ::= expr expr ' +
                     ('pos_arg ' * args_pos) +
-                    ('kwarg ' * args_kw) + kw + token.type)
-            self.add_unique_rule(rule, token.type, uniq_param, customize)
+                    ('kwarg ' * args_kw) + kw + token.kind)
+            self.add_unique_rule(rule, token.kind, uniq_param, customize)
         if self.version >= 3.6 and opname == 'CALL_FUNCTION_EX_KW':
             rule = ('call_function36 ::= '
                     'expr build_tuple_unpack_with_call build_map_unpack_with_call '
                     'CALL_FUNCTION_EX_KW_1')
-            self.add_unique_rule(rule, token.type, uniq_param, customize)
+            self.add_unique_rule(rule, token.kind, uniq_param, customize)
             rule = 'call_function ::= call_function36'
         else:
             rule = ('call_function ::= expr ' +
                     ('pos_arg ' * args_pos) +
                     ('kwarg ' * args_kw) +
-                    'expr ' * nak + token.type)
+                    'expr ' * nak + token.kind)
 
-        self.add_unique_rule(rule, token.type, uniq_param, customize)
+        self.add_unique_rule(rule, token.kind, uniq_param, customize)
         if self.version >= 3.5:
             rule = ('async_call_function ::= expr ' +
                     ('pos_arg ' * args_pos) +
                     ('kwarg ' * args_kw) +
-                    'expr ' * nak + token.type +
+                    'expr ' * nak + token.kind +
                     ' GET_AWAITABLE LOAD_CONST YIELD_FROM')
-            self.add_unique_rule(rule, token.type, uniq_param, customize)
-            self.add_unique_rule('expr ::= async_call_function', token.type, uniq_param, customize)
+            self.add_unique_rule(rule, token.kind, uniq_param, customize)
+            self.add_unique_rule('expr ::= async_call_function', token.kind, uniq_param, customize)
         rule = ('classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d' %
                 (('expr ' * (args_pos-1)), opname, args_pos))
-        self.add_unique_rule(rule, token.type, uniq_param, customize)
+        self.add_unique_rule(rule, token.kind, uniq_param, customize)
 
     def add_make_function_rule(self, rule, opname, attr, customize):
         """Python 3.3 added a an addtional LOAD_CONST before MAKE_FUNCTION and
@@ -614,7 +614,7 @@ class Python3Parser(PythonParser):
         call_function ::= expr CALL_METHOD
         """
         for i, token in enumerate(tokens):
-            opname = token.type
+            opname = token.kind
             opname_base = opname[:opname.rfind('_')]
 
             if opname == 'PyPy':
@@ -892,8 +892,6 @@ class Python3Parser(PythonParser):
         return
 
     def reduce_is_invalid(self, rule, ast, tokens, first, last):
-        if not tokens:
-            return
         lhs = rule[0]
         if lhs in ('augassign1', 'augassign2') and ast[0][0] == 'and':
             return True
@@ -913,7 +911,8 @@ class Python3Parser(PythonParser):
                 last += 1
             return tokens[first].attr == tokens[last].offset
         elif lhs == 'while1stmt':
-            if tokens[last] in ('COME_FROM_LOOP', 'JUMP_BACK'):
+            if (0 <= last < len(tokens)
+                and tokens[last] in ('COME_FROM_LOOP', 'JUMP_BACK')):
                 # jump_back should be right afer SETUP_LOOP. Test?
                 last += 1
             while last < len(tokens) and isinstance(tokens[last].offset, str):
@@ -957,10 +956,10 @@ def info(args):
         p = Python32Parser()
     elif arg == '3.0':
         p = Python30Parser()
-    p.checkGrammar()
+    p.check_grammar()
     if len(sys.argv) > 1 and sys.argv[1] == 'dump':
         print('-' * 50)
-        p.dumpGrammar()
+        p.dump_grammar()
 
 if __name__ == '__main__':
     import sys
diff --git a/uncompyle6/parsers/parse32.py b/uncompyle6/parsers/parse32.py
index 598099bc..8d64346e 100644
--- a/uncompyle6/parsers/parse32.py
+++ b/uncompyle6/parsers/parse32.py
@@ -42,7 +42,7 @@ class Python32Parser(Python3Parser):
     def add_custom_rules(self, tokens, customize):
         super(Python32Parser, self).add_custom_rules(tokens, customize)
         for i, token in enumerate(tokens):
-            opname = token.type
+            opname = token.kind
             if opname.startswith('MAKE_FUNCTION_A'):
                 args_pos, args_kw, annotate_args = token.attr
                 # Check that there are 2 annotated params?
diff --git a/uncompyle6/parsers/parse34.py b/uncompyle6/parsers/parse34.py
index 080236f6..bd6be275 100644
--- a/uncompyle6/parsers/parse34.py
+++ b/uncompyle6/parsers/parse34.py
@@ -29,7 +29,7 @@ class Python34ParserSingle(Python34Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python34Parser()
-    p.checkGrammar()
+    p.check_grammar()
     from uncompyle6 import PYTHON_VERSION, IS_PYPY
     if PYTHON_VERSION == 3.4:
         lhs, rhs, tokens, right_recursive = p.checkSets()
diff --git a/uncompyle6/parsers/parse35.py b/uncompyle6/parsers/parse35.py
index b8cf3723..12bd5fbd 100644
--- a/uncompyle6/parsers/parse35.py
+++ b/uncompyle6/parsers/parse35.py
@@ -142,7 +142,7 @@ class Python35Parser(Python34Parser):
     def add_custom_rules(self, tokens, customize):
         super(Python35Parser, self).add_custom_rules(tokens, customize)
         for i, token in enumerate(tokens):
-            opname = token.type
+            opname = token.kind
             if opname == 'BUILD_MAP_UNPACK_WITH_CALL':
                 nargs = token.attr % 256
                 map_unpack_n = "map_unpack_%s" % nargs
@@ -152,7 +152,7 @@ class Python35Parser(Python34Parser):
                 self.add_unique_rule(rule, opname, token.attr, customize)
                 call_token = tokens[i+1]
                 if self.version == 3.5:
-                    rule = 'call_function ::= expr unmapexpr ' + call_token.type
+                    rule = 'call_function ::= expr unmapexpr ' + call_token.kind
                     self.add_unique_rule(rule, opname, token.attr, customize)
                 pass
             pass
@@ -164,7 +164,7 @@ class Python35ParserSingle(Python35Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python35Parser()
-    p.checkGrammar()
+    p.check_grammar()
     from uncompyle6 import PYTHON_VERSION, IS_PYPY
     if PYTHON_VERSION == 3.5:
         lhs, rhs, tokens, right_recursive = p.checkSets()
diff --git a/uncompyle6/parsers/parse36.py b/uncompyle6/parsers/parse36.py
index 95f82cc7..7a369997 100644
--- a/uncompyle6/parsers/parse36.py
+++ b/uncompyle6/parsers/parse36.py
@@ -36,7 +36,7 @@ class Python36Parser(Python35Parser):
     def add_custom_rules(self, tokens, customize):
         super(Python36Parser, self).add_custom_rules(tokens, customize)
         for i, token in enumerate(tokens):
-            opname = token.type
+            opname = token.kind
 
             if opname == 'FORMAT_VALUE':
                 rules_str = """
@@ -64,10 +64,10 @@ class Python36Parser(Python35Parser):
         if opname.startswith('CALL_FUNCTION_KW'):
             values = 'expr ' * token.attr
-            rule = 'call_function ::= expr kwargs_only_36 {token.type}'.format(**locals())
-            self.add_unique_rule(rule, token.type, token.attr, customize)
+            rule = 'call_function ::= expr kwargs_only_36 {token.kind}'.format(**locals())
+            self.add_unique_rule(rule, token.kind, token.attr, customize)
             rule = 'kwargs_only_36 ::= {values} LOAD_CONST'.format(**locals())
-            self.add_unique_rule(rule, token.type, token.attr, customize)
+            self.add_unique_rule(rule, token.kind, token.attr, customize)
         else:
             super(Python36Parser, self).custom_classfunc_rule(opname, token, customize)
diff --git a/uncompyle6/parsers/parse37.py b/uncompyle6/parsers/parse37.py
index 35f52eac..2b04a113 100644
--- a/uncompyle6/parsers/parse37.py
+++ b/uncompyle6/parsers/parse37.py
@@ -21,7 +21,7 @@ class Python37ParserSingle(Python37Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python37Parser()
-    p.checkGrammar()
+    p.check_grammar()
     from uncompyle6 import PYTHON_VERSION, IS_PYPY
     if PYTHON_VERSION == 3.7:
         lhs, rhs, tokens, right_recursive = p.checkSets()
diff --git a/uncompyle6/scanners/scanner22.py b/uncompyle6/scanners/scanner22.py
index c3effbc2..00060faa 100644
--- a/uncompyle6/scanners/scanner22.py
+++ b/uncompyle6/scanners/scanner22.py
@@ -30,5 +30,5 @@ class Scanner22(scan.Scanner23):
 
     def ingest22(self, co, classname=None, code_objects={}, show_asm=None):
         tokens, customize = self.parent_ingest(co, classname, code_objects, show_asm)
-        tokens = [t for t in tokens if t.type != 'SET_LINENO']
+        tokens = [t for t in tokens if t.kind != 'SET_LINENO']
         return tokens, customize
diff --git a/uncompyle6/scanners/scanner26.py b/uncompyle6/scanners/scanner26.py
index c7297343..46838538 100755
--- a/uncompyle6/scanners/scanner26.py
+++ b/uncompyle6/scanners/scanner26.py
@@ -217,8 +217,8 @@ class Scanner26(scan.Scanner2):
                     # FIXME: this is a hack to catch stuff like:
                     #   if x: continue
                     # the "continue" is not on a new line.
-                    if len(tokens) and tokens[-1].type == 'JUMP_BACK':
-                        tokens[-1].type = intern('CONTINUE')
+                    if len(tokens) and tokens[-1].kind == 'JUMP_BACK':
+                        tokens[-1].kind = intern('CONTINUE')
 
             elif op in self.opc.JABS_OPS:
                 pattr = repr(oparg)
@@ -258,18 +258,18 @@ class Scanner26(scan.Scanner2):
                         and self.code[offset+3] not in (self.opc.END_FINALLY,
                                                         self.opc.POP_BLOCK)):
                     if ((offset in self.linestartoffsets and
-                         tokens[-1].type == 'JUMP_BACK')
+                         tokens[-1].kind == 'JUMP_BACK')
                         or offset not in self.not_continue):
                         op_name = 'CONTINUE'
                     else:
                         # FIXME: this is a hack to catch stuff like:
                         #   if x: continue
                         # the "continue" is not on a new line.
-                        if tokens[-1].type == 'JUMP_BACK':
+                        if tokens[-1].kind == 'JUMP_BACK':
                             # We need 'intern' since we have
                             # already have processed the previous
                             # token.
-                            tokens[-1].type = intern('CONTINUE')
+                            tokens[-1].kind = intern('CONTINUE')
 
             elif op == self.opc.LOAD_GLOBAL:
                 if offset in self.load_asserts:
diff --git a/uncompyle6/scanners/scanner27.py b/uncompyle6/scanners/scanner27.py
index 7046865a..bcd93635 100755
--- a/uncompyle6/scanners/scanner27.py
+++ b/uncompyle6/scanners/scanner27.py
@@ -92,9 +92,9 @@ class Scanner27(Scanner2):
                 # the "continue" is not on a new line.
                 n = len(tokens)
                 if (n > 2 and
-                    tokens[-1].type == 'JUMP_BACK' and
+                    tokens[-1].kind == 'JUMP_BACK' and
                     self.code[offset+3] == self.opc.END_FINALLY):
-                    tokens[-1].type = intern('CONTINUE')
+                    tokens[-1].kind = intern('CONTINUE')
 
         pass
diff --git a/uncompyle6/scanners/scanner3.py b/uncompyle6/scanners/scanner3.py
index 165f9401..f99c03c5 100644
--- a/uncompyle6/scanners/scanner3.py
+++ b/uncompyle6/scanners/scanner3.py
@@ -400,12 +400,12 @@ class Scanner3(Scanner):
                     # the "continue" is not on a new line.
                     # There are other situations where we don't catch
                     # CONTINUE as well.
-                    if tokens[-1].type == 'JUMP_BACK' and tokens[-1].attr <= argval:
-                        if tokens[-2].type == 'BREAK_LOOP':
+                    if tokens[-1].kind == 'JUMP_BACK' and tokens[-1].attr <= argval:
+                        if tokens[-2].kind == 'BREAK_LOOP':
                             del tokens[-1]
                         else:
                             # intern is used because we are changing the *previous* token
-                            tokens[-1].type = intern('CONTINUE')
+                            tokens[-1].kind = intern('CONTINUE')
                 if last_op_was_break and opname == 'CONTINUE':
                     last_op_was_break = False
                     continue
diff --git a/uncompyle6/scanners/scanner36.py b/uncompyle6/scanners/scanner36.py
index a9d5bd01..19e6efd4 100644
--- a/uncompyle6/scanners/scanner36.py
+++ b/uncompyle6/scanners/scanner36.py
@@ -27,14 +27,14 @@ class Scanner36(Scanner3):
             # The lowest bit of flags indicates whether the
             # var-keyword argument is placed at the top of the stack
             if t.op == self.opc.CALL_FUNCTION_EX and t.attr & 1:
-                t.type = 'CALL_FUNCTION_EX_KW'
+                t.kind = 'CALL_FUNCTION_EX_KW'
                 pass
             elif t.op == self.opc.CALL_FUNCTION_KW:
-                t.type = 'CALL_FUNCTION_KW_{t.attr}'.format(**locals())
+                t.kind = 'CALL_FUNCTION_KW_{t.attr}'.format(**locals())
             elif t.op == self.opc.BUILD_TUPLE_UNPACK_WITH_CALL:
-                t.type = 'BUILD_TUPLE_UNPACK_WITH_CALL_%d' % t.attr
+                t.kind = 'BUILD_TUPLE_UNPACK_WITH_CALL_%d' % t.attr
             elif t.op == self.opc.BUILD_MAP_UNPACK_WITH_CALL:
-                t.type = 'BUILD_MAP_UNPACK_WITH_CALL_%d' % t.attr
+                t.kind = 'BUILD_MAP_UNPACK_WITH_CALL_%d' % t.attr
             pass
         return tokens, customize
diff --git a/uncompyle6/scanners/tok.py b/uncompyle6/scanners/tok.py
index f21848c0..d750aabc 100644
--- a/uncompyle6/scanners/tok.py
+++ b/uncompyle6/scanners/tok.py
@@ -8,7 +8,7 @@ from uncompyle6 import PYTHON3
 if PYTHON3:
     intern = sys.intern
 
-class Token():
+class Token:
     """
     Class representing a byte-code instruction.
 
@@ -21,7 +21,7 @@ class Token():
     # pattr = argrepr
     def __init__(self, opname, attr=None, pattr=None, offset=-1,
                  linestart=None, op=None, has_arg=None, opc=None):
-        self.type = intern(opname)
+        self.kind = intern(opname)
         self.op = op
         self.has_arg = has_arg
         self.attr = attr
@@ -36,20 +36,20 @@ class Token():
     def __eq__(self, o):
         """ '==', but it's okay if offsets and linestarts are different"""
         if isinstance(o, Token):
-            # Both are tokens: compare type and attr
+            # Both are tokens: compare kind and attr
             # It's okay if offsets are different
-            return (self.type == o.type) and (self.pattr == o.pattr)
+            return (self.kind == o.kind) and (self.pattr == o.pattr)
         else:
-            return self.type == o
+            return self.kind == o
 
     def __repr__(self):
-        return str(self.type)
+        return str(self.kind)
 
     # def __str__(self):
    #     pattr = self.pattr if self.pattr is not None else ''
    #     prefix = '\n%3d ' % self.linestart if self.linestart else (' ' * 6)
    #     return (prefix +
-   #             ('%9s %-18s %r' % (self.offset, self.type, pattr)))
+   #             ('%9s %-18s %r' % (self.offset, self.kind, pattr)))
 
     def __str__(self):
         return self.format(line_prefix='')
@@ -59,7 +59,7 @@ class Token():
             prefix = '\n%s%4d ' % (line_prefix, self.linestart)
         else:
             prefix = ' ' * (6 + len(line_prefix))
-        offset_opname = '%6s %-17s' % (self.offset, self.type)
+        offset_opname = '%6s %-17s' % (self.offset, self.kind)
         if not self.has_arg:
             return "%s%s" % (prefix, offset_opname)
 
@@ -83,14 +83,14 @@ class Token():
                 pattr = self.opc.cmp_op[self.attr]
                 # And so on. See xdis/bytecode.py get_instructions_bytes
                 pass
-        elif re.search('_\d+$', self.type):
+        elif re.search('_\d+$', self.kind):
             return "%s%s%s" % (prefix, offset_opname, argstr)
         else:
             pattr = ''
         return "%s%s%s %r" % (prefix, offset_opname, argstr, pattr)
 
     def __hash__(self):
-        return hash(self.type)
+        return hash(self.kind)
 
     def __getitem__(self, i):
         raise IndexError
diff --git a/uncompyle6/semantics/check_ast.py b/uncompyle6/semantics/check_ast.py
index efea085d..f2b42bde 100644
--- a/uncompyle6/semantics/check_ast.py
+++ b/uncompyle6/semantics/check_ast.py
@@ -9,16 +9,16 @@ before reduction and don't reduce when there is a problem.
 """
 
 def checker(ast, in_loop, errors):
-    in_loop = in_loop or ast.type in ('while1stmt', 'whileTruestmt',
+    in_loop = in_loop or ast.kind in ('while1stmt', 'whileTruestmt',
                                       'whilestmt', 'whileelsestmt',
                                       'while1elsestmt', 'for_block')
-    if ast.type in ('augassign1', 'augassign2') and ast[0][0] == 'and':
+    if ast.kind in ('augassign1', 'augassign2') and ast[0][0] == 'and':
         text = str(ast)
         error_text = '\n# improper augmented assigment (e.g. +=, *=, ...):\n#\t' + '\n# '.join(text.split("\n")) + '\n'
         errors.append(error_text)
 
     for node in ast:
-        if not in_loop and node.type in ('continue_stmt', 'break_stmt'):
+        if not in_loop and node.kind in ('continue_stmt', 'break_stmt'):
             text = str(node)
             error_text = '\n# not in loop:\n#\t' + '\n# '.join(text.split("\n"))
             errors.append(error_text)
diff --git a/uncompyle6/semantics/make_function.py b/uncompyle6/semantics/make_function.py
index e62fadb8..c461aa3a 100644
--- a/uncompyle6/semantics/make_function.py
+++ b/uncompyle6/semantics/make_function.py
@@ -17,7 +17,7 @@ def find_all_globals(node, globs):
     for n in node:
         if isinstance(n, AST):
             globs = find_all_globals(n, globs)
-        elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'):
+        elif n.kind in ('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'):
             globs.add(n.pattr)
     return globs
 
@@ -26,7 +26,7 @@ def find_globals(node, globs):
     for n in node:
         if isinstance(n, AST):
             globs = find_globals(n, globs)
-        elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL'):
+        elif n.kind in ('STORE_GLOBAL', 'DELETE_GLOBAL'):
             globs.add(n.pattr)
     return globs
 
@@ -36,7 +36,7 @@ def find_none(node):
         if n not in ('return_stmt', 'return_if_stmt'):
             if find_none(n):
                 return True
-        elif n.type == 'LOAD_CONST' and n.pattr is None:
+        elif n.kind == 'LOAD_CONST' and n.pattr is None:
             return True
     return False
 
@@ -64,7 +64,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
         return name
 
     # MAKE_FUNCTION_... or MAKE_CLOSURE_...
-    assert node[-1].type.startswith('MAKE_')
+    assert node[-1].kind.startswith('MAKE_')
 
     annotate_tuple = None
     for annotate_last in range(len(node)-1, -1, -1):
@@ -80,7 +80,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
             i = -1
             j = annotate_last-1
             l = -len(node)
-            while j >= l and node[j].type in ('annotate_arg' 'annotate_tuple'):
+            while j >= l and node[j].kind in ('annotate_arg' 'annotate_tuple'):
                 annotate_args[annotate_tup[i]] = node[j][0]
                 i -= 1
                 j -= 1
@@ -106,7 +106,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
         lambda_index = None
 
     if lambda_index and isLambda and iscode(node[lambda_index].attr):
-        assert node[lambda_index].type == 'LOAD_LAMBDA'
+        assert node[lambda_index].kind == 'LOAD_LAMBDA'
         code = node[lambda_index].attr
     else:
         code = codeNode.attr
@@ -318,7 +318,7 @@ def make_function2(self, node, isLambda, nested=1, codeNode=None):
         return name
 
     # MAKE_FUNCTION_... or MAKE_CLOSURE_...
-    assert node[-1].type.startswith('MAKE_')
+    assert node[-1].kind.startswith('MAKE_')
 
     args_node = node[-1]
     if isinstance(args_node.attr, tuple):
@@ -334,7 +334,7 @@ def make_function2(self, node, isLambda, nested=1, codeNode=None):
         lambda_index = None
 
     if lambda_index and isLambda and iscode(node[lambda_index].attr):
-        assert node[lambda_index].type == 'LOAD_LAMBDA'
+        assert node[lambda_index].kind == 'LOAD_LAMBDA'
         code = node[lambda_index].attr
     else:
         code = codeNode.attr
@@ -450,7 +450,7 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
         return name
 
     # MAKE_FUNCTION_... or MAKE_CLOSURE_...
-    assert node[-1].type.startswith('MAKE_')
+    assert node[-1].kind.startswith('MAKE_')
 
     args_node = node[-1]
     if isinstance(args_node.attr, tuple):
@@ -484,7 +484,7 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
         lambda_index = None
 
     if lambda_index and isLambda and iscode(node[lambda_index].attr):
-        assert node[lambda_index].type == 'LOAD_LAMBDA'
+        assert node[lambda_index].kind == 'LOAD_LAMBDA'
         code = node[lambda_index].attr
     else:
         code = codeNode.attr
@@ -585,7 +585,7 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
     for n in node:
         if n == 'pos_arg':
             continue
-        elif self.version >= 3.4 and not (n.type in ('kwargs', 'kwarg')):
+        elif self.version >= 3.4 and not (n.kind in ('kwargs', 'kwarg')):
             continue
         else:
             self.preorder(n)
diff --git a/uncompyle6/semantics/pysource.py b/uncompyle6/semantics/pysource.py
index 09428a5d..c92ac63b 100644
--- a/uncompyle6/semantics/pysource.py
+++ b/uncompyle6/semantics/pysource.py
@@ -438,13 +438,13 @@ class SourceWalker(GenericASTTraversal, object):
             for i in mapping[1:]:
                 key = key[i]
                 pass
-            if key.type.startswith('CALL_FUNCTION_VAR_KW'):
+            if key.kind.startswith('CALL_FUNCTION_VAR_KW'):
                 # Python 3.5 changes the stack position of *args. kwargs come
                 # after *args whereas in earlier Pythons, *args is at the end
                 # which simpilfiies things from our perspective.
                 # Python 3.6+ replaces CALL_FUNCTION_VAR_KW with CALL_FUNCTION_EX
                 # We will just swap the order to make it look like earlier Python 3.
-                entry = table[key.type]
+                entry = table[key.kind]
                 kwarg_pos = entry[2][1]
                 args_pos = kwarg_pos - 1
                 # Put last node[args_pos] after subsequent kwargs
@@ -645,6 +645,20 @@ class SourceWalker(GenericASTTraversal, object):
                 node == AST('return_stmt',
                             [AST('ret_expr', [NONE]), Token('RETURN_VALUE')]))
 
+    # Python 3.x can have be dead code as a result of its optimization?
+    # So we'll add a # at the end of the return lambda so the rest is ignored
+    def n_return_lambda(self, node):
+        if 1 <= len(node) <= 2:
+            self.preorder(node[0])
+            self.write(' # Avoid dead code: ')
+            self.prune()
+        else:
+            # We can't comment out like above because there may be a trailing ')'
+            # that needs to be written
+            assert len(node) == 3 and node[2] == 'LAMBDA_MARKER'
+            self.preorder(node[0])
+            self.prune()
+
     def n_return_stmt(self, node):
         if self.params['isLambda']:
             self.preorder(node[0])
@@ -719,12 +733,12 @@ class SourceWalker(GenericASTTraversal, object):
 
     def n_expr(self, node):
         p = self.prec
-        if node[0].type.startswith('binary_expr'):
+        if node[0].kind.startswith('binary_expr'):
             n = node[0][-1][0]
         else:
             n = node[0]
 
-        self.prec = PRECEDENCE.get(n.type, -2)
+        self.prec = PRECEDENCE.get(n.kind, -2)
         if n == 'LOAD_CONST' and repr(n.pattr)[0] == '-':
             self.prec = 6
@@ -807,9 +821,9 @@ class SourceWalker(GenericASTTraversal, object):
         self.prune()
 
     def n_delete_subscr(self, node):
-        if node[-2][0] == 'build_list' and node[-2][0][-1].type.startswith('BUILD_TUPLE'):
+        if node[-2][0] == 'build_list' and node[-2][0][-1].kind.startswith('BUILD_TUPLE'):
             if node[-2][0][-1] != 'BUILD_TUPLE_0':
-                node[-2][0].type = 'build_tuple2'
+                node[-2][0].kind = 'build_tuple2'
         self.default(node)
 
     n_store_subscr = n_binary_subscr = n_delete_subscr
@@ -818,9 +832,9 @@ class SourceWalker(GenericASTTraversal, object):
     def n_tryfinallystmt(self, node):
         if len(node[1][0]) == 1 and node[1][0][0] == 'stmt':
             if node[1][0][0][0] == 'trystmt':
-                node[1][0][0][0].type = 'tf_trystmt'
+                node[1][0][0][0].kind = 'tf_trystmt'
             if node[1][0][0][0] == 'tryelsestmt':
-                node[1][0][0][0].type = 'tf_tryelsestmt'
+                node[1][0][0][0].kind = 'tf_tryelsestmt'
         self.default(node)
 
     def n_exec_stmt(self, node):
@@ -845,26 +859,26 @@ class SourceWalker(GenericASTTraversal, object):
         if len(n) == 1 == len(n[0]) and n[0] == '_stmts':
             n = n[0][0][0]
-        elif n[0].type in ('lastc_stmt', 'lastl_stmt'):
+        elif n[0].kind in ('lastc_stmt', 'lastl_stmt'):
             n = n[0][0]
         else:
             if not preprocess:
                 self.default(node)
             return
 
-        if n.type in ('ifstmt', 'iflaststmt', 'iflaststmtl'):
-            node.type = 'ifelifstmt'
-            n.type = 'elifstmt'
-        elif n.type in ('ifelsestmtr',):
-            node.type = 'ifelifstmt'
-            n.type = 'elifelsestmtr'
-        elif n.type in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
-            node.type = 'ifelifstmt'
+        if n.kind in ('ifstmt', 'iflaststmt', 'iflaststmtl'):
+            node.kind = 'ifelifstmt'
+            n.kind = 'elifstmt'
+        elif n.kind in ('ifelsestmtr',):
+            node.kind = 'ifelifstmt'
+            n.kind = 'elifelsestmtr'
+        elif n.kind in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
+            node.kind = 'ifelifstmt'
             self.n_ifelsestmt(n, preprocess=True)
             if n == 'ifelifstmt':
-                n.type = 'elifelifstmt'
-            elif n.type in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
-                n.type = 'elifelsestmt'
+                n.kind = 'elifelifstmt'
+            elif n.kind in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
+                n.kind = 'elifelsestmt'
         if not preprocess:
             self.default(node)
@@ -873,7 +887,7 @@ class SourceWalker(GenericASTTraversal, object):
     def n_ifelsestmtr(self, node):
         if node[2] == 'COME_FROM':
             return_stmts_node = node[3]
-            node.type = 'ifelsestmtr2'
+            node.kind = 'ifelsestmtr2'
         else:
             return_stmts_node = node[2]
         if len(return_stmts_node) != 2:
@@ -904,7 +918,7 @@ class SourceWalker(GenericASTTraversal, object):
         for n in return_stmts_node[0]:
             if (n[0] == 'ifstmt' and n[0][1][0] == 'return_if_stmts'):
                 if prev_stmt_is_if_ret:
-                    n[0].type = 'elifstmt'
+                    n[0].kind = 'elifstmt'
                 prev_stmt_is_if_ret = True
             else:
                 prev_stmt_is_if_ret = False
@@ -924,7 +938,7 @@ class SourceWalker(GenericASTTraversal, object):
     def n_elifelsestmtr(self, node):
         if node[2] == 'COME_FROM':
             return_stmts_node = node[3]
-            node.type = 'elifelsestmtr2'
+            node.kind = 'elifelsestmtr2'
         else:
             return_stmts_node = node[2]
 
@@ -944,7 +958,7 @@ class SourceWalker(GenericASTTraversal, object):
         self.indent_less()
 
         for n in return_stmts_node[0]:
-            n[0].type = 'elifstmt'
+            n[0].kind = 'elifstmt'
             self.preorder(n)
         self.println(self.indent, 'else:')
         self.indent_more()
@@ -954,7 +968,7 @@ class SourceWalker(GenericASTTraversal, object):
 
     def n_import_as(self, node):
         store_node = node[-1][-1]
-        assert store_node.type.startswith('STORE_')
+        assert store_node.kind.startswith('STORE_')
         iname = node[0].pattr # import name
         sname = store_node.pattr # store_name
         if iname and iname == sname or iname.startswith(sname + '.'):
@@ -1074,7 +1088,7 @@ class SourceWalker(GenericASTTraversal, object):
         """
         p = self.prec
         self.prec = 27
-        if node[-1].type == 'list_iter':
+        if node[-1].kind == 'list_iter':
             n = node[-1]
         elif self.is_pypy and node[-1] == 'JUMP_BACK':
             n = node[-2]
@@ -1198,7 +1212,7 @@ class SourceWalker(GenericASTTraversal, object):
         self.write('{')
         if node[0] in ['LOAD_SETCOMP', 'LOAD_DICTCOMP']:
             self.comprehension_walk3(node, 1, 0)
-        elif node[0].type == 'load_closure' and self.version >= 3.0:
+        elif node[0].kind == 'load_closure' and self.version >= 3.0:
             self.setcomprehension_walk3(node, collection_index=4)
         else:
             self.comprehension_walk(node, iter_index=4)
@@ -1265,7 +1279,7 @@ class SourceWalker(GenericASTTraversal, object):
 
         # Python 2.7+ starts including set_comp_body
        # Python 3.5+ starts including setcomp_func
-        assert n.type in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
+        assert n.kind in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
         assert designator, "Couldn't find designator in list/set comprehension"
 
         self.preorder(n[0])
@@ -1315,7 +1329,7 @@ class SourceWalker(GenericASTTraversal, object):
                 n = n[3]
             elif n in ('list_if', 'list_if_not'):
                 # FIXME: just a guess
-                if n[0].type == 'expr':
+                if n[0].kind == 'expr':
                     list_if = n
                 else:
                     list_if = n[1]
@@ -1336,7 +1350,7 @@ class SourceWalker(GenericASTTraversal, object):
 
     def n_listcomp(self, node):
         self.write('[')
-        if node[0].type == 'load_closure':
+        if node[0].kind == 'load_closure':
             self.listcomprehension_walk2(node)
         else:
             self.comprehension_walk3(node, 1, 0)
@@ -1373,7 +1387,7 @@ class SourceWalker(GenericASTTraversal, object):
                 n = n[3]
             elif n in ('list_if', 'list_if_not', 'comp_if', 'comp_if_not'):
                 # FIXME: just a guess
-                if n[0].type == 'expr':
+                if n[0].kind == 'expr':
                     list_if = n
                 else:
                     list_if = n[1]
@@ -1525,10 +1539,10 @@ class SourceWalker(GenericASTTraversal, object):
 
     def print_super_classes3(self, node):
         n = len(node)-1
-        if node.type != 'expr':
-            assert node[n].type.startswith('CALL_FUNCTION')
+        if node.kind != 'expr':
+            assert node[n].kind.startswith('CALL_FUNCTION')
             for i in range(n-2, 0, -1):
-                if not node[i].type in ['expr', 'LOAD_CLASSNAME']:
+                if not node[i].kind in ['expr', 'LOAD_CLASSNAME']:
                     break
                 pass
 
@@ -1568,7 +1582,7 @@ class SourceWalker(GenericASTTraversal, object):
         line_number = self.line_number
 
         if self.version >= 3.0 and not self.is_pypy:
-            if node[0].type.startswith('kvlist'):
+            if node[0].kind.startswith('kvlist'):
                 # Python 3.5+ style key/value list in mapexpr
                 kv_node = node[0]
                 l = list(kv_node)
@@ -1591,11 +1605,11 @@ class SourceWalker(GenericASTTraversal, object):
                     i += 2
                     pass
                 pass
-            elif len(node) > 1 and node[1].type.startswith('kvlist'):
+            elif len(node) > 1 and node[1].kind.startswith('kvlist'):
                 # Python 3.0..3.4 style key/value list in mapexpr
                 kv_node = node[1]
                 l = list(kv_node)
-                if len(l) > 0 and l[0].type == 'kv3':
+                if len(l) > 0 and l[0].kind == 'kv3':
                     # Python 3.2 does this
                     kv_node = node[1][0]
                     l = list(kv_node)
@@ -1620,7 +1634,7 @@ class SourceWalker(GenericASTTraversal, object):
                     i += 3
                     pass
                 pass
-            elif node[-1].type.startswith('BUILD_CONST_KEY_MAP'):
+            elif node[-1].kind.startswith('BUILD_CONST_KEY_MAP'):
                 # Python 3.6+ style const map
                 keys = node[-2].pattr
                 values = node[:-2]
@@ -1645,7 +1659,7 @@ class SourceWalker(GenericASTTraversal, object):
                 pass
         else:
             # Python 2 style kvlist
-            assert node[-1].type.startswith('kvlist')
+            assert node[-1].kind.startswith('kvlist')
             kv_node = node[-1] # goto kvlist
 
             first_time = True
@@ -1711,7 +1725,7 @@ class SourceWalker(GenericASTTraversal, object):
         p = self.prec
         self.prec = 100
         lastnode = node.pop()
-        lastnodetype = lastnode.type
+        lastnodetype = lastnode.kind
 
         # If this build list is inside a CALL_FUNCTION_VAR,
        # then the first * has already been printed.
@@ -1781,7 +1795,7 @@ class SourceWalker(GenericASTTraversal, object):
         self.prune()
 
     def n_unpack(self, node):
-        if node[0].type.startswith('UNPACK_EX'):
+        if node[0].kind.startswith('UNPACK_EX'):
             # Python 3+
             before_count, after_count = node[0].attr
             for i in range(before_count+1):
@@ -1796,8 +1810,8 @@ class SourceWalker(GenericASTTraversal, object):
             self.prune()
             return
         for n in node[1:]:
-            if n[0].type == 'unpack':
-                n[0].type = 'unpack_w_parens'
+            if n[0].kind == 'unpack':
+                n[0].kind = 'unpack_w_parens'
         self.default(node)
 
     n_unpack_w_parens = n_unpack
@@ -1806,25 +1820,25 @@ class SourceWalker(GenericASTTraversal, object):
         # A horrible hack for Python 3.0 .. 3.2
         if 3.0 <= self.version <= 3.2 and len(node) == 2:
             if (node[0][0] == 'LOAD_FAST' and node[0][0].pattr == '__locals__' and
-                node[1][0].type == 'STORE_LOCALS'):
+                node[1][0].kind == 'STORE_LOCALS'):
                 self.prune()
         self.default(node)
 
     def n_assign2(self, node):
         for n in node[-2:]:
             if n[0] == 'unpack':
-                n[0].type = 'unpack_w_parens'
+                n[0].kind = 'unpack_w_parens'
         self.default(node)
 
     def n_assign3(self, node):
         for n in node[-3:]:
             if n[0] == 'unpack':
-                n[0].type = 'unpack_w_parens'
+                n[0].kind = 'unpack_w_parens'
         self.default(node)
 
     def n_except_cond2(self, node):
         if node[-2][0] == 'unpack':
-            node[-2][0].type = 'unpack_w_parens'
+            node[-2][0].kind = 'unpack_w_parens'
         self.default(node)
 
     def template_engine(self, entry, startnode):
@@ -1833,7 +1847,7 @@
         specifications such as %c, %C, and so on.
         """
 
-        # self.println("----> ", startnode.type, ', ', entry[0])
+        # self.println("----> ", startnode.kind, ', ', entry[0])
         fmt = entry[0]
         arg = 1
         i = 0
@@ -1861,7 +1875,7 @@ class SourceWalker(GenericASTTraversal, object):
             # Used mostly on the LHS of an assignment
            # BUILD_TUPLE_n is pretty printed and may take care of other uses.
             elif typ == ',':
-                if (node.type in ('unpack', 'unpack_w_parens') and
+                if (node.kind in ('unpack', 'unpack_w_parens') and
                     node[0].attr == 1):
                     self.write(',')
             elif typ == 'c':
@@ -1933,8 +1947,8 @@ class SourceWalker(GenericASTTraversal, object):
                 key = key[i]
                 pass
 
-        if key.type in table:
-            self.template_engine(table[key.type], node)
+        if key.kind in table:
+            self.template_engine(table[key.kind], node)
             self.prune()
 
     def customize(self, customize):
@@ -2154,10 +2168,10 @@ class SourceWalker(GenericASTTraversal, object):
 
         if isLambda:
             for t in tokens:
-                if t.type == 'RETURN_END_IF':
-                    t.type = 'RETURN_END_IF_LAMBDA'
-                elif t.type == 'RETURN_VALUE':
-                    t.type = 'RETURN_VALUE_LAMBDA'
+                if t.kind == 'RETURN_END_IF':
+                    t.kind = 'RETURN_END_IF_LAMBDA'
+                elif t.kind == 'RETURN_VALUE':
+                    t.kind = 'RETURN_VALUE_LAMBDA'
             tokens.append(Token('LAMBDA_MARKER'))
             try:
                 ast = python_parser.parse(self.p, tokens, customize)
@@ -2174,10 +2188,10 @@ class SourceWalker(GenericASTTraversal, object):
         # than fight (with the grammar to not emit "return None").
         if self.hide_internal:
             if len(tokens) >= 2 and not noneInNames:
-                if tokens[-1].type in ('RETURN_VALUE', 'RETURN_VALUE_LAMBDA'):
+                if tokens[-1].kind in ('RETURN_VALUE', 'RETURN_VALUE_LAMBDA'):
                     # Python 3.4's classes can add a "return None" which is
                     # invalid syntax.
-                    if tokens[-2].type == 'LOAD_CONST':
+                    if tokens[-2].kind == 'LOAD_CONST':
                         if isTopLevel or tokens[-2].pattr is None:
                             del tokens[-2:]
                         else:
diff --git a/uncompyle6/verify.py b/uncompyle6/verify.py
index a353c8e2..80f02c7d 100755
--- a/uncompyle6/verify.py
+++ b/uncompyle6/verify.py
@@ -242,18 +242,18 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
     scanner.resetTokenClass() # restore Token class
 
     targets1 = dis.findlabels(code_obj1.co_code)
-    tokens1 = [t for t in tokens1 if t.type != 'COME_FROM']
-    tokens2 = [t for t in tokens2 if t.type != 'COME_FROM']
+    tokens1 = [t for t in tokens1 if t.kind != 'COME_FROM']
+    tokens2 = [t for t in tokens2 if t.kind != 'COME_FROM']
 
     i1 = 0; i2 = 0
     offset_map = {}; check_jumps = {}
     while i1 < len(tokens1):
         if i2 >= len(tokens2):
             if len(tokens1) == len(tokens2) + 2 \
-                  and tokens1[-1].type == 'RETURN_VALUE' \
-                  and tokens1[-2].type == 'LOAD_CONST' \
+                  and tokens1[-1].kind == 'RETURN_VALUE' \
+                  and tokens1[-2].kind == 'LOAD_CONST' \
                   and tokens1[-2].pattr is None \
-                  and tokens1[-3].type == 'RETURN_VALUE':
+                  and tokens1[-3].kind == 'RETURN_VALUE':
                 break
             else:
                 raise CmpErrorCodeLen(name, tokens1, tokens2)
@@ -265,13 +265,13 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
                 raise CmpErrorCode(name, tokens1[idx1].offset, tokens1[idx1],
                                    tokens2[idx2], tokens1, tokens2)
 
-        if tokens1[i1].type != tokens2[i2].type:
-            if tokens1[i1].type == 'LOAD_CONST' == tokens2[i2].type:
+        if tokens1[i1].kind != tokens2[i2].kind:
+            if tokens1[i1].kind == 'LOAD_CONST' == tokens2[i2].kind:
                 i = 1
-                while tokens1[i1+i].type == 'LOAD_CONST':
+                while tokens1[i1+i].kind == 'LOAD_CONST':
                     i += 1
-                if tokens1[i1+i].type.startswith(('BUILD_TUPLE', 'BUILD_LIST')) \
-                      and i == int(tokens1[i1+i].type.split('_')[-1]):
+                if tokens1[i1+i].kind.startswith(('BUILD_TUPLE', 'BUILD_LIST')) \
+                      and i == int(tokens1[i1+i].kind.split('_')[-1]):
                     t = tuple([ elem.pattr for elem in tokens1[i1:i1+i] ])
                     if t != tokens2[i2].pattr:
                         raise CmpErrorCode(name, tokens1[i1].offset, tokens1[i1],
@@ -279,60 +279,60 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
                     i1 += i + 1
                     i2 += 1
                     continue
-                elif i == 2 and tokens1[i1+i].type == 'ROT_TWO' and tokens2[i2+1].type == 'UNPACK_SEQUENCE_2':
+                elif i == 2 and tokens1[i1+i].kind == 'ROT_TWO' and tokens2[i2+1].kind == 'UNPACK_SEQUENCE_2':
                     i1 += 3
                     i2 += 2
                     continue
-                elif i == 2 and tokens1[i1+i].type in BIN_OP_FUNCS:
-                    f = BIN_OP_FUNCS[tokens1[i1+i].type]
+                elif i == 2 and tokens1[i1+i].kind in BIN_OP_FUNCS:
+                    f = BIN_OP_FUNCS[tokens1[i1+i].kind]
                     if f(tokens1[i1].pattr, tokens1[i1+1].pattr) == tokens2[i2].pattr:
                         i1 += 3
                         i2 += 1
                         continue
-            elif tokens1[i1].type == 'UNARY_NOT':
-                if tokens2[i2].type == 'POP_JUMP_IF_TRUE':
-                    if tokens1[i1+1].type == 'POP_JUMP_IF_FALSE':
+            elif tokens1[i1].kind == 'UNARY_NOT':
+                if tokens2[i2].kind == 'POP_JUMP_IF_TRUE':
+                    if tokens1[i1+1].kind == 'POP_JUMP_IF_FALSE':
                         i1 += 2
                         i2 += 1
                         continue
-                elif tokens2[i2].type == 'POP_JUMP_IF_FALSE':
-                    if tokens1[i1+1].type == 'POP_JUMP_IF_TRUE':
+                elif tokens2[i2].kind == 'POP_JUMP_IF_FALSE':
+                    if tokens1[i1+1].kind == 'POP_JUMP_IF_TRUE':
                         i1 += 2
                         i2 += 1
                         continue
-            elif tokens1[i1].type in ('JUMP_FORWARD', 'JUMP_BACK') \
-                  and tokens1[i1-1].type == 'RETURN_VALUE' \
-                  and tokens2[i2-1].type in ('RETURN_VALUE', 'RETURN_END_IF') \
+            elif tokens1[i1].kind in ('JUMP_FORWARD', 'JUMP_BACK') \
+                  and tokens1[i1-1].kind == 'RETURN_VALUE' \
+                  and tokens2[i2-1].kind in ('RETURN_VALUE', 'RETURN_END_IF') \
                   and int(tokens1[i1].offset) not in targets1:
                 i1 += 1
                 continue
-            elif tokens1[i1].type == 'JUMP_FORWARD' and tokens2[i2].type == 'JUMP_BACK' \
-                  and tokens1[i1+1].type == 'JUMP_BACK' and tokens2[i2+1].type == 'JUMP_BACK' \
+            elif tokens1[i1].kind == 'JUMP_FORWARD' and tokens2[i2].kind == 'JUMP_BACK' \
+                  and tokens1[i1+1].kind == 'JUMP_BACK' and tokens2[i2+1].kind == 'JUMP_BACK' \
                   and int(tokens1[i1].pattr) == int(tokens1[i1].offset) + 3:
                 if int(tokens1[i1].pattr) == int(tokens1[i1+1].offset):
                     i1 += 2
                     i2 += 2
                     continue
-            elif tokens1[i1].type == 'LOAD_NAME' and tokens2[i2].type == 'LOAD_CONST' \
+            elif tokens1[i1].kind == 'LOAD_NAME' and tokens2[i2].kind == 'LOAD_CONST' \
                   and tokens1[i1].pattr == 'None' and tokens2[i2].pattr is None:
                 pass
-            elif tokens1[i1].type == 'LOAD_GLOBAL' and tokens2[i2].type == 'LOAD_NAME' \
+            elif tokens1[i1].kind == 'LOAD_GLOBAL' and tokens2[i2].kind == 'LOAD_NAME' \
                   and tokens1[i1].pattr == tokens2[i2].pattr:
                 pass
-            elif tokens1[i1].type == 'LOAD_ASSERT' and tokens2[i2].type == 'LOAD_NAME' \
+            elif tokens1[i1].kind == 'LOAD_ASSERT' and tokens2[i2].kind == 'LOAD_NAME' \
                   and tokens1[i1].pattr == tokens2[i2].pattr:
                 pass
-            elif (tokens1[i1].type == 'RETURN_VALUE' and
-                  tokens2[i2].type == 'RETURN_END_IF'):
+            elif (tokens1[i1].kind == 'RETURN_VALUE' and
+                  tokens2[i2].kind == 'RETURN_END_IF'):
                 pass
-            elif (tokens1[i1].type == 'BUILD_TUPLE_0' and
+            elif (tokens1[i1].kind == 'BUILD_TUPLE_0' and
                   tokens2[i2].pattr == ()):
                 pass
             else:
                 raise CmpErrorCode(name, tokens1[i1].offset, tokens1[i1],
                                    tokens2[i2], tokens1, tokens2)
-        elif tokens1[i1].type in JUMP_OPS and tokens1[i1].pattr != tokens2[i2].pattr:
-            if tokens1[i1].type == 'JUMP_BACK':
+        elif tokens1[i1].kind in JUMP_OPS and tokens1[i1].pattr != tokens2[i2].pattr:
+            if tokens1[i1].kind == 'JUMP_BACK':
                 dest1 = int(tokens1[i1].pattr)
                 dest2 = int(tokens2[i2].pattr)
                 if offset_map[dest1] != dest2:
@@ -387,28 +387,28 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
 class Token(scanner.Token):
     """Token class with changed semantics for 'cmp()'."""
     def __cmp__(self, o):
-        t = self.type # shortcut
-        if t == 'BUILD_TUPLE_0' and o.type == 'LOAD_CONST' and o.pattr == ():
+        t = self.kind # shortcut
+        if t == 'BUILD_TUPLE_0' and o.kind == 'LOAD_CONST' and o.pattr == ():
            return 0
-        if t == 'COME_FROM' == o.type:
+        if t == 'COME_FROM' == o.kind:
            return 0
-        if t == 'PRINT_ITEM_CONT' and o.type == 'PRINT_ITEM':
+        if t == 'PRINT_ITEM_CONT' and o.kind == 'PRINT_ITEM':
            return 0
-        if t == 'RETURN_VALUE' and o.type == 'RETURN_END_IF':
+        if t == 'RETURN_VALUE' and o.kind == 'RETURN_END_IF':
            return 0
-        if t == 'JUMP_IF_FALSE_OR_POP' and o.type == 'POP_JUMP_IF_FALSE':
+        if t == 'JUMP_IF_FALSE_OR_POP' and o.kind == 'POP_JUMP_IF_FALSE':
            return 0
        if JUMP_OPS and t in JUMP_OPS:
            # ignore offset
-           return t == o.type
-        return (t == o.type) or self.pattr == o.pattr
+           return t == o.kind
+        return (t == o.kind) or self.pattr == o.pattr
 
     def __repr__(self):
-        return '%s %s (%s)' % (str(self.type), str(self.attr),
+        return '%s %s (%s)' % (str(self.kind), str(self.attr),
                                repr(self.pattr))
 
     def __str__(self):
-        return '%s\t%-17s %r' % (self.offset, self.type, self.pattr)
+        return '%s\t%-17s %r' % (self.offset, self.kind, self.pattr)
 
 def compare_code_with_srcfile(pyc_filename, src_filename, weak_verify=False):
     """Compare a .pyc with a source code file."""
@@ -442,4 +442,4 @@ if __name__ == '__main__':
     t2 = Token('LOAD_CONST', -421, 'code_object _expandLang', 55)
     print(repr(t1))
     print(repr(t2))
-    print(t1.type == t2.type, t1.attr == t2.attr)
+    print(t1.kind == t2.kind, t1.attr == t2.attr)
diff --git a/uncompyle6/version.py b/uncompyle6/version.py
index f6cb5567..83bf3740 100644
--- a/uncompyle6/version.py
+++ b/uncompyle6/version.py
@@ -1,3 +1,3 @@
 # This file is suitable for sourcing inside bash as
 # well as importing into Python
-VERSION='2.12.0'
+VERSION='2.13.0'
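
Reviewer note (not part of the patch): the bulk of this diff is a mechanical rename of the scanner Token attribute `.type` to `.kind`, plus the camelCase-to-snake_case parser renames (`checkGrammar()` to `check_grammar()`, `dumpGrammar()` to `dump_grammar()`) that accompany the bump to spark-parser >= 1.7.0. Any code outside this tree that inspects uncompyle6 tokens has to follow the rename. A minimal sketch of the post-patch usage, based only on the Token constructor, `__eq__`, and filtering idioms shown in the hunks above; the token values here are made up for illustration:

    # Python sketch of the renamed Token API after this patch (values are hypothetical).
    from uncompyle6.scanners.tok import Token

    t = Token('LOAD_CONST', attr=42, pattr='42', offset=0)
    assert t.kind == 'LOAD_CONST'   # was t.type before 2.13.0
    assert t == 'LOAD_CONST'        # __eq__ against a string still compares the opname
    # The filtering idiom used in scanner22.py and verify.py above:
    tokens = [tok for tok in [t] if tok.kind != 'COME_FROM']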