python-uncompyle6 (mirror of https://github.com/rocky/python-uncompyle6.git)

Start Python3 slices. Split off token routine.
@@ -23,7 +23,7 @@ check:
 check-2.7: check-bytecode check-2.7-ok
 
 #: Run working tests from Python 3.4
-check-3.4: check-bytecode
+check-3.4: check-bytecode check-bytecode-3.4
 	$(PYTHON) test_pythonlib.py --bytecode-3.4
 
 #: Check deparsing only, but from a different Python version
@@ -46,6 +46,10 @@ check-bytecode-2.7:
 check-bytecode-3.2:
 	$(PYTHON) test_pythonlib.py --bytecode-3.2
 
+#: Check deparsing Python 3.2
+check-bytecode-3.4:
+	$(PYTHON) test_pythonlib.py --bytecode-3.2
+
 #: short tests for bytecodes only for this version of Python
 check-native-short:
 	$(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION) --verify $(COMPILE)

test/simple-source/slice/01-slice.py (new file, 2 lines)
@@ -0,0 +1,2 @@
+ary = [1,2,3]
+ary[:2]
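
For context (this note is not part of the commit): on CPython 3.x the second line compiles to a BUILD_SLICE/BINARY_SUBSCR sequence rather than Python 2's single SLICE+2 opcode, which is exactly the shape the new Python 3 slice handling has to decompile. An illustrative way to inspect that bytecode with the standard library:

    # Illustrative only; standard-library dis, run under Python 3.
    import dis

    src = "ary = [1,2,3]\nary[:2]"
    dis.dis(compile(src, "01-slice.py", "exec"))
    # On CPython 3.4 the slice statement comes out roughly as:
    #   LOAD_NAME ary, LOAD_CONST None, LOAD_CONST 2, BUILD_SLICE 2, BINARY_SUBSCR
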
@@ -69,6 +69,9 @@ def_op('BINARY_TRUE_DIVIDE', 27)
 def_op('INPLACE_FLOOR_DIVIDE', 28)
 def_op('INPLACE_TRUE_DIVIDE', 29)
 
+# Gone from Python 3 are
+# Python 2's SLICE+0 .. SLICE+3
+
 def_op('STORE_MAP', 54)
 def_op('INPLACE_ADD', 55)
 def_op('INPLACE_SUBTRACT', 56)
@@ -75,6 +75,9 @@ def_op('BINARY_TRUE_DIVIDE', 27)
 def_op('INPLACE_FLOOR_DIVIDE', 28)
 def_op('INPLACE_TRUE_DIVIDE', 29)
 
+# Gone from Python 3 are
+# Python 2's SLICE+0 .. SLICE+3
+
 def_op('STORE_MAP', 54)
 def_op('INPLACE_ADD', 55)
 def_op('INPLACE_SUBTRACT', 56)
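
Both hunks add the same reminder to per-version opcode tables. The underlying fact can be checked against the standard library on any Python 3 interpreter (illustrative, not part of the commit):

    # Python 3's opcode map has no SLICE+0 .. SLICE+3; slicing goes through BUILD_SLICE.
    import opcode

    print([name for name in opcode.opmap if name.startswith('SLICE')])  # -> []
    print('BUILD_SLICE' in opcode.opmap)                                # -> True
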
@@ -1,5 +1,6 @@
 import sys
 from uncompyle6 import PYTHON3
+from uncompyle6.scanners.tok import NoneToken
 
 if PYTHON3:
     intern = sys.intern
@@ -13,6 +14,11 @@ class AST(UserList):
         self.type = intern(type)
         UserList.__init__(self, kids)
 
+    def isNone(self):
+        """An AST None token. We can't use regular list comparisons
+        because AST token offsets might be different"""
+        return len(self.data) == 1 and self.data[0] == NoneToken
+
     def __getslice__(self, low, high): return self.data[low:high]
 
     def __eq__(self, o):
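
A minimal sketch of what the new isNone() helper is for, assuming the modules added and imported in this commit (uncompyle6.parsers.astnode and uncompyle6.scanners.tok) are importable; this is illustrative, not code from the commit:

    from uncompyle6.parsers.astnode import AST
    from uncompyle6.scanners.tok import Token, NoneToken

    # An 'expr' node whose only child is a LOAD_CONST of None, seen at some offset.
    node = AST('expr', [Token('LOAD_CONST', attr=None, pattr=None, offset=12)])
    print(node.isNone())   # True: exactly one child, and Token.__eq__ ignores offsets

This is what lets the semantic walker further down replace comparisons against the NONE pattern with node[N].isNone(), even though token offsets differ from one program to the next.
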
@@ -35,7 +35,6 @@ class Python3Parser(PythonParser):
                 self.new_rules.add(rule)
                 self.addRule(rule, nop_func)
                 customize[opname] = count
-                # print("XXXX" , rule)
                 pass
         return
 
@@ -578,14 +577,7 @@ class Python3Parser(PythonParser):
 
         load_attr ::= expr LOAD_ATTR
         get_iter ::= expr GET_ITER
-        slice0 ::= expr SLICE+0
-        slice0 ::= expr DUP_TOP SLICE+0
-        slice1 ::= expr expr SLICE+1
-        slice1 ::= expr expr DUP_TOPX_2 SLICE+1
-        slice2 ::= expr expr SLICE+2
-        slice2 ::= expr expr DUP_TOPX_2 SLICE+2
-        slice3 ::= expr expr expr SLICE+3
-        slice3 ::= expr expr expr DUP_TOPX_3 SLICE+3
+
         buildslice3 ::= expr expr expr BUILD_SLICE_3
         buildslice2 ::= expr expr BUILD_SLICE_2
 
@@ -15,10 +15,10 @@ for later use in deparsing.
 
 from __future__ import print_function
 
-
 import sys
 
 from uncompyle6 import PYTHON3
+from uncompyle6.scanners.tok import Token
 
 # FIXME: DRY
 if PYTHON3:
@@ -27,54 +27,14 @@ if PYTHON3:
 
     def cmp(a, b):
         return (a > b) - (a < b)
 
     def long(l): l
 else:
     L65536 = long(65536) # NOQA
 
 from uncompyle6.opcodes import opcode_25, opcode_26, opcode_27, opcode_32, opcode_34
+
+
-class Token:
-    """
-    Class representing a byte-code token.
-
-    A byte-code token is equivalent to Python 3's dis.instruction or
-    the contents of one line as output by dis.dis().
-    """
-    # FIXME: match Python 3.4's terms:
-    # type_ should be opname
-    # linestart = starts_line
-    # attr = argval
-    # pattr = argrepr
-    def __init__(self, type_, attr=None, pattr=None, offset=-1, linestart=None):
-        self.type = intern(type_)
-        self.attr = attr
-        self.pattr = pattr
-        self.offset = offset
-        self.linestart = linestart
-
-    def __cmp__(self, o):
-        if isinstance(o, Token):
-            # both are tokens: compare type and pattr
-            return cmp(self.type, o.type) or cmp(self.pattr, o.pattr)
-        else:
-            return cmp(self.type, o)
-
-    def __repr__(self):
-        return str(self.type)
-
-    def __str__(self):
-        pattr = self.pattr if self.pattr is not None else ''
-        if self.linestart:
-            return '\n%4d %6s\t%-17s %r' % (self.linestart, self.offset, self.type, pattr)
-        else:
-            return ' %6s\t%-17s %r' % (self.offset, self.type, pattr)
-
-    def __hash__(self):
-        return hash(self.type)
-
-    def __getitem__(self, i):
-        raise IndexError
-
 class Code:
     '''
     Class for representing code-objects.

uncompyle6/scanners/tok.py (new file, 51 lines)
@@ -0,0 +1,51 @@
+import sys
+from uncompyle6 import PYTHON3
+
+if PYTHON3:
+    intern = sys.intern
+
+class Token:
+    """
+    Class representing a byte-code token.
+
+    A byte-code token is equivalent to Python 3's dis.instruction or
+    the contents of one line as output by dis.dis().
+    """
+    # FIXME: match Python 3.4's terms:
+    # type_ should be opname
+    # linestart = starts_line
+    # attr = argval
+    # pattr = argrepr
+    def __init__(self, type_, attr=None, pattr=None, offset=-1, linestart=None):
+        self.type = intern(type_)
+        self.attr = attr
+        self.pattr = pattr
+        self.offset = offset
+        self.linestart = linestart
+
+    def __eq__(self, o):
+        """ '==', but it's okay if offsets and linestarts are different"""
+        if isinstance(o, Token):
+            # Both are tokens: compare type and attr
+            # It's okay if offsets are different
+            return (self.type == o.type) and (self.pattr == o.pattr)
+        else:
+            return self.type == o
+
+    def __repr__(self):
+        return str(self.type)
+
+    def __str__(self):
+        pattr = self.pattr if self.pattr is not None else ''
+        if self.linestart:
+            return '\n%4d %6s\t%-17s %r' % (self.linestart, self.offset, self.type, pattr)
+        else:
+            return ' %6s\t%-17s %r' % (self.offset, self.type, pattr)
+
+    def __hash__(self):
+        return hash(self.type)
+
+    def __getitem__(self, i):
+        raise IndexError
+
+NoneToken = Token('LOAD_CONST', offset=-1, attr=None, pattr=None)
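
The behaviour this new Token class is built around: two tokens compare equal when opname and pattr match, offsets and line starts are ignored, and a token also compares equal to a bare opname string. A small illustration (assumes this commit's uncompyle6.scanners.tok is importable; not part of the diff):

    from uncompyle6.scanners.tok import Token, NoneToken

    t1 = Token('LOAD_CONST', attr=None, pattr=None, offset=0)
    t2 = Token('LOAD_CONST', attr=None, pattr=None, offset=30, linestart=5)
    print(t1 == t2)            # True: offsets and line starts don't matter
    print(t1 == 'LOAD_CONST')  # True: falls back to comparing the type string
    print(t1 == NoneToken)     # True: the check AST.isNone() relies on
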
@@ -69,8 +69,9 @@ from uncompyle6 import PYTHON3
 from uncompyle6.parser import get_python_parser
 from uncompyle6.parsers.astnode import AST
 from uncompyle6.parsers.spark import GenericASTTraversal
+from uncompyle6.scanner import Code, get_scanner
+from uncompyle6.scanners.tok import Token, NoneToken
 import uncompyle6.parser as python_parser
-from uncompyle6.scanner import Token, Code, get_scanner
 
 if PYTHON3:
     from itertools import zip_longest
@@ -91,7 +92,7 @@ RETURN_LOCALS = AST('return_stmt',
                     Token('RETURN_VALUE')])
 
 
-NONE = AST('expr', [ Token('LOAD_CONST', pattr=None) ] )
+NONE = AST('expr', [ NoneToken ] )
 
 RETURN_NONE = AST('stmt',
                   [ AST('return_stmt',
@@ -666,13 +667,13 @@ class Walker(GenericASTTraversal, object):
     def n_buildslice3(self, node):
         p = self.prec
         self.prec = 100
-        if node[0] != NONE:
+        if not node[0].isNone():
             self.preorder(node[0])
         self.write(':')
-        if node[1] != NONE:
+        if not node[1].isNone():
             self.preorder(node[1])
         self.write(':')
-        if node[2] != NONE:
+        if not node[2].isNone():
             self.preorder(node[2])
         self.prec = p
         self.prune() # stop recursing
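
buildslice3 corresponds to a three-part slice (start:stop:step). On CPython 3.4 an omitted bound is compiled as LOAD_CONST None, so the walker now asks each child whether it is such a None token via isNone() and, when it is, writes nothing between the colons. An illustrative check with the standard dis module (not part of the commit):

    import dis

    dis.dis(compile("x[1:2:3]", "<s>", "eval"))  # three real bounds -> BUILD_SLICE 3
    dis.dis(compile("x[::2]", "<s>", "eval"))    # omitted bounds load None; the walker
                                                 # skips them, producing 'x[::2]' again
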
@@ -680,21 +681,14 @@ class Walker(GenericASTTraversal, object):
     def n_buildslice2(self, node):
         p = self.prec
         self.prec = 100
-        if node[0] != NONE:
+        if not node[0].isNone():
             self.preorder(node[0])
         self.write(':')
-        if node[1] != NONE:
+        if not node[1].isNone():
             self.preorder(node[1])
         self.prec = p
         self.prune() # stop recursing
 
-    # def n_l_stmts(self, node):
-    #     if node[0] == '_stmts':
-    #         if len(node[0]) >= 2 and node[0][1] == 'stmt':
-    #             if node[0][-1][0] == 'continue_stmt':
-    #                 del node[0][-1]
-    #     self.default(node)
-
     def n_expr(self, node):
         p = self.prec
         if node[0].type.startswith('binary_expr'):
@@ -781,7 +775,7 @@ class Walker(GenericASTTraversal, object):
         """
         self.write(self.indent, 'exec ')
         self.preorder(node[0])
-        if node[1][0] != NONE:
+        if not node[1][0].isNone():
             sep = ' in '
             for subnode in node[1]:
                 self.write(sep); sep = ", "