Merge branch 'python-3.3-to-3.5' into python-2.4

rocky
2022-05-14 10:43:42 -04:00
39 changed files with 2160 additions and 357 deletions


@@ -38,7 +38,7 @@ jobs:
command: | # Use pip to install dependencies
sudo easy_install click==7.1.2
# Until next release use github xdis
sudo pip install git+git://github.com/rocky/python-xdis.git@python-2.4-to-2.7#egg=xdis
sudo pip install git+https://github.com/rocky/python-xdis@python-2.4-to-2.7#egg=xdis
sudo pip install -e .
sudo pip install -r requirements-dev.txt


@@ -54,6 +54,41 @@ def test_grammar():
expect_lhs.add("except_handler_else")
expect_lhs.add("kwarg")
expect_lhs.add("load_genexpr")
unused_rhs = unused_rhs.union(
set(
"""
except_pop_except generator_exp
""".split()
)
)
if PYTHON_VERSION_TRIPLE < (3, 7):
expect_lhs.add("annotate_arg")
expect_lhs.add("annotate_tuple")
unused_rhs.add("mkfunc_annotate")
unused_rhs.add("dict_comp")
unused_rhs.add("classdefdeco1")
unused_rhs.add("tryelsestmtl")
if PYTHON_VERSION_TRIPLE >= (3, 5):
expect_right_recursive.add(
(("l_stmts", ("lastl_stmt", "come_froms", "l_stmts")))
)
pass
pass
if PYTHON_VERSION_TRIPLE >= (3, 7):
expect_lhs.add("set_for")
unused_rhs.add("set_iter")
pass
pass
# FIXME
if PYTHON_VERSION_TRIPLE < (3, 8):
assert expect_lhs == set(lhs)
assert unused_rhs == set(rhs)
assert expect_right_recursive == right_recursive
expect_dup_rhs = frozenset(

BIN test/bytecode_1.0/stat.pyc (new file), plus several other binary bytecode test files; binary content not shown.

@@ -0,0 +1,8 @@
from test_support import *
print '4. Built-in functions'
print 'test_b1'
unload('test_b1')
import test_b1
print 'test_b2'
unload('test_b2')
import test_b2


@@ -0,0 +1,48 @@
# These are from 3.6 test_coroutines.py
async def run_gen(f):
return (10 async for i in f)
async def run_list(f):
return [i async for i in f()]
# async def run_dict():
# return {i + 1 async for i in [10, 20]}
async def iterate(gen):
res = []
async for i in gen:
res.append(i)
return res
def test_comp_5(f):
# async def f(it):
# for i in it:
# yield i
async def run_list():
return [i
for
pair in
([10, 20])
async for i
in f
]
async def test2(x, buffer, f):
with x:
async for i in f:
if i:
break
else:
buffer()
buffer()
async def test3(x, buffer, f):
with x:
async for i in f:
if i:
continue
buffer()
else:
buffer.append()
buffer()


@@ -0,0 +1,721 @@
# Long lists can pose a slowdown in decompiling.
"This program is self-checking!"
# Try an empty list to check that long-matching detection doesn't mess that up.
# In theory this should work even though we put a cap on short lists, which
# is checked below.
x = []
assert len(x) == 0 and isinstance(x, list)
# Try a short list to check that long-matching detection doesn't mess that up.
# This is a more general situation of the above.
x = [1, 1, 1]
# Until we have better "and" rules (which exist in other work, but not in
# decompyle3 or uncompyle6 yet), avoid 3-term "and"s.
assert len(x) == 3
assert isinstance(x, list) and all(x)
# fmt: off
# Try a long list. This should not be slow
# as it has been in the past.
x = [
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
]
assert all(x)
assert len(x) == 300 and isinstance(x, list)
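# A minimal illustration of the bytecode shape involved (editorial aside,
# not part of the original test; assumes a CPython where list displays
# compile to a run of LOAD_CONST ops plus one BUILD_LIST):
import dis
dis.dis(compile("[1, 1, 1]", "<demo>", "eval"))
# prints LOAD_CONST 1 three times, then BUILD_LIST 3, then RETURN_VALUE;
# the 300-element list above is the same pattern with 300 LOAD_CONSTs.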
# Python before 2.7 doesn't have set literals
# # Try a long set. This should not be slow
# # as it has been in the past.
# x = {
# 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
# 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
# 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
# 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
# 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
# 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
# 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
# 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
# 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
# }
# assert x == {1} and isinstance(x, set)
# Try using variables rather than constants
a = 1
# First, a list
x = [
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
a, a, a, a, a, a, a, a, a, a,
]
assert all(x)
assert len(x) == 300 and isinstance(x, list)
# Next, a set
# x = {
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# a, a, a, a, a, a, a, a, a, a,
# }
# assert x == {1} and isinstance(x, set)
# Check some dictionary keys.
# Ensure that in a dictionary we produce quoted string keys
x = {
"b": 1,
"c": 2,
"e": 3,
"g": 6,
"h": 7,
"j": 9,
"k": 11,
"return": 12,
}
assert sorted(x.keys()) == ["b", "c", "e", "g", "h", "j", "k", "return"]
# Ensure that in a dictionary we produce integer keys, not strings
x = {1: 2, 3: 4}
assert tuple(x.keys()) == (1, 3)
# Try a long dictionary.
# This should not be slow as it has been in the past
values = {
"value1": x, # Note this is LOAD_NAME
"value2": 2 + 1, # Constant should be folded into "LOAD_CONST"
"value3": 3 + 1,
"value4": 4 + 1,
"value5": 5 + 1,
"value6": 6 + 1,
"value7": 7 + 1,
"value8": 8 + 1,
"value9": 9 + 1,
"value10": 10 + 1,
"value11": 11 + 1,
"value12": 12 + 1,
"value13": 13 + 1,
"value14": 14 + 1,
"value15": 15 + 1,
"value16": 16 + 1,
"value17": 17 + 1,
"value18": 18 + 1,
"value19": 19 + 1,
"value20": 20 + 1,
"value21": 21 + 1,
"value22": 22 + 1,
"value23": 23 + 1,
"value24": 24 + 1,
"value25": 25 + 1,
"value26": 26 + 1,
"value27": 27 + 1,
"value28": 28 + 1,
"value29": 29 + 1,
"value30": 30 + 1,
"value31": 31 + 1,
"value32": 32 + 1,
"value33": 33 + 1,
"value34": 34 + 1,
"value35": 35 + 1,
"value36": 36 + 1,
"value37": 37 + 1,
"value38": 38 + 1,
"value39": 39 + 1,
"value40": 40 + 1,
"value41": 41 + 1,
"value42": 42 + 1,
"value43": 43 + 1,
"value44": 44 + 1,
"value45": 45 + 1,
"value46": 46 + 1,
"value47": 47 + 1,
"value48": 48 + 1,
"value49": 49 + 1,
"value50": 50 + 1,
"value51": 51 + 1,
"value52": 52 + 1,
"value53": 53 + 1,
"value54": 54 + 1,
"value55": 55 + 1,
"value56": 56 + 1,
"value57": 57 + 1,
"value58": 58 + 1,
"value59": 59 + 1,
"value60": 60 + 1,
"value61": 61 + 1,
"value62": 62 + 1,
"value63": 63 + 1,
"value64": 64 + 1,
"value65": 65 + 1,
"value66": 66 + 1,
"value67": 67 + 1,
"value68": 68 + 1,
"value69": 69 + 1,
"value70": 70 + 1,
"value71": 71 + 1,
"value72": 72 + 1,
"value73": 73 + 1,
"value74": 74 + 1,
"value75": 75 + 1,
"value76": 76 + 1,
"value77": 77 + 1,
"value78": 78 + 1,
"value79": 79 + 1,
"value80": 80 + 1,
"value81": 81 + 1,
"value82": 82 + 1,
"value83": 83 + 1,
"value84": 84 + 1,
"value85": 85 + 1,
"value86": 86 + 1,
"value87": 87 + 1,
"value88": 88 + 1,
"value89": 89 + 1,
"value90": 90 + 1,
"value91": 91 + 1,
"value92": 92 + 1,
"value93": 93 + 1,
"value94": 94 + 1,
"value95": 95 + 1,
"value96": 96 + 1,
"value97": 97 + 1,
"value98": 98 + 1,
"value99": 99 + 1,
"value100": 100 + 1,
"value101": 101 + 1,
"value102": 102 + 1,
"value103": 103 + 1,
"value104": 104 + 1,
"value105": 105 + 1,
"value106": 106 + 1,
"value107": 107 + 1,
"value108": 108 + 1,
"value109": 109 + 1,
"value110": 110 + 1,
"value111": 111 + 1,
"value112": 112 + 1,
"value113": 113 + 1,
"value114": 114 + 1,
"value115": 115 + 1,
"value116": 116 + 1,
"value117": 117 + 1,
"value118": 118 + 1,
"value119": 119 + 1,
"value120": 120 + 1,
"value121": 121 + 1,
"value122": 122 + 1,
"value123": 123 + 1,
"value124": 124 + 1,
"value125": 125 + 1,
"value126": 126 + 1,
"value127": 127 + 1,
"value128": 128 + 1,
"value129": 129 + 1,
"value130": 130 + 1,
"value131": 131 + 1,
"value132": 132 + 1,
"value133": 133 + 1,
"value134": 134 + 1,
"value135": 135 + 1,
"value136": 136 + 1,
"value137": 137 + 1,
"value138": 138 + 1,
"value139": 139 + 1,
"value140": 140 + 1,
"value141": 141 + 1,
"value142": 142 + 1,
"value143": 143 + 1,
"value144": 144 + 1,
"value145": 145 + 1,
"value146": 146 + 1,
"value147": 147 + 1,
"value148": 148 + 1,
"value149": 149 + 1,
"value150": 150 + 1,
"value151": 151 + 1,
"value152": 152 + 1,
"value153": 153 + 1,
"value154": 154 + 1,
"value155": 155 + 1,
"value156": 156 + 1,
"value157": 157 + 1,
"value158": 158 + 1,
"value159": 159 + 1,
"value160": 160 + 1,
"value161": 161 + 1,
"value162": 162 + 1,
"value163": 163 + 1,
"value164": 164 + 1,
"value165": 165 + 1,
"value166": 166 + 1,
"value167": 167 + 1,
"value168": 168 + 1,
"value169": 169 + 1,
"value170": 170 + 1,
"value171": 171 + 1,
"value172": 172 + 1,
"value173": 173 + 1,
"value174": 174 + 1,
"value175": 175 + 1,
"value176": 176 + 1,
"value177": 177 + 1,
"value178": 178 + 1,
"value179": 179 + 1,
"value180": 180 + 1,
"value181": 181 + 1,
"value182": 182 + 1,
"value183": 183 + 1,
"value184": 184 + 1,
"value185": 185 + 1,
"value186": 186 + 1,
"value187": 187 + 1,
"value188": 188 + 1,
"value189": 189 + 1,
"value190": 190 + 1,
"value191": 191 + 1,
"value192": 192 + 1,
"value193": 193 + 1,
"value194": 194 + 1,
"value195": 195 + 1,
"value196": 196 + 1,
"value197": 197 + 1,
"value198": 198 + 1,
"value199": 199 + 1,
"value200": 200 + 1,
"value201": 201 + 1,
"value202": 202 + 1,
"value203": 203 + 1,
"value204": 204 + 1,
"value205": 205 + 1,
"value206": 206 + 1,
"value207": 207 + 1,
"value208": 208 + 1,
"value209": 209 + 1,
"value210": 210 + 1,
"value211": 211 + 1,
"value212": 212 + 1,
"value213": 213 + 1,
"value214": 214 + 1,
"value215": 215 + 1,
"value216": 216 + 1,
"value217": 217 + 1,
"value218": 218 + 1,
"value219": 219 + 1,
"value220": 220 + 1,
"value221": 221 + 1,
"value222": 222 + 1,
"value223": 223 + 1,
"value224": 224 + 1,
"value225": 225 + 1,
"value226": 226 + 1,
"value227": 227 + 1,
"value228": 228 + 1,
"value229": 229 + 1,
"value230": 230 + 1,
"value231": 231 + 1,
"value232": 232 + 1,
"value233": 233 + 1,
"value234": 234 + 1,
"value235": 235 + 1,
"value236": 236 + 1,
"value237": 237 + 1,
"value238": 238 + 1,
"value239": 239 + 1,
"value240": 240 + 1,
"value241": 241 + 1,
"value242": 242 + 1,
"value243": 243 + 1,
"value244": 244 + 1,
"value245": 245 + 1,
"value246": 246 + 1,
"value247": 247 + 1,
"value248": 248 + 1,
"value249": 249 + 1,
"value250": 250 + 1,
"value251": 251 + 1,
"value252": 252 + 1,
"value253": 253 + 1,
"value254": 254 + 1,
"value255": 255 + 1,
"value256": 256 + 1,
"value257": 257 + 1,
"value258": 258 + 1,
"value259": 259 + 1,
"value260": 260 + 1,
"value261": 261 + 1,
"value262": 262 + 1,
"value263": 263 + 1,
"value264": 264 + 1,
"value265": 265 + 1,
"value266": 266 + 1,
"value267": 267 + 1,
"value268": 268 + 1,
"value269": 269 + 1,
"value270": 270 + 1,
"value271": 271 + 1,
"value272": 272 + 1,
"value273": 273 + 1,
"value274": 274 + 1,
"value275": 275 + 1,
"value276": 276 + 1,
"value277": 277 + 1,
"value278": 278 + 1,
"value279": 279 + 1,
"value280": 280 + 1,
"value281": 281 + 1,
"value282": 282 + 1,
"value283": 283 + 1,
"value284": 284 + 1,
"value285": 285 + 1,
"value286": 286 + 1,
"value287": 287 + 1,
"value288": 288 + 1,
"value289": 289 + 1,
"value290": 290 + 1,
"value291": 291 + 1,
"value292": 292 + 1,
"value293": 293 + 1,
"value294": 294 + 1,
"value295": 295 + 1,
"value296": 296 + 1,
"value297": 297 + 1,
"value298": 298 + 1,
"value299": 299 + 1,
"value300": 300 + 1,
"value301": 301 + 1,
"value302": 302 + 1,
"value303": 303 + 1,
"value304": 304 + 1,
"value305": 305 + 1,
"value306": 306 + 1,
"value307": 307 + 1,
"value308": 308 + 1,
"value309": 309 + 1,
"value310": 310 + 1,
"value311": 311 + 1,
"value312": 312 + 1,
"value313": 313 + 1,
"value314": 314 + 1,
"value315": 315 + 1,
"value316": 316 + 1,
"value317": 317 + 1,
"value318": 318 + 1,
"value319": 319 + 1,
"value320": 320 + 1,
"value321": 321 + 1,
"value322": 322 + 1,
"value323": 323 + 1,
"value324": 324 + 1,
"value325": 325 + 1,
"value326": 326 + 1,
"value327": 327 + 1,
"value328": 328 + 1,
"value329": 329 + 1,
"value330": 330 + 1,
"value331": 331 + 1,
"value332": 332 + 1,
"value333": 333 + 1,
"value334": 334 + 1,
"value335": 335 + 1,
"value336": 336 + 1,
"value337": 337 + 1,
"value338": 338 + 1,
"value339": 339 + 1,
"value340": 340 + 1,
"value341": 341 + 1,
"value342": 342 + 1,
"value343": 343 + 1,
"value344": 344 + 1,
"value345": 345 + 1,
"value346": 346 + 1,
"value347": 347 + 1,
"value348": 348 + 1,
"value349": 349 + 1,
"value350": 350 + 1,
"value351": 351 + 1,
"value352": 352 + 1,
"value353": 353 + 1,
"value354": 354 + 1,
"value355": 355 + 1,
"value356": 356 + 1,
"value357": 357 + 1,
"value358": 358 + 1,
"value359": 359 + 1,
"value360": 360 + 1,
"value361": 361 + 1,
"value362": 362 + 1,
"value363": 363 + 1,
"value364": 364 + 1,
"value365": 365 + 1,
"value366": 366 + 1,
"value367": 367 + 1,
"value368": 368 + 1,
"value369": 369 + 1,
"value370": 370 + 1,
"value371": 371 + 1,
"value372": 372 + 1,
"value373": 373 + 1,
"value374": 374 + 1,
"value375": 375 + 1,
"value376": 376 + 1,
"value377": 377 + 1,
"value378": 378 + 1,
"value379": 379 + 1,
"value380": 380 + 1,
"value381": 381 + 1,
"value382": 382 + 1,
"value383": 383 + 1,
"value384": 384 + 1,
"value385": 385 + 1,
"value386": 386 + 1,
"value387": 387 + 1,
"value388": 388 + 1,
"value389": 389 + 1,
"value390": 390 + 1,
"value391": 391 + 1,
"value392": 392 + 1,
"value393": 393 + 1,
"value394": 394 + 1,
"value395": 395 + 1,
"value396": 396 + 1,
"value397": 397 + 1,
"value398": 398 + 1,
"value399": 399 + 1,
"value400": 400 + 1,
"value401": 401 + 1,
"value402": 402 + 1,
"value403": 403 + 1,
"value404": 404 + 1,
"value405": 405 + 1,
"value406": 406 + 1,
"value407": 407 + 1,
"value408": 408 + 1,
"value409": 409 + 1,
"value410": 410 + 1,
"value411": 411 + 1,
"value412": 412 + 1,
"value413": 413 + 1,
"value414": 414 + 1,
"value415": 415 + 1,
"value416": 416 + 1,
"value417": 417 + 1,
"value418": 418 + 1,
"value419": 419 + 1,
"value420": 420 + 1,
"value421": 421 + 1,
"value422": 422 + 1,
"value423": 423 + 1,
"value424": 424 + 1,
"value425": 425 + 1,
"value426": 426 + 1,
"value427": 427 + 1,
"value428": 428 + 1,
"value429": 429 + 1,
"value430": 430 + 1,
"value431": 431 + 1,
"value432": 432 + 1,
"value433": 433 + 1,
"value434": 434 + 1,
"value435": 435 + 1,
"value436": 436 + 1,
"value437": 437 + 1,
"value438": 438 + 1,
"value439": 439 + 1,
"value440": 440 + 1,
"value441": 441 + 1,
"value442": 442 + 1,
"value443": 443 + 1,
"value444": 444 + 1,
"value445": 445 + 1,
"value446": 446 + 1,
"value447": 447 + 1,
"value448": 448 + 1,
"value449": 449 + 1,
"value450": 450 + 1,
"value451": 451 + 1,
"value452": 452 + 1,
"value453": 453 + 1,
"value454": 454 + 1,
"value455": 455 + 1,
"value456": 456 + 1,
"value457": 457 + 1,
"value458": 458 + 1,
"value459": 459 + 1,
"value460": 460 + 1,
"value461": 461 + 1,
"value462": 462 + 1,
"value463": 463 + 1,
"value464": 464 + 1,
"value465": 465 + 1,
"value466": 466 + 1,
"value467": 467 + 1,
"value468": 468 + 1,
"value469": 469 + 1,
"value470": 470 + 1,
"value471": 471 + 1,
"value472": 472 + 1,
"value473": 473 + 1,
"value474": 474 + 1,
"value475": 475 + 1,
"value476": 476 + 1,
"value477": 477 + 1,
"value478": 478 + 1,
"value479": 479 + 1,
"value480": 480 + 1,
"value481": 481 + 1,
"value482": 482 + 1,
"value483": 483 + 1,
"value484": 484 + 1,
"value485": 485 + 1,
"value486": 486 + 1,
"value487": 487 + 1,
"value488": 488 + 1,
"value489": 489 + 1,
"value490": 490 + 1,
"value491": 491 + 1,
"value492": 492 + 1,
"value493": 493 + 1,
"value494": 494 + 1,
"value495": 495 + 1,
"value496": 496 + 1,
"value497": 497 + 1,
"value498": 498 + 1,
"value499": 499 + 1,
"value500": 500 + 1,
"value501": 501 + 1,
"value502": 502 + 1,
}
assert len(values.values()) == 502
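# The constant-folding note on "value2" above can be checked directly;
# a minimal sketch (editorial, not part of the original test; assumes a
# CPython with the peephole optimizer):
import dis
dis.dis(compile("2 + 1", "<demo>", "eval"))
# shows a single LOAD_CONST 3 rather than
# LOAD_CONST 2 / LOAD_CONST 1 / BINARY_ADD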
# Try a long dictionary where the fast constant matching fails because we
# have a binary op. We could get an expr32 grouping speedup,
# which is still slower than if this were all constant.
# That grouping was not implemented at the time this test was written.
values = {
"value1": a + 1, # This is a binary op not consant
"value2": 2,
"value3": 3,
"value4": 4,
"value5": 5,
"value6": 6,
"value7": 7,
"value8": 8,
"value9": 9,
"value10": 10,
"value11": 11,
"value12": 12,
"value13": 13,
"value14": 14,
"value15": 15,
"value16": 16,
"value17": 17,
"value18": 18,
"value19": 19,
"value20": 20,
"value21": 21,
"value22": 22,
"value23": 23,
"value24": 24,
"value25": 25,
"value26": 26,
"value27": 27,
"value28": 28,
"value29": 29,
"value30": 30,
"value31": 31,
"value32": 32,
"value33": 33,
}
assert len(values.values()) == 33


@@ -13,7 +13,6 @@ SKIP_TESTS=(
[test_aifc.py]=1 #
[test_argparse.py]=1 # it fails on its own
[test_asdl_parser.py]=1 # it fails on its own
[test_asyncgen.py]=1 # parse error
[test_atexit.py]=1 # The atexit test looks for specific comments in error lines
[test_baseexception.py]=1 # test assert error
@@ -40,9 +39,9 @@ SKIP_TESTS=(
[test_collections.py]= # it fails on its own
[test_compile.py]=1 # Code introspects on co_consts in a non-decompilable way
[test_concurrent_futures.py]=1 # Takes long
[test_contextlib.py]=1 # test assertion failure
[test_contextlib_async.py]=1 # Investigate
[test_coroutines.py]=1 # parse error
# [test_coroutines.py]=1 # FIXME: async parse error
[test_curses.py]=1 # Parse error
[test_ctypes.py]=1 # it fails on its own


@@ -1,4 +1,4 @@
# Copyright (c) 2015-2016, 2018-2021 by Rocky Bernstein
# Copyright (c) 2015-2016, 2018-2022 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 1999 John Aycock
@@ -60,8 +60,17 @@ def disco_loop(disasm, queue, real_out):
while len(queue) > 0:
co = queue.popleft()
if co.co_name != "<module>":
real_out.write("\n# %s line %d of %s\n" %
(co.co_name, co.co_firstlineno, co.co_filename))
if hasattr(co, "co_firstlineno"):
real_out.write(
"\n# %s line %d of %s\n"
% (co.co_name, co.co_firstlineno, co.co_filename)
)
else:
real_out.write(
"\n# %s %s\n"
% (co.co_name, co.co_filename)
)
tokens, customize = disasm(co)
for t in tokens:
if iscode(t.pattr):


@@ -75,6 +75,8 @@ class PythonParser(GenericASTBuilder):
"come_from_loops",
# Python 3.7+
"importlist37",
# Python < 1.4
"args_store",
]
self.collect = frozenset(nt_list)


@@ -1,18 +1,33 @@
# Copyright (c) 2018 Rocky Bernstein
# Copyright (c) 2018, 2022 Rocky Bernstein
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from uncompyle6.parser import PythonParserSingle
from uncompyle6.parser import PythonParserSingle, nop_func
from uncompyle6.parsers.parse15 import Python15Parser
class Python14Parser(Python15Parser):
def p_misc14(self, args):
"""
# Not much here yet, but will probably need to add UNARY_CALL, BINARY_CALL,
# RAISE_EXCEPTION, BUILD_FUNCTION, UNPACK_ARG, UNPACK_VARARG, LOAD_LOCAL,
# SET_FUNC_ARGS, and RESERVE_FAST
# Not much here yet, but will probably need to add UNARY_CALL,
# LOAD_LOCAL, SET_FUNC_ARGS
args ::= RESERVE_FAST UNPACK_ARG args_store
args_store ::= STORE_FAST*
call ::= expr tuple BINARY_CALL
expr ::= call
kv ::= DUP_TOP expr ROT_TWO LOAD_CONST STORE_SUBSCR
mkfunc ::= LOAD_CODE BUILD_FUNCTION
print_expr_stmt ::= expr PRINT_EXPR
raise_stmt2 ::= expr expr RAISE_EXCEPTION
star_args ::= RESERVE_FAST UNPACK_VARARG_1 args_store
stmt ::= args
stmt ::= print_expr_stmt
stmt ::= star_args
stmt ::= varargs
varargs ::= RESERVE_FAST UNPACK_VARARG_0 args_store
# Not strictly needed, but tidies up output
stmt ::= doc_junk
doc_junk ::= LOAD_CONST POP_TOP
@@ -42,7 +57,14 @@ class Python14Parser(Python15Parser):
jb_pop
POP_BLOCK else_suitel COME_FROM
""")
self.check_reduce['doc_junk'] = 'tokens'
self.check_reduce["doc_junk"] = "tokens"
for i, token in enumerate(tokens):
opname = token.kind
opname_base = opname[:opname.rfind("_")]
if opname_base == "UNPACK_VARARG":
if token.attr > 1:
self.addRule("star_args ::= RESERVE_FAST %s args_store" % opname, nop_func)
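# As a concrete illustration (hypothetical token, not from a real run):
# if the token stream contained UNPACK_VARARG_2 with token.attr == 2,
# the loop above would register the rule
#   star_args ::= RESERVE_FAST UNPACK_VARARG_2 args_store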
def reduce_is_invalid(self, rule, ast, tokens, first, last):


@@ -427,6 +427,7 @@ class Python26Parser(Python2Parser):
# since the operand can be a relative offset rather than
# an absolute offset.
setup_inst = self.insts[self.offset2inst_index[tokens[first].offset]]
last = min(len(tokens)-1, last)
if self.version <= (2, 2) and tokens[last] == "COME_FROM":
last += 1
return tokens[last-1].off2int() > setup_inst.argval


@@ -65,7 +65,9 @@ class Python3Parser(PythonParser):
list_comp ::= BUILD_LIST_0 list_iter
lc_body ::= expr LIST_APPEND
list_for ::= expr FOR_ITER store list_iter jb_or_c
list_for ::= expr_or_arg
FOR_ITER
store list_iter jb_or_c
# This is seen in PyPy, but possibly it appears on other Python 3?
list_if ::= expr jmp_false list_iter COME_FROM
@@ -77,10 +79,10 @@ class Python3Parser(PythonParser):
stmt ::= set_comp_func
set_comp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER store comp_iter
set_comp_func ::= BUILD_SET_0 LOAD_ARG FOR_ITER store comp_iter
JUMP_BACK RETURN_VALUE RETURN_LAST
set_comp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER store comp_iter
set_comp_func ::= BUILD_SET_0 LOAD_ARG FOR_ITER store comp_iter
COME_FROM JUMP_BACK RETURN_VALUE RETURN_LAST
comp_body ::= dict_comp_body
@@ -88,6 +90,8 @@ class Python3Parser(PythonParser):
dict_comp_body ::= expr expr MAP_ADD
set_comp_body ::= expr SET_ADD
expr_or_arg ::= LOAD_ARG
expr_or_arg ::= expr
# See also common Python p_list_comprehension
"""
@@ -95,7 +99,7 @@ class Python3Parser(PythonParser):
""""
expr ::= dict_comp
stmt ::= dict_comp_func
dict_comp_func ::= BUILD_MAP_0 LOAD_FAST FOR_ITER store
dict_comp_func ::= BUILD_MAP_0 LOAD_ARG FOR_ITER store
comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST
comp_iter ::= comp_if_not
@@ -1058,9 +1062,8 @@ class Python3Parser(PythonParser):
# A PyPy speciality - DRY with parse3
self.addRule(
"""
expr ::= attribute
attribute ::= expr LOOKUP_METHOD
""",
attribute ::= expr LOOKUP_METHOD
""",
nop_func,
)
custom_ops_processed.add(opname)


@@ -1,4 +1,4 @@
# Copyright (c) 2016-2020 Rocky Bernstein
# Copyright (c) 2016-2020, 2022 Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -28,7 +28,13 @@ class Python36Parser(Python35Parser):
self.customized = {}
def p_36misc(self, args):
def p_36_jump(self, args):
"""
# Zero or one COME_FROM
# And/or expressions have this
come_from_opt ::= COME_FROM?
"""
def p_36_misc(self, args):
"""sstmt ::= sstmt RETURN_LAST
# long except clauses in a loop can sometimes cause a JUMP_BACK to turn into a
@@ -66,6 +72,33 @@ class Python36Parser(Python35Parser):
if_exp ::= expr jmp_false expr jf_cf expr COME_FROM
async_for_stmt36 ::= SETUP_LOOP expr
GET_AITER
LOAD_CONST YIELD_FROM
SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK JUMP_BACK COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY for_block
COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP POP_BLOCK
COME_FROM_LOOP
async_for_stmt36 ::= SETUP_LOOP expr
GET_AITER
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY
COME_FROM
for_block
COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP POP_BLOCK
COME_FROM_LOOP
async_for_stmt ::= SETUP_LOOP expr
GET_AITER
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
@@ -79,20 +112,7 @@ class Python36Parser(Python35Parser):
COME_FROM_LOOP
stmt ::= async_for_stmt36
async_for_stmt36 ::= SETUP_LOOP expr
GET_AITER
LOAD_CONST YIELD_FROM
SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK JUMP_BACK COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY for_block
COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT
POP_TOP POP_BLOCK
COME_FROM_LOOP
stmt ::= async_forelse_stmt36
async_forelse_stmt ::= SETUP_LOOP expr
GET_AITER
@@ -106,6 +126,19 @@ class Python36Parser(Python35Parser):
for_block POP_BLOCK
else_suite COME_FROM_LOOP
async_forelse_stmt36 ::= SETUP_LOOP expr
GET_AITER
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY COME_FROM
for_block _come_froms
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
POP_BLOCK
else_suite COME_FROM_LOOP
# Adds a COME_FROM_ASYNC_WITH over 3.5
# FIXME: remove corresponding rule for 3.5?
@@ -158,11 +191,13 @@ class Python36Parser(Python35Parser):
compare_chained2 ::= expr COMPARE_OP come_froms JUMP_FORWARD
stmt ::= genexpr_func
genexpr_func ::= LOAD_ARG _come_froms FOR_ITER store comp_iter JUMP_BACK
"""
# Some of this is duplicated from parse37. Eventually we'll probably rebase from
# that and then we can remove this.
def p_37conditionals(self, args):
def p_36_conditionals(self, args):
"""
expr ::= if_exp37
if_exp37 ::= expr expr jf_cfs expr COME_FROM
@@ -202,7 +237,18 @@ class Python36Parser(Python35Parser):
else_suite COME_FROM_LOOP
""")
self.check_reduce['call_kw'] = 'AST'
self.check_reduce["call_kw"] = "AST"
# Opcode names in the custom_ops_processed set have rules that get added
# unconditionally and the rules are constant. So they need to be done
# only once and if we see the opcode a second we don't have to consider
# adding more rules.
#
# Note: BUILD_TUPLE_UNPACK_WITH_CALL gets considered by
# default because it starts with BUILD. So we'll set to ignore it from
# the start.
custom_ops_processed = set()
for i, token in enumerate(tokens):
opname = token.kind
@@ -287,6 +333,175 @@ class Python36Parser(Python35Parser):
self.addRule(rule, nop_func)
rule = ('starred ::= %s %s' % ('expr ' * v, opname))
self.addRule(rule, nop_func)
elif opname == "GET_AITER":
self.addRule(
"""
expr ::= generator_exp_async
generator_exp_async ::= load_genexpr LOAD_STR MAKE_FUNCTION_0 expr
GET_AITER LOAD_CONST YIELD_FROM CALL_FUNCTION_1
stmt ::= genexpr_func_async
func_async_prefix ::= _come_froms
LOAD_CONST YIELD_FROM
SETUP_EXCEPT GET_ANEXT LOAD_CONST YIELD_FROM
func_async_middle ::= POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT
DUP_TOP LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY COME_FROM
genexpr_func_async ::= LOAD_ARG func_async_prefix
store func_async_middle comp_iter
JUMP_BACK
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
expr ::= list_comp_async
list_comp_async ::= LOAD_LISTCOMP LOAD_STR MAKE_FUNCTION_0
expr GET_AITER
LOAD_CONST YIELD_FROM CALL_FUNCTION_1
GET_AWAITABLE LOAD_CONST
YIELD_FROM
expr ::= list_comp_async
list_afor2 ::= func_async_prefix
store func_async_middle list_iter
JUMP_BACK
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
list_comp_async ::= BUILD_LIST_0 LOAD_ARG list_afor2
get_aiter ::= expr GET_AITER
list_afor ::= get_aiter list_afor2
list_iter ::= list_afor
""",
nop_func,
)
elif opname == "GET_AITER":
self.add_unique_doc_rules("get_aiter ::= expr GET_AITER", customize)
if not {"MAKE_FUNCTION_0", "MAKE_FUNCTION_CLOSURE"} in self.seen_ops:
self.addRule(
"""
expr ::= dict_comp_async
expr ::= generator_exp_async
expr ::= list_comp_async
dict_comp_async ::= LOAD_DICTCOMP
LOAD_STR
MAKE_FUNCTION_0
get_aiter
CALL_FUNCTION_1
dict_comp_async ::= BUILD_MAP_0 LOAD_ARG
dict_comp_async
func_async_middle ::= POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT
DUP_TOP LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY COME_FROM
func_async_prefix ::= _come_froms SETUP_EXCEPT GET_ANEXT LOAD_CONST YIELD_FROM
generator_exp_async ::= load_genexpr LOAD_STR MAKE_FUNCTION_0
get_aiter CALL_FUNCTION_1
genexpr_func_async ::= LOAD_ARG func_async_prefix
store func_async_middle comp_iter
JUMP_LOOP COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
# FIXME: this is a workaround for probably some bug in the Earley parser.
# If we use get_aiter, then list_comp_async doesn't match, and I don't
# understand why.
expr_get_aiter ::= expr GET_AITER
list_afor ::= get_aiter list_afor2
list_afor2 ::= func_async_prefix
store func_async_middle list_iter
JUMP_LOOP COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
list_comp_async ::= BUILD_LIST_0 LOAD_ARG list_afor2
list_comp_async ::= LOAD_LISTCOMP LOAD_STR MAKE_FUNCTION_0
expr_get_aiter CALL_FUNCTION_1
GET_AWAITABLE LOAD_CONST
YIELD_FROM
list_iter ::= list_afor
set_comp_async ::= LOAD_SETCOMP
LOAD_STR
MAKE_FUNCTION_0
get_aiter
CALL_FUNCTION_1
set_comp_async ::= LOAD_CLOSURE
BUILD_TUPLE_1
LOAD_SETCOMP
LOAD_STR MAKE_FUNCTION_CLOSURE
get_aiter CALL_FUNCTION_1
await
""",
nop_func,
)
custom_ops_processed.add(opname)
self.addRule(
"""
dict_comp_async ::= BUILD_MAP_0 LOAD_ARG
dict_comp_async
expr ::= dict_comp_async
expr ::= generator_exp_async
expr ::= list_comp_async
expr ::= set_comp_async
func_async_middle ::= POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT
DUP_TOP LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY _come_froms
get_aiter ::= expr GET_AITER
list_afor ::= get_aiter list_afor2
list_comp_async ::= BUILD_LIST_0 LOAD_ARG list_afor2
list_iter ::= list_afor
set_afor ::= get_aiter set_afor2
set_iter ::= set_afor
set_iter ::= set_for
set_comp_async ::= BUILD_SET_0 LOAD_ARG
set_comp_async
""",
nop_func,
)
custom_ops_processed.add(opname)
elif opname == "GET_ANEXT":
self.addRule(
"""
func_async_prefix ::= _come_froms SETUP_EXCEPT GET_ANEXT LOAD_CONST YIELD_FROM
func_async_prefix ::= _come_froms SETUP_FINALLY GET_ANEXT LOAD_CONST YIELD_FROM POP_BLOCK
func_async_prefix ::= _come_froms
LOAD_CONST YIELD_FROM
SETUP_EXCEPT GET_ANEXT LOAD_CONST YIELD_FROM
func_async_middle ::= JUMP_FORWARD COME_FROM_EXCEPT
DUP_TOP LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
list_comp_async ::= BUILD_LIST_0 LOAD_ARG list_afor2
list_afor2 ::= func_async_prefix
store list_iter
JUMP_BACK COME_FROM_FINALLY
END_ASYNC_FOR
list_afor2 ::= func_async_prefix
store func_async_middle list_iter
JUMP_LOOP COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
""",
nop_func,
)
custom_ops_processed.add(opname)
elif opname == 'SETUP_ANNOTATIONS':
# 3.6 Variable Annotations PEP 526
# This seems to come before STORE_ANNOTATION, and doesn't


@@ -137,7 +137,7 @@ class Python37Parser(Python37BaseParser):
returns ::= _stmts return
stmt ::= genexpr_func
genexpr_func ::= LOAD_FAST _come_froms FOR_ITER store comp_iter JUMP_BACK
genexpr_func ::= LOAD_ARG _come_froms FOR_ITER store comp_iter JUMP_BACK
"""
pass
@@ -509,7 +509,7 @@ class Python37Parser(Python37BaseParser):
_ifstmts_jump ::= c_stmts_opt JUMP_ABSOLUTE JUMP_FORWARD _come_froms
"""
def p_35on(self, args):
def p_35_on(self, args):
"""
while1elsestmt ::= setup_loop l_stmts JUMP_BACK
@@ -567,7 +567,7 @@ class Python37Parser(Python37BaseParser):
iflaststmt ::= testexpr c_stmts_opt JUMP_FORWARD
"""
def p_37async(self, args):
def p_37_async(self, args):
"""
stmt ::= async_for_stmt37
stmt ::= async_for_stmt
@@ -589,6 +589,7 @@ class Python37Parser(Python37BaseParser):
# Order of LOAD_CONST YIELD_FROM is switched from 3.6 to save a LOAD_CONST
async_for_stmt37 ::= setup_loop expr
GET_AITER
_come_froms
SETUP_EXCEPT GET_ANEXT
LOAD_CONST YIELD_FROM
store
@@ -601,6 +602,7 @@ class Python37Parser(Python37BaseParser):
async_forelse_stmt ::= setup_loop expr
GET_AITER
_come_froms
SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
@@ -613,7 +615,7 @@ class Python37Parser(Python37BaseParser):
else_suite COME_FROM_LOOP
"""
def p_37chained(self, args):
def p_37_chained(self, args):
"""
testtrue ::= compare_chained37
testfalse ::= compare_chained37_false
@@ -658,7 +660,7 @@ class Python37Parser(Python37BaseParser):
compare_chained2a_false_37
"""
def p_37conditionals(self, args):
def p_37_conditionals(self, args):
"""
expr ::= if_exp37
if_exp37 ::= expr expr jf_cfs expr COME_FROM
@@ -711,7 +713,16 @@ class Python37Parser(Python37BaseParser):
list_comp ::= BUILD_LIST_0 list_iter
lc_body ::= expr LIST_APPEND
list_for ::= expr for_iter store list_iter jb_or_c
list_for ::= expr_or_arg
for_iter
store list_iter
jb_or_c _come_froms
set_for ::= expr_or_arg
for_iter
store set_iter
jb_or_c _come_froms
# This is seen in PyPy, but possibly it appears on other Python 3?
list_if ::= expr jmp_false list_iter COME_FROM
@@ -722,10 +733,10 @@ class Python37Parser(Python37BaseParser):
stmt ::= set_comp_func
set_comp_func ::= BUILD_SET_0 LOAD_FAST for_iter store comp_iter
set_comp_func ::= BUILD_SET_0 LOAD_ARG for_iter store comp_iter
JUMP_BACK RETURN_VALUE RETURN_LAST
set_comp_func ::= BUILD_SET_0 LOAD_FAST for_iter store comp_iter
set_comp_func ::= BUILD_SET_0 LOAD_ARG for_iter store comp_iter
COME_FROM JUMP_BACK RETURN_VALUE RETURN_LAST
comp_body ::= dict_comp_body
@@ -740,13 +751,16 @@ class Python37Parser(Python37BaseParser):
""""
expr ::= dict_comp
stmt ::= dict_comp_func
dict_comp_func ::= BUILD_MAP_0 LOAD_FAST for_iter store
dict_comp_func ::= BUILD_MAP_0 LOAD_ARG for_iter store
comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST
comp_iter ::= comp_if
comp_iter ::= comp_if_not
comp_if_not ::= expr jmp_true comp_iter
comp_iter ::= comp_body
expr_or_arg ::= LOAD_ARG
expr_or_arg ::= expr
"""
def p_expr3(self, args):
@@ -1192,7 +1206,7 @@ class Python37Parser(Python37BaseParser):
compare_chained2 ::= expr COMPARE_OP come_froms JUMP_FORWARD
"""
def p_37misc(self, args):
def p_37_misc(self, args):
"""
# long except clauses in a loop can sometimes cause a JUMP_BACK to turn into a
# JUMP_FORWARD to a JUMP_BACK. And when this happens there is an additional
@@ -1209,6 +1223,16 @@ class Python37Parser(Python37BaseParser):
super(Python37Parser, self).customize_grammar_rules(tokens, customize)
self.check_reduce["call_kw"] = "AST"
# Opcode names in the custom_ops_processed set have rules that get added
# unconditionally and the rules are constant. So they need to be done
# only once and if we see the opcode a second we don't have to consider
# adding more rules.
#
# Note: BUILD_TUPLE_UNPACK_WITH_CALL gets considered by
# default because it starts with BUILD. So we'll set to ignore it from
# the start.
custom_ops_processed = set()
for i, token in enumerate(tokens):
opname = token.kind
@@ -1309,6 +1333,211 @@ class Python37Parser(Python37BaseParser):
self.addRule(rule, nop_func)
rule = "starred ::= %s %s" % ("expr " * v, opname)
self.addRule(rule, nop_func)
elif opname == "GET_AITER":
self.add_unique_doc_rules("get_aiter ::= expr GET_AITER", customize)
if not {"MAKE_FUNCTION_0", "MAKE_FUNCTION_CLOSURE"} in self.seen_ops:
self.addRule(
"""
expr ::= dict_comp_async
expr ::= generator_exp_async
expr ::= list_comp_async
dict_comp_async ::= LOAD_DICTCOMP
LOAD_STR
MAKE_FUNCTION_0
get_aiter
CALL_FUNCTION_1
dict_comp_async ::= BUILD_MAP_0 LOAD_ARG
dict_comp_async
func_async_middle ::= POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT
DUP_TOP LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY COME_FROM
func_async_prefix ::= _come_froms SETUP_EXCEPT GET_ANEXT LOAD_CONST YIELD_FROM
generator_exp_async ::= load_genexpr LOAD_STR MAKE_FUNCTION_0
get_aiter CALL_FUNCTION_1
genexpr_func_async ::= LOAD_ARG func_async_prefix
store func_async_middle comp_iter
JUMP_LOOP COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
# FIXME: this is a workaround for probably some bug in the Earley parser.
# If we use get_aiter, then list_comp_async doesn't match, and I don't
# understand why.
expr_get_aiter ::= expr GET_AITER
list_afor ::= get_aiter list_afor2
list_afor2 ::= func_async_prefix
store func_async_middle list_iter
JUMP_LOOP COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
list_comp_async ::= BUILD_LIST_0 LOAD_ARG list_afor2
list_comp_async ::= LOAD_LISTCOMP LOAD_STR MAKE_FUNCTION_0
expr_get_aiter CALL_FUNCTION_1
GET_AWAITABLE LOAD_CONST
YIELD_FROM
list_iter ::= list_afor
set_comp_async ::= LOAD_SETCOMP
LOAD_STR
MAKE_FUNCTION_0
get_aiter
CALL_FUNCTION_1
set_comp_async ::= LOAD_CLOSURE
BUILD_TUPLE_1
LOAD_SETCOMP
LOAD_STR MAKE_FUNCTION_CLOSURE
get_aiter CALL_FUNCTION_1
await
""",
nop_func,
)
custom_ops_processed.add(opname)
self.addRule(
"""
dict_comp_async ::= BUILD_MAP_0 LOAD_ARG
dict_comp_async
expr ::= dict_comp_async
expr ::= generator_exp_async
expr ::= list_comp_async
expr ::= set_comp_async
func_async_middle ::= POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT
DUP_TOP LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY _come_froms
# async_iter ::= block_break SETUP_EXCEPT GET_ANEXT LOAD_CONST YIELD_FROM
get_aiter ::= expr GET_AITER
list_afor ::= get_aiter list_afor2
list_comp_async ::= BUILD_LIST_0 LOAD_ARG list_afor2
list_iter ::= list_afor
set_afor ::= get_aiter set_afor2
set_iter ::= set_afor
set_iter ::= set_for
set_comp_async ::= BUILD_SET_0 LOAD_ARG
set_comp_async
""",
nop_func,
)
custom_ops_processed.add(opname)
elif opname == "GET_ANEXT":
self.addRule(
"""
expr ::= genexpr_func_async
expr ::= BUILD_MAP_0 genexpr_func_async
expr ::= list_comp_async
dict_comp_async ::= BUILD_MAP_0 genexpr_func_async
async_iter ::= _come_froms
SETUP_EXCEPT GET_ANEXT LOAD_CONST YIELD_FROM
store_async_iter_end ::= store
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT
DUP_TOP LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY COME_FROM
# We use store_async_iter_end to make comp_iter come out in the right position,
# (after the logical "store")
genexpr_func_async ::= LOAD_ARG async_iter
store_async_iter_end
comp_iter
JUMP_LOOP COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
list_afor2 ::= async_iter
store
list_iter
JUMP_LOOP
COME_FROM_FINALLY
END_ASYNC_FOR
list_comp_async ::= BUILD_LIST_0 LOAD_ARG list_afor2
set_afor2 ::= async_iter
store
func_async_middle
set_iter
JUMP_LOOP COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
set_afor2 ::= expr_or_arg
set_iter_async
set_comp_async ::= BUILD_SET_0 set_afor2
set_iter_async ::= async_iter
store
set_iter
JUMP_LOOP
_come_froms
END_ASYNC_FOR
return_expr_lambda ::= genexpr_func_async
LOAD_CONST RETURN_VALUE
RETURN_VALUE_LAMBDA
return_expr_lambda ::= BUILD_SET_0 genexpr_func_async
RETURN_VALUE_LAMBDA LAMBDA_MARKER
""",
nop_func,
)
custom_ops_processed.add(opname)
elif opname == "GET_AWAITABLE":
rule_str = """
await_expr ::= expr GET_AWAITABLE LOAD_CONST YIELD_FROM
expr ::= await_expr
"""
self.add_unique_doc_rules(rule_str, customize)
elif opname == "GET_ITER":
self.addRule(
"""
expr ::= get_iter
get_iter ::= expr GET_ITER
""",
nop_func,
)
custom_ops_processed.add(opname)
elif opname == "LOAD_ASSERT":
if "PyPy" in customize:
rules_str = """
stmt ::= JUMP_IF_NOT_DEBUG stmts COME_FROM
"""
self.add_unique_doc_rules(rules_str, customize)
elif opname == "LOAD_ATTR":
self.addRule(
"""
expr ::= attribute
attribute ::= expr LOAD_ATTR
""",
nop_func,
)
custom_ops_processed.add(opname)
elif opname == "SETUP_WITH":
rules_str = """
with ::= expr SETUP_WITH POP_TOP suite_stmts_opt COME_FROM_WITH
@@ -1349,14 +1578,18 @@ class Python37Parser(Python37BaseParser):
if frozenset(("GET_AWAITABLE", "YIELD_FROM")).issubset(self.seen_ops):
rule = (
"async_call ::= expr "
"""
await ::= GET_AWAITABLE LOAD_CONST YIELD_FROM
await_expr ::= expr await
expr ::= await_expr
async_call ::= expr """
+ ("pos_arg " * args_pos)
+ ("kwarg " * args_kw)
+ "expr " * nak
+ token.kind
+ " GET_AWAITABLE LOAD_CONST YIELD_FROM"
)
self.add_unique_rule(rule, token.kind, uniq_param, customize)
self.add_unique_doc_rules(rule, customize)
self.add_unique_rule(
"expr ::= async_call", token.kind, uniq_param, customize
)
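A worked example of the rule string assembled above (hypothetical values, not from a real run): with args_pos=2, args_kw=0, nak=0 and token.kind == "CALL_FUNCTION_2", the concatenation ends in
# async_call ::= expr pos_arg pos_arg CALL_FUNCTION_2 GET_AWAITABLE LOAD_CONST YIELD_FROM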


@@ -367,7 +367,7 @@ class Python37BaseParser(PythonParser):
if opname == "BUILD_MAP_n":
# PyPy sometimes has no count. Sigh.
rule = (
"dict_comp_func ::= BUILD_MAP_n LOAD_FAST for_iter store "
"dict_comp_func ::= BUILD_MAP_n LOAD_ARG for_iter store "
"comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST"
)
self.add_unique_rule(rule, "dict_comp_func", 1, customize)
@@ -644,7 +644,7 @@ class Python37BaseParser(PythonParser):
func_async_middle ::= POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT
DUP_TOP LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY COME_FROM
genexpr_func_async ::= LOAD_FAST func_async_prefix
genexpr_func_async ::= LOAD_ARG func_async_prefix
store func_async_middle comp_iter
JUMP_BACK COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
@@ -660,7 +660,7 @@ class Python37BaseParser(PythonParser):
store func_async_middle list_iter
JUMP_BACK COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP
list_comp_async ::= BUILD_LIST_0 LOAD_FAST list_afor2
list_comp_async ::= BUILD_LIST_0 LOAD_ARG list_afor2
get_aiter ::= expr GET_AITER
list_afor ::= get_aiter list_afor2
list_iter ::= list_afor


@@ -74,7 +74,7 @@ class Python38Parser(Python37Parser):
COME_FROM_FINALLY
END_ASYNC_FOR
genexpr_func_async ::= LOAD_FAST func_async_prefix
genexpr_func_async ::= LOAD_ARG func_async_prefix
store comp_iter
JUMP_BACK COME_FROM_FINALLY
END_ASYNC_FOR
@@ -137,7 +137,6 @@ class Python38Parser(Python37Parser):
# while1elsestmt ::= l_stmts JUMP_BACK
whileTruestmt ::= _come_froms l_stmts JUMP_BACK POP_BLOCK
while1stmt ::= _come_froms l_stmts COME_FROM_LOOP
while1stmt ::= _come_froms l_stmts COME_FROM JUMP_BACK COME_FROM_LOOP
whileTruestmt38 ::= _come_froms l_stmts JUMP_BACK
whileTruestmt38 ::= _come_froms l_stmts JUMP_BACK COME_FROM_EXCEPT_CLAUSE


@@ -228,10 +228,7 @@ class Scanner(object):
# Offset: lineno pairs, only for offsets which start a line.
# Locally we use a list for more convenient iteration using indices.
if self.version > (1, 4):
linestarts = list(self.opc.findlinestarts(code_obj))
else:
linestarts = [[0, 1]]
linestarts = list(self.opc.findlinestarts(code_obj))
self.linestarts = dict(linestarts)
# 'List-map' which shows line number of current op and offset of
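The (offset, lineno) pairs come from xdis's findlinestarts; the standard library's dis.findlinestarts behaves the same way on the running interpreter. An illustrative sketch (not part of this commit):
import dis
def f():
    x = 1
    return x
print(list(dis.findlinestarts(f.__code__)))
# e.g. [(0, 2), (4, 3)] on CPython 3.7:
# line 2 starts at offset 0, line 3 at offset 4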


@@ -24,9 +24,9 @@ use in deparsing.
import sys
import uncompyle6.scanners.scanner2 as scan
from uncompyle6.scanner import L65536
# bytecode verification, verify(), uses JUMP_OPs from here
from xdis import iscode
from xdis.opcodes import opcode_26
from xdis.bytecode import _get_const_info
@@ -71,7 +71,7 @@ class Scanner26(scan.Scanner2):
bytecode = self.build_instructions(co)
# show_asm = 'after'
if show_asm in ('both', 'before'):
if show_asm in ("both", "before"):
for instr in bytecode.get_instructions(co):
print(instr.disassemble())
@@ -80,7 +80,7 @@ class Scanner26(scan.Scanner2):
customize = {}
if self.is_pypy:
customize['PyPy'] = 1
customize["PyPy"] = 0
codelen = len(self.code)
@@ -92,6 +92,7 @@ class Scanner26(scan.Scanner2):
# 'LOAD_ASSERT' is used in assert statements.
self.load_asserts = set()
for i in self.op_range(0, codelen):
# We need to detect the difference between:
# raise AssertionError
# and
@@ -114,9 +115,9 @@ class Scanner26(scan.Scanner2):
# Distinguish "print ..." from "print ...,"
if self.code[last_stmt] == self.opc.PRINT_ITEM:
if self.code[i] == self.opc.PRINT_ITEM:
replace[i] = 'PRINT_ITEM_CONT'
replace[i] = "PRINT_ITEM_CONT"
elif self.code[i] == self.opc.PRINT_NEWLINE:
replace[i] = 'PRINT_NEWLINE_CONT'
replace[i] = "PRINT_NEWLINE_CONT"
last_stmt = i
i = self.next_stmt[i]
@@ -172,7 +173,7 @@ class Scanner26(scan.Scanner2):
collection_type = op_name.split("_")[1]
next_tokens = self.bound_collection_from_tokens(
tokens, t, i, "CONST_%s" % collection_type
tokens, t, len(tokens), "CONST_%s" % collection_type
)
if next_tokens is not None:
tokens = next_tokens
@@ -180,29 +181,25 @@ class Scanner26(scan.Scanner2):
if op in self.opc.CONST_OPS:
const = co.co_consts[oparg]
# We can't use inspect.iscode() because we may be
# using a different version of Python than the
# one that this was byte-compiled on. So the code
# types may mismatch.
if hasattr(const, 'co_name'):
if iscode(const):
oparg = const
if const.co_name == '<lambda>':
assert op_name == 'LOAD_CONST'
op_name = 'LOAD_LAMBDA'
if const.co_name == "<lambda>":
assert op_name == "LOAD_CONST"
op_name = "LOAD_LAMBDA"
elif const.co_name == self.genexpr_name:
op_name = 'LOAD_GENEXPR'
elif const.co_name == '<dictcomp>':
op_name = 'LOAD_DICTCOMP'
elif const.co_name == '<setcomp>':
op_name = 'LOAD_SETCOMP'
op_name = "LOAD_GENEXPR"
elif const.co_name == "<dictcomp>":
op_name = "LOAD_DICTCOMP"
elif const.co_name == "<setcomp>":
op_name = "LOAD_SETCOMP"
else:
op_name = "LOAD_CODE"
# verify uses 'pattr' for comparison, since 'attr'
# verify() uses 'pattr' for comparison, since 'attr'
# now holds Code(const) and thus can not be used
# for comparison (todo: think about changing this)
# pattr = 'code_object @ 0x%x %s->%s' % \
# pattr = 'code_object @ 0x%x %s->%s' %\
# (id(const), const.co_filename, const.co_name)
pattr = '<code_object ' + const.co_name + '>'
pattr = "<code_object " + const.co_name + ">"
else:
if oparg < len(co.co_consts):
argval, _ = _get_const_info(oparg, co.co_consts)
@@ -226,11 +223,15 @@ class Scanner26(scan.Scanner2):
elif op in self.opc.JABS_OPS:
pattr = repr(oparg)
elif op in self.opc.LOCAL_OPS:
pattr = varnames[oparg]
if self.version < (1, 5):
pattr = names[oparg]
else:
pattr = varnames[oparg]
elif op in self.opc.COMPARE_OPS:
pattr = self.opc.cmp_op[oparg]
elif op in self.opc.FREE_OPS:
pattr = free[oparg]
if op in self.varargs_ops:
# CE - Hack for >= 2.5
# Now all values loaded via LOAD_CLOSURE are packed into
@@ -281,25 +282,36 @@ class Scanner26(scan.Scanner2):
elif op == self.opc.LOAD_GLOBAL:
if offset in self.load_asserts:
op_name = 'LOAD_ASSERT'
op_name = "LOAD_ASSERT"
elif op == self.opc.RETURN_VALUE:
if offset in self.return_end_ifs:
op_name = 'RETURN_END_IF'
op_name = "RETURN_END_IF"
linestart = self.linestarts.get(offset, None)
if offset not in replace:
tokens.append(Token(
op_name, oparg, pattr, offset, linestart, op,
has_arg, self.opc))
tokens.append(
Token(
op_name, oparg, pattr, offset, linestart, op, has_arg, self.opc
)
)
else:
tokens.append(Token(
replace[offset], oparg, pattr, offset, linestart, op,
has_arg, self.opc))
tokens.append(
Token(
replace[offset],
oparg,
pattr,
offset,
linestart,
op,
has_arg,
self.opc,
)
)
pass
pass
if show_asm in ('both', 'after'):
if show_asm in ("both", "after"):
for t in tokens:
print(t.format(line_prefix=""))
print()


@@ -604,6 +604,10 @@ class Scanner3(Scanner):
# other parts like n_LOAD_CONST in pysource.py for example.
pattr = const
pass
elif opname == "LOAD_FAST" and argval == ".0":
# Used as the implicit first parameter of a comprehension
opname = "LOAD_ARG"
elif opname in ("MAKE_FUNCTION", "MAKE_CLOSURE"):
if self.version >= (3, 6):
# 3.6+ doesn't have MAKE_CLOSURE, so opname == 'MAKE_FUNCTION'
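The ".0" renamed here is CPython's hidden first argument of a comprehension's code object. A quick way to see it (illustrative, assumes CPython 3.x before 3.12, where comprehensions are compiled as separate code objects):
import dis
code = compile("[i for i in range(3)]", "<demo>", "eval")
comp = next(c for c in code.co_consts if hasattr(c, "co_varnames"))
print(comp.co_varnames[0])  # '.0' -- the name the scanner rewrites to LOAD_ARG
dis.dis(comp)  # shows LOAD_FAST .0 feeding FOR_ITER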


@@ -13,7 +13,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Python 3.7 bytecode decompiler scanner
Python 3.7 bytecode decompiler scanner.
Does some additional massaging of xdis-disassembled instructions to
make things easier for decompilation.
@@ -22,6 +22,12 @@ This sets up opcodes Python's 3.7 and calls a generalized
scanner routine for Python 3.
"""
from uncompyle6.scanner import CONST_COLLECTIONS
from uncompyle6.scanners.tok import Token
from uncompyle6.scanners.scanner37base import Scanner37Base
# bytecode verification, verify(), uses JUMP_OPs from here
@@ -30,14 +36,101 @@ from xdis.opcodes import opcode_37 as opc
# bytecode verification, verify(), uses JUMP_OPS from here
JUMP_OPs = opc.JUMP_OPS
class Scanner37(Scanner37Base):
def __init__(self, show_asm=None, debug="", is_pypy=False):
Scanner37Base.__init__(self, (3, 7), show_asm, debug, is_pypy)
self.debug = debug
return
pass
def bound_collection_from_tokens(
self, tokens: list, next_tokens: list, t: Token, i: int, collection_type: str
) -> list:
count = t.attr
assert isinstance(count, int)
assert count <= i
if collection_type == "CONST_DICT":
# Constant dictionaries work via BUILD_CONST_KEY_MAP and
# handle the values() like sets and lists.
# However the keys() are a LOAD_CONST of the keys;
# adjust the count to account for this.
count += 1
# For small lists don't bother
if count < 5:
return next_tokens + [t]
collection_start = i - count
for j in range(collection_start, i):
if tokens[j].kind not in (
"LOAD_CODE",
"LOAD_CONST",
"LOAD_FAST",
"LOAD_GLOBAL",
"LOAD_NAME",
"LOAD_STR",
):
return next_tokens + [t]
collection_enum = CONST_COLLECTIONS.index(collection_type)
# If we get here, all instructions before tokens[i] are simple load ops, so
# we can add a boundary marker and rewrite the loads as ADD_VALUE tokens.
new_tokens = next_tokens[:-count]
start_offset = tokens[collection_start].offset
new_tokens.append(
Token(
opname="COLLECTION_START",
attr=collection_enum,
pattr=collection_type,
offset="%s_0" % start_offset,
linestart=False,
has_arg=True,
has_extended_arg=False,
opc=self.opc,
)
)
for j in range(collection_start, i):
new_tokens.append(
Token(
opname="ADD_VALUE",
attr=tokens[j].attr,
pattr=tokens[j].pattr,
offset=tokens[j].offset,
linestart=tokens[j].linestart,
has_arg=True,
has_extended_arg=False,
opc=self.opc,
)
)
new_tokens.append(
Token(
opname="BUILD_%s" % collection_type,
attr=t.attr,
pattr=t.pattr,
offset=t.offset,
linestart=t.linestart,
has_arg=t.has_arg,
has_extended_arg=False,
opc=t.opc,
)
)
return new_tokens
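# Net effect, on a hypothetical token stream (editorial sketch, not from a
# real run): five constant loads followed by BUILD_LIST 5 (count at the cap)
#   LOAD_CONST 1 ... LOAD_CONST 5, BUILD_LIST 5
# come out rewritten as
#   COLLECTION_START CONST_LIST, ADD_VALUE 1 ... ADD_VALUE 5, BUILD_CONST_LIST 5
# while anything under 5 items is returned unchanged -- the "cap on short
# lists" that the long-list tests above rely on.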
def ingest(
self, co, classname=None, code_objects={}, show_asm=None
):
@@ -59,7 +152,9 @@ class Scanner37(Scanner37Base):
grammar rules. Specifically, variable arg tokens like MAKE_FUNCTION or BUILD_LIST
cause specific rules for the specific number of arguments they take.
"""
tokens, customize = Scanner37Base.ingest(self, co, classname, code_objects, show_asm)
tokens, customize = Scanner37Base.ingest(
self, co, classname, code_objects, show_asm
)
new_tokens = []
for i, t in enumerate(tokens):
# things that smash new_tokens like BUILD_LIST have to come first.
@@ -75,9 +170,10 @@ class Scanner37(Scanner37Base):
next_tokens = self.bound_collection_from_tokens(
new_tokens, t, i, "CONST_%s" % collection_type
)
if next_tokens is not None:
new_tokens = next_tokens
continue
new_tokens = self.bound_collection_from_tokens(
tokens, new_tokens, t, i, "CONST_%s" % collection_type
)
continue
# The lowest bit of flags indicates whether the
# var-keyword argument is placed at the top of the stack
@@ -100,6 +196,7 @@ class Scanner37(Scanner37Base):
return new_tokens, customize
if __name__ == "__main__":
from xdis.version_info import PYTHON_VERSION_TRIPLE, version_tuple_to_str


@@ -46,8 +46,10 @@ globals().update(op3.opmap)
class Scanner37Base(Scanner):
def __init__(self, version, show_asm=None, is_pypy=False):
def __init__(self, version: tuple, show_asm=None, debug="", is_pypy=False):
super(Scanner37Base, self).__init__(version, show_asm, is_pypy)
self.debug = debug
self.is_pypy = is_pypy
# Create opcode classification sets
# Note: super initialization above initializes self.opc
@@ -385,6 +387,11 @@ class Scanner37Base(Scanner):
if "." in inst.argval:
opname = "IMPORT_NAME_ATTR"
pass
elif opname == "LOAD_FAST" and argval == ".0":
# Used as the implicit first parameter of a comprehension
opname = "LOAD_ARG"
elif opname in ("MAKE_FUNCTION", "MAKE_CLOSURE"):
flags = argval
opname = "MAKE_FUNCTION_%d" % (flags)
@@ -881,16 +888,6 @@ class Scanner37Base(Scanner):
pass
return
def is_jump_back(self, offset, extended_arg):
"""
Return True if the code at offset is some sort of jump back.
That is, it is a "JUMP_ABSOLUTE" whose target comes before
the offset.
"""
if self.code[offset] != self.opc.JUMP_ABSOLUTE:
return False
return offset > self.get_target(offset, extended_arg)
def next_except_jump(self, start):
"""
Return the next jump that was generated by an except SomeException:


@@ -34,14 +34,16 @@ JUMP_OPs = opc.JUMP_OPS
class Scanner38(Scanner37):
def __init__(self, show_asm=None):
Scanner37Base.__init__(self, (3, 8), show_asm)
self.debug = False
def __init__(self, show_asm=None, debug="", is_pypy=False):
Scanner37Base.__init__(self, (3, 8), show_asm, debug, is_pypy)
self.debug = debug
return
pass
def ingest(self, co, classname=None, code_objects={}, show_asm=None):
def ingest(
self, co, classname=None, code_objects={}, show_asm=None
) -> tuple:
"""
Create "tokens" the bytecode of an Python code object. Largely these
are the opcode name, but in some cases that has been modified to make parsing
@@ -107,7 +109,7 @@ class Scanner38(Scanner37):
loop_ends.append(next_end)
# Turn JUMP opcodes into "BREAK_LOOP" opcodes.
# FIXME: this should be replaced by proper control flow.
# FIXME!!!!: this should be replaced by proper control flow.
if opname in ("JUMP_FORWARD", "JUMP_ABSOLUTE") and len(loop_ends):
jump_target = token.attr
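Background for this rewrite: CPython 3.8 removed the BREAK_LOOP opcode, so a break compiles to a plain jump. A minimal check (illustrative, not part of this commit; run on a 3.8 interpreter):
import dis
dis.dis(compile("while x:\n    break", "<demo>", "exec"))
# the break appears as a bare JUMP_ABSOLUTE past the loop body; Scanner38
# renames such jumps back to a BREAK_LOOP pseudo-op so the pre-3.8
# grammar rules still apply.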


@@ -184,62 +184,92 @@ TABLE_R = {
# }
TABLE_DIRECT = {
"BINARY_ADD": ("+",),
"BINARY_SUBTRACT": ("-",),
"BINARY_MULTIPLY": ("*",),
"BINARY_DIVIDE": ("/",),
"BINARY_MATRIX_MULTIPLY": ("@",),
"BINARY_TRUE_DIVIDE": ("/",), # Not in <= 2.1
"BINARY_FLOOR_DIVIDE": ("//",),
"BINARY_MODULO": ("%%",),
"BINARY_POWER": ("**",),
"BINARY_LSHIFT": ("<<",),
"BINARY_RSHIFT": (">>",),
"BINARY_AND": ("&",),
"BINARY_OR": ("|",),
"BINARY_XOR": ("^",),
"INPLACE_ADD": ("+=",),
"INPLACE_SUBTRACT": ("-=",),
"INPLACE_MULTIPLY": ("*=",),
"INPLACE_MATRIX_MULTIPLY": ("@=",),
"INPLACE_DIVIDE": ("/=",),
"INPLACE_TRUE_DIVIDE": ("/=",), # Not in <= 2.1; 2.6 generates INPLACE_DIVIDE only?
"INPLACE_FLOOR_DIVIDE": ("//=",),
"INPLACE_MODULO": ("%%=",),
"INPLACE_POWER": ("**=",),
"INPLACE_LSHIFT": ("<<=",),
"INPLACE_RSHIFT": (">>=",),
"INPLACE_AND": ("&=",),
"INPLACE_OR": ("|=",),
"INPLACE_XOR": ("^=",),
"BINARY_ADD": ( "+" ,),
"BINARY_AND": ( "&" ,),
"BINARY_DIVIDE": ( "/" ,),
"BINARY_FLOOR_DIVIDE": ( "//" ,),
"BINARY_LSHIFT": ( "<<",),
"BINARY_MATRIX_MULTIPLY": ( "@" ,),
"BINARY_MODULO": ( "%%",),
"BINARY_MULTIPLY": ( "*" ,),
"BINARY_OR": ( "|" ,),
"BINARY_POWER": ( "**",),
"BINARY_RSHIFT": ( ">>",),
"BINARY_SUBTRACT": ( "-" ,),
"BINARY_TRUE_DIVIDE": ( "/" ,), # Not in <= 2.1; 2.6 generates INPLACE_DIVIDE only?
"BINARY_XOR": ( "^" ,),
"DELETE_FAST": ( "%|del %{pattr}\n", ),
"DELETE_GLOBAL": ( "%|del %{pattr}\n", ),
"DELETE_NAME": ( "%|del %{pattr}\n", ),
"IMPORT_FROM": ( "%{pattr}", ),
"IMPORT_NAME_ATTR": ( "%{pattr}", ),
"INPLACE_ADD": ( "+=" ,),
"INPLACE_AND": ( "&=" ,),
"INPLACE_DIVIDE": ( "/=" ,),
"INPLACE_FLOOR_DIVIDE": ( "//=" ,),
"INPLACE_LSHIFT": ( "<<=",),
"INPLACE_MATRIX_MULTIPLY": ( "@=" ,),
"INPLACE_MODULO": ( "%%=",),
"INPLACE_MULTIPLY": ( "*=" ,),
"INPLACE_OR": ( "|=" ,),
"INPLACE_POWER": ( "**=",),
"INPLACE_RSHIFT": ( ">>=",),
"INPLACE_SUBTRACT": ( "-=" ,),
"INPLACE_TRUE_DIVIDE": ( "/=" ,),
"INPLACE_XOR": ( "^=" ,),
"LOAD_ARG": ( "%{pattr}", ),
"LOAD_ASSERT": ( "%{pattr}", ),
"LOAD_CLASSNAME": ( "%{pattr}", ),
"LOAD_DEREF": ( "%{pattr}", ),
"LOAD_FAST": ( "%{pattr}", ),
"LOAD_GLOBAL": ( "%{pattr}", ),
"LOAD_LOCALS": ( "locals()", ),
"LOAD_NAME": ( "%{pattr}", ),
"LOAD_STR": ( "%{pattr}", ),
"STORE_DEREF": ( "%{pattr}", ),
"STORE_FAST": ( "%{pattr}", ),
"STORE_GLOBAL": ( "%{pattr}", ),
"STORE_NAME": ( "%{pattr}", ),
"UNARY_INVERT": ( "~"),
"UNARY_NEGATIVE": ( "-",),
"UNARY_NOT": ( "not ", ),
"UNARY_POSITIVE": ( "+",),
# bin_op (formerly "binary_expr") is the Python AST BinOp
"bin_op": ("%c %c %c", 0, (-1, "binary_operator"), (1, "expr")),
"UNARY_POSITIVE": ("+",),
"UNARY_NEGATIVE": ("-",),
"UNARY_INVERT": ("~"),
# unary_op (formerly "unary_expr") is the Python AST UnaryOp
"unary_op": ("%c%c", (1, "unary_operator"), (0, "expr")),
"unary_not": ("not %c", (0, "expr")),
"unary_convert": ("`%c`", (0, "expr"),),
"get_iter": ("iter(%c)", (0, "expr"),),
"slice0": ("%c[:]", (0, "expr"),),
"slice1": ("%c[%p:]", (0, "expr"), (1, 100)),
"slice2": ("%c[:%p]", (0, "expr"), (1, 100)),
"slice3": ("%c[%p:%p]", (0, "expr"), (1, 100), (2, 100)),
"set_iter": ( "%c", 0 ),
"slice0": (
"%c[:]",
(0, "expr"),
),
"slice1": (
"%c[%p:]",
(0, "expr"),
(1, NO_PARENTHESIS_EVER)
),
"slice2": ( "[%c:%p]",
(0, "expr"),
(1, NO_PARENTHESIS_EVER)
),
"slice3": (
"%c[%p:%p]",
(0, "expr"),
(1, NO_PARENTHESIS_EVER),
(2, NO_PARENTHESIS_EVER)
),
"IMPORT_FROM": ("%{pattr}",),
"IMPORT_NAME_ATTR": ("%{pattr}",),
"attribute": ("%c.%[1]{pattr}", (0, "expr")),
"LOAD_STR": ("%{pattr}",),
"LOAD_FAST": ("%{pattr}",),
"LOAD_NAME": ("%{pattr}",),
"LOAD_CLASSNAME": ("%{pattr}",),
"LOAD_GLOBAL": ("%{pattr}",),
"LOAD_DEREF": ("%{pattr}",),
"LOAD_LOCALS": ("locals()",),
"LOAD_ASSERT": ("%{pattr}",),
"DELETE_FAST": ("%|del %{pattr}\n",),
"DELETE_NAME": ("%|del %{pattr}\n",),
"DELETE_GLOBAL": ("%|del %{pattr}\n",),
"delete_subscript": (
"%|del %p[%c]\n",
(0, "expr", PRECEDENCE["subscript"]),
@@ -259,10 +289,6 @@ TABLE_DIRECT = {
),
"store_subscript": ("%p[%c]", (0, "expr", PRECEDENCE["subscript"]), (1, "expr")),
"STORE_FAST": ("%{pattr}",),
"STORE_NAME": ("%{pattr}",),
"STORE_GLOBAL": ("%{pattr}",),
"STORE_DEREF": ("%{pattr}",),
"unpack": ("%C%,", (1, maxint, ", ")),
# This nonterminal we create on the fly in semantic routines
"unpack_w_parens": ("(%C%,)", (1, maxint, ", ")),
@@ -284,7 +310,7 @@ TABLE_DIRECT = {
"set_comp_body": ("%c", 0),
"gen_comp_body": ("%c", 0),
"dict_comp_body": ("%c:%c", 1, 0),
"dict_comp_body": ("%c: %c", 1, 0),
"assign": ("%|%c = %p\n", -1, (0, 200)),
# The 2nd parameter should have a = suffix.
# There is a rule with a 4th parameter "store"
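
To make the format specifiers above concrete, here is a minimal toy interpreter for a small subset of them. The dict-based node model, the render() name, and treating %p like %c (ignoring its precedence argument) are simplifying assumptions for illustration; this is not the engine in pysource.py.

import re

def render(fmt, node, args, indent="    "):
    # Toy semantics: %c recurses into the numbered child, %{pattr} interpolates
    # the token's pattr, %| writes the current indent, %% is a literal percent.
    arg_iter = iter(args)

    def expand(match):
        spec = match.group(1)
        if spec == "c":
            return str(node["children"][next(arg_iter)])
        if spec == "{pattr}":
            return str(node["pattr"])
        if spec == "|":
            return indent
        return "%"  # "%%"

    return re.sub(r"%(c|\{pattr\}|\||%)", expand, fmt)

# Mimicking "assign": ("%|%c = %p\n", -1, (0, 200)), with %p treated as %c:
node = {"children": ["x + 1", "y"], "pattr": None}
print(render("%|%c = %c\n", node, [-1, 0]), end="")  # -> "    y = x + 1"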

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2018-2019, 2021 by Rocky Bernstein
# Copyright (c) 2018-2019, 2021-2022 by Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -17,7 +17,7 @@
"""
from uncompyle6.parsers.treenode import SyntaxTree
from uncompyle6.semantics.consts import INDENT_PER_LEVEL, PRECEDENCE, TABLE_R, TABLE_DIRECT
from uncompyle6.semantics.consts import INDENT_PER_LEVEL, NO_PARENTHESIS_EVER, PRECEDENCE, TABLE_R, TABLE_DIRECT
from uncompyle6.semantics.helper import flatten_list
from uncompyle6.scanners.tok import Token
@@ -47,7 +47,7 @@ def customize_for_version(self, is_pypy, version):
if version[:2] >= (3, 7):
def n_call_kw_pypy37(node):
self.template_engine(("%p(", (0, 100)), node)
self.template_engine(("%p(", (0, NO_PARENTHESIS_EVER)), node)
assert node[-1] == "CALL_METHOD_KW"
arg_count = node[-1].attr
kw_names = node[-2]
@@ -194,7 +194,29 @@ def customize_for_version(self, is_pypy, version):
self.prune()
self.n_iftrue_stmt24 = n_iftrue_stmt24
else: # version <= 2.3:
elif version < (1, 4):
from uncompyle6.semantics.customize14 import customize_for_version14
customize_for_version14(self, version)
def n_call(node):
expr = node[0]
assert expr == "expr"
params = node[1]
if params == "tuple":
self.template_engine(("%p(", (0, NO_PARENTHESIS_EVER)), expr)
sep = ""
for param in params[:-1]:
self.write(sep)
self.preorder(param)
sep = ", "
self.write(")")
else:
self.template_engine(("%p(%P)",
(0, "expr", 100), (1,-1,", ", NO_PARENTHESIS_EVER)), node)
self.prune()
self.n_call = n_call
else: # 1.4 <= version <= 2.3:
TABLE_DIRECT.update({"if1_stmt": ("%|if 1\n%+%c%-", 5)})
if version <= (2, 1):
TABLE_DIRECT.update(

View File

@@ -0,0 +1,30 @@
# Copyright (c) 2022 by Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Isolate Python 1.4- version-specific semantic actions here.
"""
from uncompyle6.semantics.consts import TABLE_DIRECT
#######################
# Python 1.4- Changes #
#######################
def customize_for_version14(self, version):
TABLE_DIRECT.update(
{
"print_expr_stmt": (
("%|print %c\n", 0)
),
}
)
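
For reference, a tiny program of the shape this template prints back; the example is invented, and the syntax is Python 1.x/2.x only:

x = 6 * 7
print x  # rendered by "%|print %c\n": %| is the indent, %c the expression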

View File

@@ -80,142 +80,6 @@ def customize_for_version3(self, version):
self.default(node)
self.n_tryfinallystmt = tryfinallystmt
def listcomp_closure3(node):
"""List comprehensions in Python 3 when handled as a closure.
See if we can combine code.
"""
# FIXME: DRY with comprehension_walk_newer
p = self.prec
self.prec = 27
code_obj = node[1].attr
assert iscode(code_obj), node[1]
code = Code(code_obj, self.scanner, self.currentclass, self.debug_opts["asm"])
ast = self.build_ast(code._tokens, code._customize, code)
self.customize(code._customize)
# skip over: sstmt, stmt, return, return_expr
# and other singleton derivations
while len(ast) == 1 or (
ast in ("sstmt", "return") and ast[-1] in ("RETURN_LAST", "RETURN_VALUE")
):
self.prec = 100
ast = ast[0]
n = ast[1]
# Pick out important parts of the comprehension:
# * the variables we iterate over: "stores"
# * the results we accumulate: "n"
# collections is the name of the expression(s) we are iterating over
collections = [node[-3]]
list_ifs = []
if self.version[:2] == (3, 0) and n != "list_iter":
# FIXME 3.0 is a snowflake here. We need
# special code for this. Not sure if this is totally
# correct.
stores = [ast[3]]
assert ast[4] == "comp_iter"
n = ast[4]
# Find the list comprehension body. It is the inner-most
# node that is not comp_.. .
while n == "comp_iter":
if n[0] == "comp_for":
n = n[0]
stores.append(n[2])
n = n[3]
elif n[0] in ("comp_if", "comp_if_not"):
n = n[0]
# FIXME: just a guess
if n[0].kind == "expr":
list_ifs.append(n)
else:
list_ifs.append([1])
n = n[2]
pass
else:
break
pass
# Skip over n[0] which is something like: _[1]
self.preorder(n[1])
else:
assert n == "list_iter"
stores = []
# Find the list comprehension body. It is the inner-most
# node that is not list_.. .
while n == "list_iter":
# recurse one step
n = n[0]
# FIXME: adjust for set comprehension
if n == "list_for":
stores.append(n[2])
n = n[3]
if n[0] == "list_for":
# Dog-paddle down largely singleton reductions
# to find the collection (expr)
c = n[0][0]
if c == "expr":
c = c[0]
# FIXME: grammar is wonky here? Is this really an attribute?
if c == "attribute":
c = c[0]
collections.append(c)
pass
elif n in ("list_if", "list_if_not", "list_if_or_not"):
if n[0].kind == "expr":
list_ifs.append(n)
else:
list_ifs.append([1])
if n[-1] == "come_from_opt":
n = n[-2]
else:
n = n[-1]
pass
elif n == "list_if37":
list_ifs.append(n)
n = n[-1]
pass
elif n == "list_afor":
collections.append(n[0][0])
n = n[1]
stores.append(n[1][0])
if n[2].kind == "list_iter":
n = n[2]
else:
n = n[3]
pass
assert n == "lc_body", ast
self.preorder(n[0])
# FIXME: add indentation around "for"'s and "in"'s
n_colls = len(collections)
for i, store in enumerate(stores):
if i >= n_colls:
break
if collections[i] == "LOAD_DEREF" and co_flags_is_async(code_obj.co_flags):
self.write(" async")
pass
self.write(" for ")
self.preorder(store)
self.write(" in ")
self.preorder(collections[i])
if i < len(list_ifs):
self.preorder(list_ifs[i])
pass
pass
self.prec = p
self.listcomp_closure3 = listcomp_closure3
def n_classdef3(node):
"""Handle "classdef" nonterminal for 3.0 >= version 3.0 < 3.6
"""

View File

@@ -28,6 +28,7 @@ from uncompyle6.semantics.helper import flatten_list, gen_function_parens_adjust
# Python 3.5+ Changes #
#######################
def customize_for_version35(self, version):
# fmt: off
TABLE_DIRECT.update(
{
# nested await expressions like:
@@ -36,15 +37,24 @@ def customize_for_version35(self, version):
"await_expr": ("await %p", (0, PRECEDENCE["await_expr"]-1)),
"await_stmt": ("%|%c\n", 0),
"async_for_stmt": ("%|async for %c in %c:\n%+%|%c%-\n\n", 9, 1, 25),
"async_for_stmt": (
"%|async for %c in %c:\n%+%|%c%-\n\n",
(9, "store"),
(1, "expr"),
(25, "for_block"),
),
"async_forelse_stmt": (
"%|async for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n",
9,
1,
25,
(27, "else_suite"),
(9, "store"),
(1, "expr"),
(25, "for_block"),
(-2, "else_suite"),
),
"async_with_stmt": (
"%|async with %c:\n%+%c%-",
(0, "expr"),
3
),
"async_with_stmt": ("%|async with %c:\n%+%c%-", (0, "expr"), 3),
"async_with_as_stmt": (
"%|async with %c as %c:\n%+%c%-",
(0, "expr"),
@@ -55,6 +65,7 @@ def customize_for_version35(self, version):
# "unmapexpr": ( "{**%c}", 0), # done by n_unmapexpr
}
)
# fmt: on
def async_call(node):
self.f.write("async ")
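
A hedged illustration of the source shape the async_for_stmt and async_forelse_stmt templates above emit; the names are invented and this is not from the test corpus:

results = []

async def drain(queue):
    # async_forelse_stmt: a loop body, then an "else" suite that runs when
    # the async iterator is exhausted without hitting a break.
    async for msg in queue:
        results.append(msg)
    else:
        results.append("done")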

View File

@@ -61,7 +61,14 @@ def customize_for_version36(self, version):
"%|async for %c in %c:\n%+%c%-\n\n",
(9, "store"),
(1, "expr"),
(18, "for_block"),
(-9, "for_block"), # Count from end, since COME_FROM shifts things in the forward direction
),
"async_forelse_stmt36": (
"%|async for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n",
(9, "store"),
(1, "expr"),
(-10, "for_block"),
(-2, "else_suite"),
),
"call_ex": ("%c(%p)", (0, "expr"), (1, 100)),
"except_return": ("%|except:\n%+%c%-", 3),
@@ -73,6 +80,12 @@ def customize_for_version36(self, version):
"ifstmtl": ("%|if %c:\n%+%c%-",
(0, "testexpr"), (1, "_ifstmts_jumpl")),
"list_afor": (
" async for %[1]{%c} in %c%[1]{%c}",
(1, "store"), (0, "get_aiter"), (3, "list_iter"),
),
"try_except36": ("%|try:\n%+%c%-%c\n\n", 1, -2),
"tryfinally36": ("%|try:\n%+%c%-%|finally:\n%+%c%-\n\n", (1, "returns"), 3),
"tryfinally_return_stmt": ("%|try:\n%+%c%-%|finally:\n%+%|return%-\n\n", 1),
@@ -680,6 +693,16 @@ def customize_for_version36(self, version):
self.n_joined_str = n_joined_str
def n_list_comp_async(node):
self.write("[")
if node[0].kind == "load_closure":
self.listcomp_closure3(node)
else:
self.comprehension_walk_newer(node, iter_index=3, code_index=0)
self.write("]")
self.prune()
self.n_list_comp_async = n_list_comp_async
# def kwargs_only_36(node):
# keys = node[-1].attr
# num_kwargs = len(keys)
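
A small toy sketch of why these templates index children from the end: optional COME_FROM tokens lengthen a node, so a fixed forward index drifts while a negative one stays put. The child lists below are assumed shapes, not real SyntaxTree nodes:

without_come_from = ["expr", "store", "for_block", "else_suite", "POP_BLOCK"]
with_come_from = ["COME_FROM"] + without_come_from

assert without_come_from[3] == "else_suite"
assert with_come_from[3] != "else_suite"   # forward index drifted
assert without_come_from[-2] == "else_suite"
assert with_come_from[-2] == "else_suite"  # (-2, "else_suite") stays stable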

View File

@@ -62,9 +62,9 @@ def customize_for_version37(self, version):
),
"async_for_stmt37": (
"%|async for %c in %c:\n%+%c%-\n\n",
(7, "store"),
(8, "store"),
(1, "expr"),
(16, "for_block"),
(17, "for_block"),
),
"async_with_stmt": ("%|async with %c:\n%+%c%-", (0, "expr"), 3),
"async_with_as_stmt": (
@@ -75,10 +75,10 @@ def customize_for_version37(self, version):
),
"async_forelse_stmt": (
"%|async for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n",
(7, "store"),
(8, "store"),
(1, "expr"),
(17, "for_block"),
(25, "else_suite"),
(-10, "for_block"),
(-2, "else_suite"),
),
"attribute37": ("%c.%[1]{pattr}", (0, "expr")),
"attributes37": ("%[0]{pattr} import %c",
@@ -146,6 +146,7 @@ def customize_for_version37(self, version):
),
"ifstmtl": ("%|if %c:\n%+%c%-", (0, "testexpr"), (1, "_ifstmts_jumpl")),
'import_as37': ( '%|import %c as %c\n', 2, -2),
"import_from37": ("%|from %[2]{pattr} import %c\n", (3, "importlist37")),
"import_from_as37": (
"%|from %c as %c\n",
(2, "import_from_attr37"),

View File

@@ -17,7 +17,7 @@ Generators and comprehension functions
"""
from xdis import iscode
from xdis import co_flags_is_async, iscode
from uncompyle6.parser import get_python_parser
from uncompyle6.scanner import Code
@@ -25,7 +25,6 @@ from uncompyle6.semantics.consts import PRECEDENCE
from uncompyle6.semantics.helper import is_lambda_mode
from uncompyle6.scanners.tok import Token
class ComprehensionMixin(object):
"""
These functions handle common nonterminal actions that occur
@@ -38,7 +37,8 @@ class ComprehensionMixin(object):
"""
def closure_walk(self, node, collection_index):
"""Dictionary and comprehensions using closure the way they are done in Python3.
"""
Dictionary and other comprehensions using closures, the way they are done in Python 3.
"""
p = self.prec
self.prec = 27
@@ -65,6 +65,10 @@ class ComprehensionMixin(object):
list_if = None
assert n == "comp_iter"
# Pick out important parts of the comprehension:
# * the variables we iterate over: "stores"
# * the results we accumulate: "n"
# Find inner-most node.
while n == "comp_iter":
n = n[0] # recurse one step
@@ -140,7 +144,7 @@ class ComprehensionMixin(object):
assert iscode(cn.attr)
code = Code(cn.attr, self.scanner, self.currentclass)
code = Code(cn.attr, self.scanner, self.currentclass, self.debug_opts["asm"])
# FIXME: is there a way we can avoid this?
# The problem is that in filter in top-level list comprehensions we can
@@ -192,9 +196,11 @@ class ComprehensionMixin(object):
self.write(" in ")
if node[2] == "expr":
iter_expr = node[2]
elif node[3] in ("expr", "get_aiter"):
iter_expr = node[3]
else:
iter_expr = node[-3]
assert iter_expr == "expr"
assert iter_expr in ("expr", "get_aiter"), iter_expr
self.preorder(iter_expr)
self.preorder(tree[iter_index])
self.prec = p
@@ -208,11 +214,16 @@ class ComprehensionMixin(object):
):
"""Non-closure-based comprehensions the way they are done in Python3
and some Python 2.7. Note: there are also other set comprehensions.
Note: there are also other comprehensions.
"""
# FIXME: DRY with listcomp_closure3
p = self.prec
self.prec = PRECEDENCE["lambda_body"] - 1
comp_for = None
# FIXME? Nonterminals in grammar maybe should be split out better?
# Maybe test on self.compile_mode?
if (
@@ -247,52 +258,114 @@ class ComprehensionMixin(object):
is_30_dict_comp = False
store = None
if node == "list_comp_async":
n = tree[2][1]
# We have two different kinds of grammar rules:
# list_comp_async ::= LOAD_LISTCOMP LOAD_STR MAKE_FUNCTION_0 expr ...
# and:
# list_comp_async ::= BUILD_LIST_0 LOAD_ARG list_afor2
if tree[0] == "expr" and tree[0][0] == "list_comp_async":
tree = tree[0][0]
if tree[0] == "BUILD_LIST_0":
list_afor2 = tree[2]
assert list_afor2 == "list_afor2"
store = list_afor2[1]
assert store == "store"
n = list_afor2[3] if list_afor2[3] == "list_iter" else list_afor2[2]
else:
# ???
pass
elif node.kind in ("dict_comp_async", "set_comp_async"):
# We have two different kinds of grammar rules:
# dict_comp_async ::= LOAD_DICTCOMP LOAD_STR MAKE_FUNCTION_0 expr ...
# set_comp_async ::= LOAD_SETCOMP LOAD_STR MAKE_FUNCTION_0 expr ...
# and:
# dict_comp_async ::= BUILD_MAP_0 genexpr_func_async
# set_comp_async ::= BUILD_SET_0 genexpr_func_async
if tree[0] == "expr":
tree = tree[0]
if tree[0].kind in ("BUILD_MAP_0", "BUILD_SET_0"):
genexpr_func_async = tree[1]
if genexpr_func_async == "genexpr_func_async":
store = genexpr_func_async[2]
assert store.kind.startswith("store")
n = genexpr_func_async[4]
assert n == "comp_iter"
comp_for = collection_node
else:
set_afor2 = genexpr_func_async
assert set_afor2 == "set_afor2"
n = set_afor2[1]
store = n[1]
comp_for = node[3]
else:
# ???
pass
elif node == "list_afor":
comp_for = node[0]
list_afor2 = node[1]
assert list_afor2 == "list_afor2"
store = list_afor2[1]
assert store == "store"
n = list_afor2[2]
elif node == "set_afor2":
comp_for = node[0]
set_iter_async = node[1]
assert set_iter_async == "set_iter_async"
store = set_iter_async[1]
assert store == "store"
n = set_iter_async[2]
else:
n = tree[iter_index]
if tree in (
"set_comp_func",
"dict_comp_func",
"genexpr_func_async",
"generator_exp",
"list_comp",
"set_comp",
"set_comp_func",
"set_comp_func_header",
):
for k in tree:
if k == "comp_iter":
if k.kind in ("comp_iter", "list_iter", "set_iter", "await_expr"):
n = k
elif k == "store":
store = k
pass
pass
pass
elif tree in ("dict_comp", "set_comp"):
assert self.version == (3, 0)
for k in tree:
if k in ("dict_comp_header", "set_comp_header"):
n = k
elif k == "store":
store = k
elif k == "dict_comp_iter":
is_30_dict_comp = True
n = (k[3], k[1])
elif tree.kind in ("list_comp_async", "dict_comp_async", "set_afor2"):
if self.version == (3, 0):
for k in tree:
if k in ("dict_comp_header", "set_comp_header"):
n = k
elif k == "store":
store = k
elif k == "dict_comp_iter":
is_30_dict_comp = True
n = (k[3], k[1])
pass
elif k == "comp_iter":
n = k[0]
pass
pass
elif k == "comp_iter":
n = k[0]
pass
pass
elif tree == "list_comp_async":
store = tree[2][1]
else:
assert n == "list_iter", n
if n.kind in ("RETURN_VALUE_LAMBDA", "return_expr_lambda"):
self.prune()
assert n in ("list_iter", "comp_iter"), n
# FIXME: I'm not totally sure this is right.
# Find the list comprehension body. It is the inner-most
# node that is not list_.. .
if_node = None
comp_for = None
comp_store = None
if n == "comp_iter":
if n == "comp_iter" and store is None:
comp_for = n
comp_store = tree[3]
@@ -383,7 +456,10 @@ class ComprehensionMixin(object):
self.preorder(store)
self.write(" in ")
self.preorder(node[in_node_index])
if comp_for:
self.preorder(comp_for)
else:
self.preorder(node[in_node_index])
# Here is where we handle nested list iterations.
if tree == "list_comp" and self.version != (3, 0):
@@ -449,7 +525,7 @@ class ComprehensionMixin(object):
tree = tree[1]
while len(tree) == 1 or (
tree in ("stmt", "sstmt", "return", "return_expr", "return_expr_lambda")
tree in ("stmt", "sstmt", "return", "return_expr")
):
self.prec = 100
if tree[0] in ("dom_start", "dom_start_opt"):
@@ -457,3 +533,136 @@ class ComprehensionMixin(object):
else:
tree = tree[0]
return tree
def listcomp_closure3(self, node):
"""
List comprehensions in Python 3 when handled as a closure.
See if we can combine code.
"""
# FIXME: DRY with comprehension_walk_newer
p = self.prec
self.prec = 27
code_obj = node[1].attr
assert iscode(code_obj), node[1]
code = Code(code_obj, self.scanner, self.currentclass, self.debug_opts["asm"])
tree = self.build_ast(code._tokens, code._customize, code)
self.customize(code._customize)
# skip over: sstmt, stmt, return, return_expr
# and other singleton derivations
while len(tree) == 1 or (
tree in ("sstmt", "return") and tree[-1] in ("RETURN_LAST", "RETURN_VALUE")
):
self.prec = 100
tree = tree[0]
n = tree[1]
# Pick out important parts of the comprehension:
# * the variables we iterate over: "stores"
# * the results we accumulate: "n"
# collections is the name of the expression(s) we are iterating over
collections = [node[-3]]
list_ifs = []
if self.version[:2] == (3, 0) and n != "list_iter":
# FIXME 3.0 is a snowflake here. We need
# special code for this. Not sure if this is totally
# correct.
stores = [tree[3]]
assert tree[4] == "comp_iter"
n = tree[4]
# Find the list comprehension body. It is the inner-most
# node that is not comp_.. .
while n == "comp_iter":
if n[0] == "comp_for":
n = n[0]
stores.append(n[2])
n = n[3]
elif n[0] in ("comp_if", "comp_if_not"):
n = n[0]
# FIXME: just a guess
if n[0].kind == "expr":
list_ifs.append(n)
else:
list_ifs.append([1])
n = n[2]
pass
else:
break
pass
# Skip over n[0] which is something like: _[1]
self.preorder(n[1])
else:
assert n == "list_iter"
stores = []
# Find the list comprehension body. It is the inner-most
# node that is not list_.. .
while n == "list_iter":
# recurse one step
n = n[0]
# FIXME: adjust for set comprehension
if n == "list_for":
stores.append(n[2])
n = n[3]
if n[0] == "list_for":
# Dog-paddle down largely singleton reductions
# to find the collection (expr)
c = n[0][0]
if c == "expr":
c = c[0]
# FIXME: grammar is wonky here? Is this really an attribute?
if c == "attribute":
c = c[0]
collections.append(c)
pass
elif n in ("list_if", "list_if_not", "list_if_or_not"):
if n[0].kind == "expr":
list_ifs.append(n)
else:
list_ifs.append([1])
n = n[-2] if n[-1] == "come_from_opt" else n[-1]
pass
elif n == "list_if37":
list_ifs.append(n)
n = n[-1]
pass
elif n == "list_afor":
collections.append(n[0][0])
n = n[1]
stores.append(n[1][0])
n = n[2] if n[2].kind == "list_iter" else n[3]
pass
assert n == "lc_body", tree
self.preorder(n[0])
# FIXME: add indentation around "for"'s and "in"'s
n_colls = len(collections)
for i, store in enumerate(stores):
if i >= n_colls:
break
token = collections[i]
if not isinstance(token, Token):
token = token.first_child()
if token == "LOAD_DEREF" and co_flags_is_async(code_obj.co_flags):
self.write(" async")
pass
self.write(" for ")
self.preorder(store)
self.write(" in ")
self.preorder(collections[i])
if i < len(list_ifs):
self.preorder(list_ifs[i])
pass
pass
self.prec = p
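
For orientation, a hedged example of the closure case this walker reconstructs; the function is invented:

def scale(factor, values):
    # "factor" is a free variable inside the comprehension's code object, so
    # the compiler builds the listcomp with a closure (load_closure), which
    # is what routes decompilation through listcomp_closure3.
    return [v * factor for v in values if v]

assert scale(2, [1, 0, 3]) == [2, 6]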

View File

@@ -0,0 +1,190 @@
# Copyright (c) 2015-2022 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
All the crazy things we have to do to handle Python functions in Python before 3.0.
The saga of changes continues in 3.0 and above and in other files.
"""
from uncompyle6.scanner import Code
from uncompyle6.semantics.parser_error import ParserError
from uncompyle6.parser import ParserError as ParserError2
from uncompyle6.semantics.helper import (
print_docstring,
find_all_globals,
find_globals_and_nonlocals,
find_none,
)
from xdis import iscode
def make_function1(self, node, is_lambda, nested=1, code_node=None):
"""
Dump function definition, doc string, and function body.
This code is specialized for early Python (1.x).
"""
def build_param(tree, param_names: list) -> tuple:
"""build parameters:
- handle defaults
- handle format tuple parameters
"""
# If a formal parameter is a tuple, the parameter name
# starts with a dot (e.g. '.1', '.2').
args = tree[0]
del tree[0]
params = []
assert args.kind in ("star_args", "args", "varargs")
has_star_arg = args.kind in ("star_args", "varargs")
args_store = args[2]
if args_store == "args_store":
for arg in args_store:
params.append(param_names[arg.attr])
return has_star_arg, params
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
assert node[-1].kind.startswith("BUILD_")
defparams = []
# args_node = node[-1]
# if isinstance(args_node.attr, tuple):
# # positional args are after kwargs
# defparams = node[1 : args_node.attr[0] + 1]
# pos_args, kw_args, annotate_argc = args_node.attr
# else:
# defparams = node[: args_node.attr]
# kw_args = 0
# pass
lambda_index = None
if lambda_index and is_lambda and iscode(node[lambda_index].attr):
assert node[lambda_index].kind == "LOAD_LAMBDA"
code = node[lambda_index].attr
else:
code = code_node.attr
assert iscode(code)
code = Code(code, self.scanner, self.currentclass)
# add defaults values to parameter names
argc = code.co_argcount
paramnames = list(code.co_varnames[:argc])
# defaults are for last n parameters, thus reverse
paramnames.reverse()
defparams.reverse()
try:
tree = self.build_ast(
code._tokens,
code._customize,
code,
is_lambda=is_lambda,
noneInNames=("None" in code.co_names),
)
except (ParserError, ParserError2) as p:
self.write(str(p))
if not self.tolerate_errors:
self.ERROR = p
return
indent = self.indent
# build parameters
has_star_arg, params = build_param(tree, code.co_names)
if has_star_arg:
params[-1] = "*" + params[-1]
# dump parameter list (with default values)
if is_lambda:
self.write("lambda ", ", ".join(params))
# If the last statement is None (which is the
# same thing as "return None" in a lambda) and the
# next-to-last statement is a "yield", then we want to
# drop the (return) None, since that was just put there
# to have something to return after the yield finishes.
# FIXME: this is a bit hokey and not general
if (
len(tree) > 1
and self.traverse(tree[-1]) == "None"
and self.traverse(tree[-2]).strip().startswith("yield")
):
del tree[-1]
# Now pick out the expr part of the last statement
tree_expr = tree[-1]
while tree_expr.kind != "expr":
tree_expr = tree_expr[0]
tree[-1] = tree_expr
pass
else:
self.write("(", ", ".join(params))
# if kw_args > 0:
# if not (4 & code.co_flags):
# if argc > 0:
# self.write(", *, ")
# else:
# self.write("*, ")
# pass
# else:
# self.write(", ")
# for n in node:
# if n == "pos_arg":
# continue
# else:
# self.preorder(n)
# break
# pass
# if code_has_star_star_arg(code):
# if argc > 0:
# self.write(", ")
# self.write("**%s" % code.co_varnames[argc + kw_pairs])
if is_lambda:
self.write(": ")
else:
self.println("):")
if (
len(code.co_consts) > 0 and code.co_consts[0] is not None and not is_lambda
): # ugly
# docstring exists, dump it
print_docstring(self, indent, code.co_consts[0])
if not is_lambda:
assert tree == "stmts"
all_globals = find_all_globals(tree, set())
globals, nonlocals = find_globals_and_nonlocals(
tree, set(), set(), code, self.version
)
# Python 1 doesn't support the "nonlocal" statement
for g in sorted((all_globals & self.mod_globs) | globals):
self.println(self.indent, "global ", g)
self.mod_globs -= all_globals
has_none = "None" in code.co_names
rn = has_none and not find_none(tree)
tree.code = code
self.gen_source(
tree, code.co_name, code._customize, is_lambda=is_lambda, returnNone=rn
)
code._tokens = None # save memory
code._customize = None # save memory
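
The dot-named parameters that build_param() decodes come from tuple parameters, which early Python spells as below; this is Python 1.x/2.x syntax and will not parse under Python 3:

# The compiler stores the tuple parameter as ".1" and unpacks it on entry;
# build_param() maps the dot name back to the written form.
def area((width, height)):
    return width * height

print area((3, 4))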

View File

@@ -226,6 +226,7 @@ class NonterminalActions:
self.indent_more(INDENT_PER_LEVEL)
sep = ""
line_len = len(self.indent)
if is_dict:
keys = flat_elems[-1].attr
assert isinstance(keys, tuple)
@@ -235,26 +236,44 @@ class NonterminalActions:
value = elem.pattr
if elem.linestart is not None:
if elem.linestart != self.line_number:
sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
next_indent = self.indent + INDENT_PER_LEVEL[:-1]
line_len = len(next_indent)
sep += "\n" + next_indent
self.line_number = elem.linestart
else:
if sep != "":
sep += " "
self.write("%s %s: %s" % (sep, repr(keys[i]), value))
sep = ","
sep += ", "
elif line_len > 80:
next_indent = self.indent + INDENT_PER_LEVEL[:-1]
line_len = len(next_indent)
sep += "\n" + next_indent
sep_key_value = "%s %s: %s" % (sep, repr(keys[i]), value)
line_len += len(sep_key_value)
self.write(sep_key_value)
sep = ", "
else:
for elem in flat_elems:
assert elem.kind == "ADD_VALUE"
value = elem.pattr
if elem.linestart is not None:
if elem.linestart != self.line_number:
sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
next_indent = self.indent + INDENT_PER_LEVEL[:-1]
line_len += len(next_indent)
sep += "\n" + next_indent
self.line_number = elem.linestart
else:
if sep != "":
sep += " "
line_len += len(sep)
elif line_len > 80:
next_indent = self.indent + INDENT_PER_LEVEL[:-1]
line_len = len(next_indent)
sep += "\n" + next_indent
line_len += len(sep) + len(str(value)) + 1
self.write(sep, value)
sep = ","
sep = ", "
self.write(endchar)
self.indent_less(INDENT_PER_LEVEL)
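
A minimal sketch of the wrapping policy used above, assuming the same 80-column budget; wrap_elements() is an invented helper, and the real code additionally resynchronizes on source line starts via elem.linestart:

def wrap_elements(values, indent="    ", width=80):
    # Join repr()s with ", ", starting a new indented line once the current
    # line would pass the column budget, mirroring the line_len bookkeeping.
    parts, line_len, sep = [indent], len(indent), ""
    for value in values:
        chunk = sep + repr(value)
        if sep and line_len + len(chunk) > width:
            parts.append(sep.rstrip() + "\n" + indent)
            line_len = len(indent)
            chunk = repr(value)
        parts.append(chunk)
        line_len += len(chunk)
        sep = ", "
    return "".join(parts)

print(wrap_elements(list(range(40))))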
@@ -662,17 +681,20 @@ class NonterminalActions:
self.write("(")
iter_index = 3
if self.version > (3, 2):
code_index = -6
if self.version > (3, 6):
# Python 3.7+ adds optional "come_froms" at node[0]
if self.version >= (3, 6):
if node[0].kind in ("load_closure", "load_genexpr") and self.version >= (3, 8):
code_index = -6
is_lambda = self.is_lambda
if node[0].kind == "load_genexpr":
self.is_lambda = False
self.closure_walk(node, collection_index=4)
self.is_lambda = is_lambda
else:
code_index = -6
# Python 3.7+ adds optional "come_froms" at node[0] so count from the end
if node == "generator_exp_async" and self.version[:2] == (3, 6):
code_index = 0
else:
code_index = -6
if self.version < (3, 8):
iter_index = 4
else:

View File

@@ -143,6 +143,7 @@ from uncompyle6.scanner import Code, get_scanner
import uncompyle6.parser as python_parser
from uncompyle6.semantics.check_ast import checker
from uncompyle6.semantics.make_function1 import make_function1
from uncompyle6.semantics.make_function2 import make_function2
from uncompyle6.semantics.make_function3 import make_function3
from uncompyle6.semantics.make_function36 import make_function36
@@ -151,9 +152,7 @@ from uncompyle6.semantics.customize import customize_for_version
from uncompyle6.semantics.gencomp import ComprehensionMixin
from uncompyle6.semantics.helper import (
print_docstring,
find_code_node,
find_globals_and_nonlocals,
flatten_list,
)
from uncompyle6.scanners.tok import Token
@@ -176,7 +175,6 @@ from uncompyle6.semantics.consts import (
TAB,
TABLE_R,
escape,
minint,
)
@@ -551,7 +549,9 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
# Python changes make function this much that we need at least 3 different routines,
# and probably more in the future.
def make_function(self, node, is_lambda, nested=1, code_node=None, annotate=None):
if self.version <= (2, 7):
if self.version <= (1, 2):
make_function1(self, node, is_lambda, nested, code_node)
elif self.version <= (2, 7):
make_function2(self, node, is_lambda, nested, code_node)
elif (3, 0) <= self.version <= (3, 5):
make_function3(self, node, is_lambda, nested, code_node)
@@ -1006,6 +1006,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
result = "(%s)" % result
return result
# return self.traverse(node[1])
return "(" + name
raise Exception("Can't find tuple parameter " + name)
def build_class(self, code):