Merge branch 'python-3.0-to-3.2' into python-2.4

This commit is contained in:
rocky
2023-03-25 02:45:06 -04:00
26 changed files with 248 additions and 257 deletions

View File

@@ -1,76 +0,0 @@
version: 2
filters:
branches:
only: python-2.4
jobs:
build:
parallelism: 1
shell: /bin/bash --login
# CircleCI 2.0 does not support environment variables that refer to each other the same way as 1.0 did.
# If any of these refer to each other, rewrite them so that they don't or see https://circleci.com/docs/2.0/env-vars/#interpolating-environment-variables-to-set-other-environment-variables .
environment:
CIRCLE_ARTIFACTS: /tmp/circleci-artifacts
CIRCLE_TEST_REPORTS: /tmp/circleci-test-results
COMPILE: --compile
# To see the list of pre-built images that CircleCI provides for most common languages see
# https://circleci.com/docs/2.0/circleci-images/
docker:
- image: circleci/python:2.7
steps:
# Machine Setup
# If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each
# The following `checkout` command checks out your code to your working directory. In 1.0 we did this implicitly. In 2.0 you can choose where in the course of a job your code should be checked out.
- checkout
# Prepare for artifact and test results collection equivalent to how it was done on 1.0.
# In many cases you can simplify this from what is generated here.
# 'See docs on artifact collection here https://circleci.com/docs/2.0/artifacts/'
- run: mkdir -p $CIRCLE_ARTIFACTS $CIRCLE_TEST_REPORTS
# Dependencies
# This would typically go in either a build or a build-and-test job when using workflows
# Restore the dependency cache
- restore_cache:
keys:
- v2-dependencies-{{ .Branch }}-
# fallback to using the latest cache if no exact match is found
- v2-dependencies-
- run:
command: | # Use pip to install dependengcies
# Install the version of click that works with Python 2.7.
# DO this before upgrading setuptool
sudo easy_install click==7.1.2
# Install a version of xdis that works for this code
pip install --user git+https://github.com/rocky/python-xdis@python-2.4/#egg=xdis
# Install a version of spark-parser that works for this code
pip install git+https://github.com/rocky/spark-parser@python-2.4/#egg=spark-parser
pip install --user -e .
pip install --user -r requirements-dev.txt
# Save dependency cache
- save_cache:
key: v2-dependencies-{{ .Branch }}-{{ epoch }}
paths:
# This is a broad list of cache paths to include many possible development environments
# You can probably delete some of these entries
- vendor/bundle
- ~/virtualenvs
- ~/.m2
- ~/.ivy2
- ~/.bundle
- ~/.cache/bower
# Test
# This would typically be a build job when using workflows, possibly combined with build
# This is based on your 1.0 configuration file or project settings
- run: make check-2.7
# - run: cd test/stdlib && bash -x ./runtests-circleci.sh
# Teardown
# If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each
# Save test results
- store_test_results:
path: /tmp/circleci-test-results
# Save artifacts
- store_artifacts:
path: /tmp/circleci-artifacts
- store_artifacts:
path: /tmp/circleci-test-results

1
.github/ISSUE_TEMPLATE/config.yml vendored Normal file
View File

@@ -0,0 +1 @@
blank_issues_enabled: False

View File

@@ -75,7 +75,7 @@ entry_points = {
]
}
ftp_url = None
install_requires = ["spark-parser >= 1.8.9, < 1.9.0", "xdis >= 6.0.2, < 6.1.0"]
install_requires = ["spark-parser >= 1.8.9, < 1.9.0", "xdis >= 6.0.2, < 6.2.0"]
license = "GPL3"
mailing_list = "python-debugger@googlegroups.com"

View File

@@ -5,4 +5,4 @@ if [[ $0 == ${BASH_SOURCE[0]} ]] ; then
echo "This script should be *sourced* rather than run directly through bash"
exit 1
fi
export PYVERSIONS='3.6.15 pypy3.6-7.3.1 3.7.16 pypy-3.7-7.3.9 pypy3.8-7.3.10 pyston-2.3.5 3.8.16'
export PYVERSIONS='3.6.15 pypy3.6-7.3.1 3.7.16 pypy3.7-7.3.9 pypy3.8-7.3.10 pyston-2.3.5 3.8.16'

View File

@@ -1,5 +1,5 @@
#!/bin/bash
PYTHON_VERSION=3.7.14
PYTHON_VERSION=3.7.16
function checkout_version {
local repo=$1

View File

@@ -7,5 +7,5 @@ PYTHON ?= python
test check pytest:
@PYTHON_VERSION=`$(PYTHON) -V 2>&1 | cut -d ' ' -f 2 | cut -d'.' -f1,2`; \
if [[ $$PYTHON_VERSION > 3.2 ]] || [[ $$PYTHON_VERSION == 2.7 ]] || [[ $$PYTHON_VERSION == 2.6 ]]; then \
py.test; \
$(PYTHON) -m pytest .; \
fi

View File

@@ -126,6 +126,7 @@ def test_grammar():
opcode_set.add("THEN")
check_tokens(tokens, opcode_set)
elif PYTHON_VERSION_TRIPLE[:2] == (3, 4):
ignore_set.add("LOAD_ARG") # Used in grammar for comprehension. But not in 3.4
ignore_set.add("LOAD_CLASSNAME")
ignore_set.add("STORE_LOCALS")
opcode_set = set(s.opc.opname).union(ignore_set)

View File

@@ -3,3 +3,5 @@ pytest
-e .
xdis>=6.0.4
configobj~=5.0.6
setuptools~=65.5.1

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,18 @@
# Related to #426
# This file is RUNNABLE!
"""This program is self-checking!"""
a = 5
class MakeClosureTest():
# This function uses MAKE_CLOSURE with annotation args
def __init__(self, dev: str, b: bool):
super().__init__()
self.dev = dev
self.b = b
self.a = a
x = MakeClosureTest("dev", True)
assert x.dev == "dev"
assert x.b == True
assert x.a == 5

View File

@@ -27,11 +27,20 @@ Step 2: Run the test:
test_pythonlib.py --mylib --verify # decompile verify 'mylib'
"""
import getopt, os, py_compile, sys, shutil, tempfile, time
from __future__ import print_function
import getopt
import os
import py_compile
import shutil
import sys
import tempfile
import time
from fnmatch import fnmatch
from xdis.version_info import PYTHON_VERSION_TRIPLE
from uncompyle6.main import main
from xdis.version_info import PYTHON_VERSION, PYTHON_VERSION_TRIPLE
def get_srcdir():

View File

@@ -199,7 +199,7 @@ class PythonParser(GenericASTBuilder):
if instructions[finish].linestart:
break
pass
if start > 0:
if start >= 0:
err_token = instructions[index]
print("Instruction context:")
for i in range(start, finish):
@@ -213,10 +213,16 @@ class PythonParser(GenericASTBuilder):
raise ParserError(None, -1, self.debug["reduce"])
def get_pos_kw(self, token):
"""Return then the number of positional parameters and
represented by the attr field of token"""
"""
Return the number of positional parameters and keyword
parameters represented by the attr (operand) field of
token.
This appears in CALL_FUNCTION or CALL_METHOD (PyPy) tokens
"""
# Low byte indicates number of positional parameters,
# high byte number of keyword parameters
assert token.kind.startswith("CALL_FUNCTION") or token.kind.startswith("CALL_METHOD")
args_pos = token.attr & 0xFF
args_kw = (token.attr >> 8) & 0xFF
return args_pos, args_kw

View File

@@ -543,9 +543,9 @@ class Python3Parser(PythonParser):
# token found, while this one doesn't.
if self.version < (3, 6):
call_function = self.call_fn_name(call_fn_tok)
args_pos, args_kw = self.get_pos_kw(call_fn_tok)
pos_args_count, kw_args_count = self.get_pos_kw(call_fn_tok)
rule = "build_class ::= LOAD_BUILD_CLASS mkfunc %s" "%s" % (
("expr " * (args_pos - 1) + ("kwarg " * args_kw)),
("expr " * (pos_args_count - 1) + ("kwarg " * kw_args_count)),
call_function,
)
else:
@@ -554,10 +554,10 @@ class Python3Parser(PythonParser):
if call_function.startswith("CALL_FUNCTION_KW"):
self.addRule("classdef ::= build_class_kw store", nop_func)
if is_pypy:
args_pos, args_kw = self.get_pos_kw(call_fn_tok)
pos_args_count, kw_args_count = self.get_pos_kw(call_fn_tok)
rule = "build_class_kw ::= LOAD_BUILD_CLASS mkfunc %s%s%s" % (
"expr " * (args_pos - 1),
"kwarg " * (args_kw),
"expr " * (pos_args_count - 1),
"kwarg " * (kw_args_count),
call_function,
)
else:
@@ -583,7 +583,7 @@ class Python3Parser(PythonParser):
classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc {expr}^n-1 CALL_FUNCTION_n
"""
args_pos, args_kw = self.get_pos_kw(token)
pos_args_count, kw_args_count = self.get_pos_kw(token)
# Additional exprs for * and ** args:
# 0 if neither
@@ -592,7 +592,7 @@ class Python3Parser(PythonParser):
# Yes, this computation based on instruction name is a little bit hoaky.
nak = (len(opname) - len("CALL_FUNCTION")) // 3
uniq_param = args_kw + args_pos
uniq_param = kw_args_count + pos_args_count
# Note: 3.5+ have subclassed this method; so we don't handle
# 'CALL_FUNCTION_VAR' or 'CALL_FUNCTION_EX' here.
@@ -601,16 +601,16 @@ class Python3Parser(PythonParser):
token.kind = self.call_fn_name(token)
rule = (
"call ::= expr "
+ ("pos_arg " * args_pos)
+ ("kwarg " * args_kw)
+ ("pos_arg " * pos_args_count)
+ ("kwarg " * kw_args_count)
+ token.kind
)
else:
token.kind = self.call_fn_name(token)
rule = (
"call ::= expr "
+ ("pos_arg " * args_pos)
+ ("kwarg " * args_kw)
+ ("pos_arg " * pos_args_count)
+ ("kwarg " * kw_args_count)
+ "expr " * nak
+ token.kind
)
@@ -618,11 +618,11 @@ class Python3Parser(PythonParser):
self.add_unique_rule(rule, token.kind, uniq_param, customize)
if "LOAD_BUILD_CLASS" in self.seen_ops:
if next_token == "CALL_FUNCTION" and next_token.attr == 1 and args_pos > 1:
if next_token == "CALL_FUNCTION" and next_token.attr == 1 and pos_args_count > 1:
rule = "classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d" % (
("expr " * (args_pos - 1)),
("expr " * (pos_args_count - 1)),
opname,
args_pos,
pos_args_count,
)
self.add_unique_rule(rule, token.kind, uniq_param, customize)
@@ -941,14 +941,14 @@ class Python3Parser(PythonParser):
elif opname_base == "CALL_METHOD":
# PyPy and Python 3.7+ only - DRY with parse2
args_pos, args_kw = self.get_pos_kw(token)
pos_args_count, kw_args_count = self.get_pos_kw(token)
# number of apply equiv arguments:
nak = (len(opname_base) - len("CALL_METHOD")) // 3
rule = (
"call ::= expr "
+ ("pos_arg " * args_pos)
+ ("kwarg " * args_kw)
+ ("pos_arg " * pos_args_count)
+ ("kwarg " * kw_args_count)
+ "expr " * nak
+ opname
)
@@ -1082,7 +1082,7 @@ class Python3Parser(PythonParser):
"""
self.addRule(rule, nop_func)
args_pos, args_kw, annotate_args = token.attr
pos_args_count, kw_args_count, annotate_args = token.attr
# FIXME: Fold test into add_make_function_rule
if self.version < (3, 3):
@@ -1091,7 +1091,7 @@ class Python3Parser(PythonParser):
j = 2
if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
rule_pat = "lambda_body ::= %sload_closure LOAD_LAMBDA %%s%s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
opname,
)
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
@@ -1099,7 +1099,7 @@ class Python3Parser(PythonParser):
if has_get_iter_call_function1:
rule_pat = (
"generator_exp ::= %sload_closure load_genexpr %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
@@ -1115,7 +1115,7 @@ class Python3Parser(PythonParser):
rule_pat = (
"listcomp ::= %sload_closure LOAD_LISTCOMP %%s%s expr "
"GET_ITER CALL_FUNCTION_1"
% ("pos_arg " * args_pos, opname)
% ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
@@ -1124,7 +1124,7 @@ class Python3Parser(PythonParser):
rule_pat = (
"set_comp ::= %sload_closure LOAD_SETCOMP %%s%s expr "
"GET_ITER CALL_FUNCTION_1"
% ("pos_arg " * args_pos, opname)
% ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
@@ -1135,13 +1135,13 @@ class Python3Parser(PythonParser):
self.add_unique_rule(
"dict_comp ::= %sload_closure LOAD_DICTCOMP %s "
"expr GET_ITER CALL_FUNCTION_1"
% ("pos_arg " * args_pos, opname),
% ("pos_arg " * pos_args_count, opname),
opname,
token.attr,
customize,
)
if args_kw > 0:
if kw_args_count > 0:
kwargs_str = "kwargs "
else:
kwargs_str = ""
@@ -1153,36 +1153,40 @@ class Python3Parser(PythonParser):
"mkfunc_annotate ::= %s%s%sannotate_tuple load_closure LOAD_CODE %s"
% (
kwargs_str,
"pos_arg " * args_pos,
"annotate_arg " * (annotate_args - 1),
"pos_arg " * pos_args_count,
"annotate_arg " * (annotate_args),
opname,
)
)
else:
rule = "mkfunc ::= %s%sload_closure LOAD_CODE %s" % (
kwargs_str,
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
opname,
)
elif self.version == (3, 3):
self.add_unique_rule(rule, opname, token.attr, customize)
elif (3, 3) <= self.version < (3, 6):
if annotate_args > 0:
rule = (
"mkfunc_annotate ::= %s%s%sannotate_tuple load_closure LOAD_CODE LOAD_STR %s"
% (
kwargs_str,
"pos_arg " * args_pos,
"annotate_arg " * (annotate_args - 1),
"pos_arg " * pos_args_count,
"annotate_arg " * (annotate_args),
opname,
)
)
else:
rule = "mkfunc ::= %s%sload_closure LOAD_CODE LOAD_STR %s" % (
kwargs_str,
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
elif self.version >= (3, 4):
if self.version >= (3, 4):
if not self.is_pypy:
load_op = "LOAD_STR"
else:
@@ -1192,16 +1196,16 @@ class Python3Parser(PythonParser):
rule = (
"mkfunc_annotate ::= %s%s%sannotate_tuple load_closure %s %s"
% (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
kwargs_str,
"annotate_arg " * (annotate_args - 1),
"annotate_arg " * (annotate_args),
load_op,
opname,
)
)
else:
rule = "mkfunc ::= %s%s load_closure LOAD_CODE %s %s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
kwargs_str,
load_op,
opname,
@@ -1209,16 +1213,16 @@ class Python3Parser(PythonParser):
self.add_unique_rule(rule, opname, token.attr, customize)
if args_kw == 0:
if kw_args_count == 0:
rule = "mkfunc ::= %sload_closure load_genexpr %s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
if self.version < (3, 4):
rule = "mkfunc ::= %sload_closure LOAD_CODE %s" % (
"expr " * args_pos,
"expr " * pos_args_count,
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
@@ -1229,10 +1233,10 @@ class Python3Parser(PythonParser):
if self.version >= (3, 6):
# The semantics of MAKE_FUNCTION in 3.6 are totally different from
# before.
args_pos, args_kw, annotate_args, closure = token.attr
stack_count = args_pos + args_kw + annotate_args
pos_args_count, kw_args_count, annotate_args, closure = token.attr
stack_count = pos_args_count + kw_args_count + annotate_args
if closure:
if args_pos:
if pos_args_count:
rule = "lambda_body ::= %s%s%s%s" % (
"expr " * stack_count,
"load_closure " * closure,
@@ -1265,14 +1269,14 @@ class Python3Parser(PythonParser):
if has_get_iter_call_function1:
rule_pat = (
"generator_exp ::= %sload_genexpr %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
)
rule_pat = (
"generator_exp ::= %sload_closure load_genexpr %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
@@ -1294,7 +1298,7 @@ class Python3Parser(PythonParser):
rule_pat = (
"listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
"GET_ITER CALL_FUNCTION_1"
% ("expr " * args_pos, opname)
% ("expr " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
@@ -1302,8 +1306,8 @@ class Python3Parser(PythonParser):
if self.is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LAMBDA"):
rule_pat = "lambda_body ::= %s%sLOAD_LAMBDA %%s%s" % (
("pos_arg " * args_pos),
("kwarg " * args_kw),
("pos_arg " * pos_args_count),
("kwarg " * kw_args_count),
opname,
)
self.add_make_function_rule(
@@ -1312,9 +1316,9 @@ class Python3Parser(PythonParser):
continue
if self.version < (3, 6):
args_pos, args_kw, annotate_args = token.attr
pos_args_count, kw_args_count, annotate_args = token.attr
else:
args_pos, args_kw, annotate_args, closure = token.attr
pos_args_count, kw_args_count, annotate_args, closure = token.attr
if self.version < (3, 3):
j = 1
@@ -1324,7 +1328,7 @@ class Python3Parser(PythonParser):
if has_get_iter_call_function1:
rule_pat = (
"generator_exp ::= %sload_genexpr %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * args_pos, opname)
"GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
)
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
@@ -1336,7 +1340,7 @@ class Python3Parser(PythonParser):
# Todo: For Pypy we need to modify this slightly
rule_pat = (
"listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ("expr " * args_pos, opname)
"GET_ITER CALL_FUNCTION_1" % ("expr " * pos_args_count, opname)
)
self.add_make_function_rule(
rule_pat, opname, token.attr, customize
@@ -1345,13 +1349,13 @@ class Python3Parser(PythonParser):
# FIXME: Fold test into add_make_function_rule
if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
rule_pat = "lambda_body ::= %s%sLOAD_LAMBDA %%s%s" % (
("pos_arg " * args_pos),
("kwarg " * args_kw),
("pos_arg " * pos_args_count),
("kwarg " * kw_args_count),
opname,
)
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
if args_kw == 0:
if kw_args_count == 0:
kwargs = "no_kwargs"
self.add_unique_rule("no_kwargs ::=", opname, token.attr, customize)
else:
@@ -1361,13 +1365,13 @@ class Python3Parser(PythonParser):
# positional args after keyword args
rule = "mkfunc ::= %s %s%s%s" % (
kwargs,
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
"LOAD_CODE ",
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
rule = "mkfunc ::= %s%s%s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
"LOAD_CODE ",
opname,
)
@@ -1375,14 +1379,14 @@ class Python3Parser(PythonParser):
# positional args after keyword args
rule = "mkfunc ::= %s %s%s%s" % (
kwargs,
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
"LOAD_CODE LOAD_STR ",
opname,
)
elif self.version >= (3, 6):
# positional args before keyword args
rule = "mkfunc ::= %s%s %s%s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
kwargs,
"LOAD_CODE LOAD_STR ",
opname,
@@ -1390,7 +1394,7 @@ class Python3Parser(PythonParser):
elif self.version >= (3, 4):
# positional args before keyword args
rule = "mkfunc ::= %s%s %s%s" % (
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
kwargs,
"LOAD_CODE LOAD_STR ",
opname,
@@ -1398,7 +1402,7 @@ class Python3Parser(PythonParser):
else:
rule = "mkfunc ::= %s%sexpr %s" % (
kwargs,
"pos_arg " * args_pos,
"pos_arg " * pos_args_count,
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
@@ -1408,8 +1412,8 @@ class Python3Parser(PythonParser):
rule = (
"mkfunc_annotate ::= %s%sannotate_tuple LOAD_CODE LOAD_STR %s"
% (
("pos_arg " * (args_pos)),
("call " * (annotate_args - 1)),
("pos_arg " * pos_args_count),
("call " * annotate_args),
opname,
)
)
@@ -1417,8 +1421,8 @@ class Python3Parser(PythonParser):
rule = (
"mkfunc_annotate ::= %s%sannotate_tuple LOAD_CODE LOAD_STR %s"
% (
("pos_arg " * (args_pos)),
("annotate_arg " * (annotate_args - 1)),
("pos_arg " * pos_args_count),
("annotate_arg " * annotate_args),
opname,
)
)
@@ -1429,21 +1433,21 @@ class Python3Parser(PythonParser):
if self.version == (3, 3):
# 3.3 puts kwargs before pos_arg
pos_kw_tuple = (
("kwargs " * args_kw),
("pos_arg " * (args_pos)),
("kwargs " * kw_args_count),
("pos_arg " * pos_args_count),
)
else:
# 3.4 and 3.5 put pos_arg before kwargs
pos_kw_tuple = (
"pos_arg " * (args_pos),
("kwargs " * args_kw),
"pos_arg " * (pos_args_count),
("kwargs " * kw_args_count),
)
rule = (
"mkfunc_annotate ::= %s%s%sannotate_tuple LOAD_CODE LOAD_STR EXTENDED_ARG %s"
% (
pos_kw_tuple[0],
pos_kw_tuple[1],
("call " * (annotate_args - 1)),
("call " * annotate_args),
opname,
)
)
@@ -1453,7 +1457,7 @@ class Python3Parser(PythonParser):
% (
pos_kw_tuple[0],
pos_kw_tuple[1],
("annotate_arg " * (annotate_args - 1)),
("annotate_arg " * annotate_args),
opname,
)
)
@@ -1462,9 +1466,9 @@ class Python3Parser(PythonParser):
rule = (
"mkfunc_annotate ::= %s%s%sannotate_tuple LOAD_CODE EXTENDED_ARG %s"
% (
("kwargs " * args_kw),
("pos_arg " * (args_pos)),
("annotate_arg " * (annotate_args - 1)),
("kwargs " * kw_args_count),
("pos_arg " * (pos_args_count)),
("annotate_arg " * annotate_args),
opname,
)
)
@@ -1472,9 +1476,9 @@ class Python3Parser(PythonParser):
rule = (
"mkfunc_annotate ::= %s%s%sannotate_tuple LOAD_CODE EXTENDED_ARG %s"
% (
("kwargs " * args_kw),
("pos_arg " * (args_pos)),
("call " * (annotate_args - 1)),
("kwargs " * kw_args_count),
("pos_arg " * pos_args_count),
("call " * annotate_args),
opname,
)
)

View File

@@ -73,8 +73,8 @@ class Python30Parser(Python31Parser):
# Need to keep LOAD_FAST as index 1
set_comp_header ::= BUILD_SET_0 DUP_TOP STORE_FAST
set_comp_func ::= set_comp_header
LOAD_FAST FOR_ITER store comp_iter
JUMP_BACK POP_TOP JUMP_BACK RETURN_VALUE RETURN_LAST
LOAD_ARG FOR_ITER store comp_iter
JUMP_BACK COME_FROM POP_TOP JUMP_BACK RETURN_VALUE RETURN_LAST
list_comp_header ::= BUILD_LIST_0 DUP_TOP STORE_FAST
list_comp ::= list_comp_header

View File

@@ -90,7 +90,7 @@ class Python32Parser(Python3Parser):
"LOAD_CONST LOAD_CODE EXTENDED_ARG %s"
) % (
("pos_arg " * args_pos),
("annotate_arg " * (annotate_args - 1)),
("annotate_arg " * (annotate_args)),
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2016-2017, 2019-2020, 2022 Rocky Bernstein
# Copyright (c) 2016-2017, 2019-2020, 2022-2023 Rocky Bernstein
"""
Python 3.7 base code. We keep non-custom-generated grammar rules out of this file.
"""
@@ -431,35 +431,39 @@ class Python37BaseParser(PythonParser):
"BUILD_TUPLE",
"BUILD_TUPLE_UNPACK",
):
v = token.attr
collection_size = token.attr
is_LOAD_CLOSURE = False
if opname_base == "BUILD_TUPLE":
# If is part of a "load_closure", then it is not part of a
# "list".
is_LOAD_CLOSURE = True
for j in range(v):
for j in range(collection_size):
if tokens[i - j - 1].kind != "LOAD_CLOSURE":
is_LOAD_CLOSURE = False
break
if is_LOAD_CLOSURE:
rule = "load_closure ::= %s%s" % (("LOAD_CLOSURE " * v), opname)
rule = "load_closure ::= %s%s" % (
("LOAD_CLOSURE " * collection_size),
opname,
)
self.add_unique_rule(rule, opname, token.attr, customize)
if not is_LOAD_CLOSURE or v == 0:
if not is_LOAD_CLOSURE or collection_size == 0:
# We do this complicated test to speed up parsing of
# pathologically long literals, especially those over 1024.
build_count = token.attr
thousands = build_count // 1024
thirty32s = (build_count // 32) % 32
thousands = collection_size // 1024
thirty32s = (collection_size // 32) % 32
if thirty32s > 0:
rule = "expr32 ::=%s" % (" expr" * 32)
self.add_unique_rule(rule, opname_base, build_count, customize)
self.add_unique_rule(
rule, opname_base, collection_size, customize
)
pass
if thousands > 0:
self.add_unique_rule(
"expr1024 ::=%s" % (" expr32" * 32),
opname_base,
build_count,
collection_size,
customize,
)
pass
@@ -468,7 +472,7 @@ class Python37BaseParser(PythonParser):
("%s ::= " % collection)
+ "expr1024 " * thousands
+ "expr32 " * thirty32s
+ "expr " * (build_count % 32)
+ "expr " * (collection_size % 32)
+ opname
)
self.add_unique_rules(["expr ::= %s" % collection, rule], customize)
@@ -478,8 +482,8 @@ class Python37BaseParser(PythonParser):
if token.attr == 2:
self.add_unique_rules(
[
"expr ::= build_slice2",
"build_slice2 ::= expr expr BUILD_SLICE_2",
"expr ::= slice2",
"slice2 ::= expr expr BUILD_SLICE_2",
],
customize,
)
@@ -489,8 +493,8 @@ class Python37BaseParser(PythonParser):
)
self.add_unique_rules(
[
"expr ::= build_slice3",
"build_slice3 ::= expr expr expr BUILD_SLICE_3",
"expr ::= slice3",
"slice3 ::= expr expr expr BUILD_SLICE_3",
],
customize,
)
@@ -524,6 +528,7 @@ class Python37BaseParser(PythonParser):
if opname == "CALL_FUNCTION" and token.attr == 1:
rule = """
expr ::= dict_comp
dict_comp ::= LOAD_DICTCOMP LOAD_STR MAKE_FUNCTION_0 expr
GET_ITER CALL_FUNCTION_1
classdefdeco1 ::= expr classdefdeco2 CALL_FUNCTION_1
@@ -558,11 +563,12 @@ class Python37BaseParser(PythonParser):
nak = (len(opname_base) - len("CALL_METHOD")) // 3
rule = (
"call ::= expr "
+ ("expr " * args_pos)
+ ("pos_arg " * args_pos)
+ ("kwarg " * args_kw)
+ "expr " * nak
+ opname
)
self.add_unique_rule(rule, opname, token.attr, customize)
elif opname == "CONTINUE":
@@ -1252,21 +1258,9 @@ class Python37BaseParser(PythonParser):
try:
if fn:
return fn(self, lhs, n, rule, ast, tokens, first, last)
except:
except Exception:
import sys, traceback
print(
("Exception in %s %s\n"
+ "rule: %s\n"
+ "offsets %s .. %s")
% (
fn.__name__,
sys.exc_info()[1],
rule2str(rule),
tokens[first].offset,
tokens[last].offset,
)
)
print(traceback.print_tb(sys.exc_info()[2], -1))
raise ParserError(tokens[last], tokens[last].off2int(), self.debug["rules"])

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2016, 2018-2022 by Rocky Bernstein
# Copyright (c) 2016, 2018-2023 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 1999 John Aycock
@@ -31,8 +31,8 @@ from xdis import (
Bytecode,
canonic_python_version,
code2num,
instruction_size,
extended_arg_val,
instruction_size,
next_offset,
)
from xdis.version_info import PYTHON_VERSION_TRIPLE
@@ -601,8 +601,25 @@ class Scanner(object):
return self.Token
def parse_fn_counts(argc):
return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)
# TODO: after the next xdis release, use from there instead.
def parse_fn_counts_30_35(argc):
"""
In Python 3.0 to 3.5, MAKE_CLOSURE and MAKE_FUNCTION encode
the argument counts of positional, default + named, and annotation
arguments using a particular kind of encoding, where each of
the entries is a packed byte value in the lower 24 bits
of ``argc``. The high bits of ``argc`` may have come from
an EXTENDED_ARG instruction. Here, we unpack the values
from the ``argc`` int and return a triple of the
positional args, named args, and annotation args.
"""
annotate_count = (argc >> 16) & 0x7FFF
# For some reason that I don't understand, annotate_args is off by one
# when there is an EXTENDED_ARG instruction from what is documented in
# https://docs.python.org/3.4/library/dis.html#opcode-MAKE_CLOSURE
if annotate_count > 1:
annotate_count -= 1
return ((argc & 0xFF), (argc >> 8) & 0xFF, annotate_count)
def get_scanner(version, is_pypy=False, show_asm=None):

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2015-2019, 2021-2022 by Rocky Bernstein
# Copyright (c) 2015-2019, 2021-2023 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
@@ -44,7 +44,7 @@ from xdis import iscode, instruction_size, Instruction
from xdis.bytecode import _get_const_info
from uncompyle6.scanners.tok import Token
from uncompyle6.scanner import parse_fn_counts
from uncompyle6.scanner import parse_fn_counts_30_35
import xdis
# Get all the opcodes into globals
@@ -626,23 +626,31 @@ class Scanner3(Scanner):
flags >>= 1
attr = attr[:4] # remove last value: attr[5] == False
else:
pos_args, name_pair_args, annotate_args = parse_fn_counts(
pos_args, name_pair_args, annotate_args = parse_fn_counts_30_35(
inst.argval
)
pattr = "%d positional, %d keyword only, %d annotated" % (
pos_args,
name_pair_args,
annotate_args,
pattr = "%s positional, %s keyword only, %s annotated" % (
pos_args, name_pair_args, annotate_args
)
if name_pair_args > 0:
if name_pair_args > 0 and annotate_args > 0:
# FIXME: this should probably be K_
opname = "%s_N%d" % (opname, name_pair_args)
opname += "_N%s_A%s" % (name_pair_args, annotate_args)
pass
if annotate_args > 0:
opname = "%s_A_%d" % (opname, annotate_args)
elif annotate_args > 0:
opname += "_A_%s" % annotate_args
pass
opname = "%s_%d" % (opname, pos_args)
elif name_pair_args > 0:
opname += "_N_%s" % name_pair_args
pass
else:
# Rule customization matches MAKE_FUNCTION_...,
# so make sure to add the "_"
opname += "_0"
attr = (pos_args, name_pair_args, annotate_args)
new_tokens.append(
Token(
opname=opname,

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2016-2021 by Rocky Bernstein
# Copyright (c) 2016-2021, 2023 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 1999 John Aycock
#
@@ -184,7 +184,7 @@ class Token: # Python 2.4 can't have empty ()
elif name == "LOAD_ASSERT":
return "%s%s %s" % (prefix, offset_opname, pattr)
elif self.op in self.opc.NAME_OPS:
if self.opc.version >= 3.0:
if self.opc.version_tuple >= (3, 0):
return "%s%s%s %s" % (prefix, offset_opname, argstr, self.attr)
elif name == "EXTENDED_ARG":
return "%s%s%s 0x%x << %s = %s" % (

View File

@@ -170,6 +170,7 @@ def customize_for_version36(self, version):
class_name = node[1][1].attr
if self.is_pypy and class_name.find("<locals>") > 0:
class_name = class_name.split(".")[-1]
else:
class_name = node[1][2].attr
build_class = node
@@ -206,23 +207,24 @@ def customize_for_version36(self, version):
elif build_class[1][0] == "load_closure":
# Python 3 with closures not functions
load_closure = build_class[1]
if hasattr(load_closure[-3], "attr"):
# Python 3.3 classes with closures work like this.
# Note have to test before 3.2 case because
# index -2 also has an attr.
subclass_code = load_closure[-3].attr
elif hasattr(load_closure[-2], "attr"):
# Python 3.2 works like this
subclass_code = load_closure[-2].attr
else:
raise "Internal Error n_classdef: cannot find class body"
subclass_code = None
for i in range(-4, -1):
if load_closure[i] == "LOAD_CODE":
subclass_code = load_closure[i].attr
break
if subclass_code is None:
raise RuntimeError(
"Internal Error n_classdef: cannot find " "class body"
)
if hasattr(build_class[3], "__len__"):
if not subclass_info:
subclass_info = build_class[3]
elif hasattr(build_class[2], "__len__"):
subclass_info = build_class[2]
else:
raise "Internal Error n_classdef: cannot superclass name"
raise RuntimeError(
"Internal Error n_classdef: cannot " "superclass name"
)
elif node == "classdefdeco2":
subclass_info = node
subclass_code = build_class[1][0].attr

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2019-2022 by Rocky Bernstein
# Copyright (c) 2019-2023 by Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -16,12 +16,8 @@
"""
import re
from uncompyle6.semantics.consts import (
PRECEDENCE,
TABLE_DIRECT,
INDENT_PER_LEVEL,
)
from uncompyle6.semantics.consts import INDENT_PER_LEVEL, PRECEDENCE, TABLE_DIRECT
from uncompyle6.semantics.helper import flatten_list
FSTRING_CONVERSION_MAP = {1: "!s", 2: "!r", 3: "!a", "X": ":X"}
@@ -54,10 +50,13 @@ def customize_for_version37(self, version):
{
"and_not": ("%c and not %c", (0, "expr"), (2, "expr")),
"ann_assign": (
"%|%[2]{attr}: %c\n", 0,
"%|%[2]{attr}: %c\n",
0,
),
"ann_assign_init": (
"%|%[2]{attr}: %c = %c\n", 0, 1,
"%|%[2]{attr}: %c = %c\n",
0,
1,
),
"async_for_stmt": (
"%|async for %c in %c:\n%+%c%-\n\n",
@@ -89,9 +88,8 @@ def customize_for_version37(self, version):
"attributes37": (
"%[0]{pattr} import %c",
(0, "IMPORT_NAME_ATTR"),
(1, "IMPORT_FROM")
(1, "IMPORT_FROM"),
),
# nested await expressions like:
# return await (await bar())
# need parenthesis.
@@ -126,19 +124,24 @@ def customize_for_version37(self, version):
(0, PRECEDENCE["compare"] - 1),
(-2, PRECEDENCE["compare"] - 1),
),
"compare_chained2a_37": ('%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1)),
"compare_chained2b_false_37": ('%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1)),
"compare_chained2a_false_37": ('%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1)),
"compare_chained2a_37": (
'%[1]{pattr.replace("-", " ")} %p',
(0, PRECEDENCE["compare"] - 1),
),
"compare_chained2b_false_37": (
'%[1]{pattr.replace("-", " ")} %p',
(0, PRECEDENCE["compare"] - 1),
),
"compare_chained2a_false_37": (
'%[1]{pattr.replace("-", " ")} %p',
(0, PRECEDENCE["compare"] - 1),
),
"compare_chained2c_37": (
'%[3]{pattr.replace("-", " ")} %p %p',
(0, PRECEDENCE["compare"] - 1),
(6, PRECEDENCE["compare"] - 1),
),
'if_exp37': (
'%p if %c else %c',
(1, 'expr', 27), 0, 3
),
"if_exp37": ("%p if %c else %c", (1, "expr", 27), 0, 3),
"except_return": ("%|except:\n%+%c%-", 3),
"if_exp_37a": (
"%p if %p else %p",
@@ -153,9 +156,7 @@ def customize_for_version37(self, version):
(5, "expr", 27),
),
"ifstmtl": ("%|if %c:\n%+%c%-", (0, "testexpr"), (1, "_ifstmts_jumpl")),
'import_as37': (
"%|import %c as %c\n", 2, -2
),
"import_as37": ("%|import %c as %c\n", 2, -2),
"import_from37": ("%|from %[2]{pattr} import %c\n", (3, "importlist37")),
"import_from_as37": (
"%|from %c as %c\n",
@@ -178,7 +179,6 @@ def customize_for_version37(self, version):
(0, "get_aiter"),
(3, "list_iter"),
),
"list_if37": (" if %p%c", (0, 27), 1),
"list_if37_not": (" if not %p%c", (0, 27), 1),
"testfalse_not_or": ("not %c or %c", (0, "expr"), (2, "expr")),

View File

@@ -474,9 +474,14 @@ class ComprehensionMixin(object):
self.preorder(n[1])
else:
if self.version == (3, 0):
if isinstance(n, Token):
body = store
elif len(n) > 1:
body = n[1]
else:
body = n[0]
else:
body = n[0]
self.preorder(body)
if node == "list_comp_async":

View File

@@ -1096,7 +1096,12 @@ class NonterminalActions:
self.write("{")
if node[0] in ["LOAD_SETCOMP", "LOAD_DICTCOMP"]:
if self.version == (3, 0):
if len(node) >= 6:
iter_index = 6
else:
assert node[1].kind.startswith("MAKE_FUNCTION")
iter_index = 2
pass
else:
iter_index = 1
self.comprehension_walk_newer(node, iter_index=iter_index, code_index=0)

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2019-2022 by Rocky Bernstein
# Copyright (c) 2019-2023 by Rocky Bernstein
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -119,15 +119,10 @@ class TreeTransform(GenericASTTraversal, object):
mkfunc_pattr = node[-1].pattr
if isinstance(mkfunc_pattr, tuple):
assert len(mkfunc_pattr, 4) and isinstance(mkfunc_pattr, int)
is_closure = node[-1].pattr[3] != 0
else:
# FIXME: This is what we had before. It is hoaky and probably wrong.
is_closure = mkfunc_pattr == "closure"
assert len(mkfunc_pattr) == 4 and isinstance(mkfunc_pattr, int)
if (
(not is_closure)
and len(code.co_consts) > 0
len(code.co_consts) > 0
and isinstance(code.co_consts[0], str)
):
docstring_node = SyntaxTree(

View File

@@ -14,4 +14,4 @@
# This file is suitable for sourcing inside POSIX shell as
# well as importing into Python
# fmt: off
__version__="3.9.0" # noqa
__version__="3.9.1.dev0" # noqa