From 9b550b9dda124f7f96120e2ddd789e4059bf462f Mon Sep 17 00:00:00 2001
From: rocky
Date: Sun, 14 Apr 2019 06:11:16 -0400
Subject: [PATCH] PEP E225 with a nod to Deepcommit

---
 pytest/test_docstring.py                  |  2 +-
 pytest/validate.py                        |  2 +-
 test/decompyle/test_augmentedAssign.py    |  2 +-
 test/dis-compare.py                       |  2 +-
 .../branching/02_ifelse_lambda.py         |  2 +-
 test/simple_source/bug275/01_while1.py    |  4 ++--
 .../bug36/00_return_return_bug.py         |  2 +-
 .../stmts/01_augmented_assign.py          |  2 +-
 test/simple_source/stmts/10_del.py        |  2 +-
 test/test_pyenvlib.py                     |  2 +-
 uncompyle6/parsers/parse2.py              |  2 +-
 uncompyle6/parsers/parse3.py              | 20 +++++++++----------
 uncompyle6/parsers/parse36.py             |  2 +-
 uncompyle6/scanner.py                     |  4 ++--
 uncompyle6/scanners/scanner2.py           |  2 +-
 uncompyle6/scanners/scanner3.py           |  2 +-
 uncompyle6/scanners/tok.py                |  2 +-
 uncompyle6/semantics/aligner.py           |  2 +-
 uncompyle6/semantics/fragments.py         |  2 +-
 uncompyle6/semantics/pysource.py          | 10 +++++-----
 uncompyle6/version.py                     |  2 +-
 21 files changed, 36 insertions(+), 36 deletions(-)

diff --git a/pytest/test_docstring.py b/pytest/test_docstring.py
index b967b6fd..21157b5f 100644
--- a/pytest/test_docstring.py
+++ b/pytest/test_docstring.py
@@ -47,7 +47,7 @@ class PrintFake():
             out = out[:-self.pending_newlines]
         self.f.write(out)
     def println(self, *data):
-        if data and not(len(data) == 1 and data[0] ==''):
+        if data and not(len(data) == 1 and data[0] == ''):
             self.write(*data)
         self.pending_newlines = max(self.pending_newlines, 1)
         return
diff --git a/pytest/validate.py b/pytest/validate.py
index 7a66168b..ead43859 100644
--- a/pytest/validate.py
+++ b/pytest/validate.py
@@ -80,7 +80,7 @@ def are_instructions_equal(i1, i2):
 
     :return: True if the two instructions are approximately equal, otherwise False.
     """
-    result = (1==1
+    result = (1 == 1
               and i1.opname == i2.opname
               and i1.opcode == i2.opcode
               and i1.arg == i2.arg
diff --git a/test/decompyle/test_augmentedAssign.py b/test/decompyle/test_augmentedAssign.py
index 7c5a3df9..72743aec 100644
--- a/test/decompyle/test_augmentedAssign.py
+++ b/test/decompyle/test_augmentedAssign.py
@@ -18,7 +18,7 @@
 a *= b; print a # a = a*b = 2
 a -= a; print a # a = a-a = 0
 a += 7*3; print a # == 21
-l= [1,2,3]
+l = [1,2,3]
 l[1] *= 3; print l[1]; # 6
 l[1][2][3] = 7
 l[1][2][3] *= 3;
diff --git a/test/dis-compare.py b/test/dis-compare.py
index 86418a08..8dc03eb5 100755
--- a/test/dis-compare.py
+++ b/test/dis-compare.py
@@ -76,7 +76,7 @@ def get_srcdir():
 src_dir = get_srcdir()
 os.chdir(src_dir)
 
-files=[
+files = [
     'if',
     'ifelse',
 #    'keyword',
diff --git a/test/simple_source/branching/02_ifelse_lambda.py b/test/simple_source/branching/02_ifelse_lambda.py
index 75dc2a0f..b94f7dcd 100644
--- a/test/simple_source/branching/02_ifelse_lambda.py
+++ b/test/simple_source/branching/02_ifelse_lambda.py
@@ -1,7 +1,7 @@
 # We have to do contortions here because
 # lambda's have to be more or less on a line
 
-f = lambda x: 1 if x<2 else 3
+f = lambda x: 1 if x < 2 else 3
 
 assert f(3) == 3
 assert f(1) == 1
diff --git a/test/simple_source/bug275/01_while1.py b/test/simple_source/bug275/01_while1.py
index d7ef0649..07df5734 100644
--- a/test/simple_source/bug275/01_while1.py
+++ b/test/simple_source/bug275/01_while1.py
@@ -3,7 +3,7 @@
 # while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM
 # while1stmt ::= SETUP_LOOP l_stmts_opt CONTINUE COME_FROM
 # which is included in later code generation
-ms=0
-if ms==1:
+ms = 0
+if ms == 1:
     while 1:
         pass
diff --git a/test/simple_source/bug36/00_return_return_bug.py b/test/simple_source/bug36/00_return_return_bug.py
index a7c48547..12edbd1e 100644
--- a/test/simple_source/bug36/00_return_return_bug.py
+++ b/test/simple_source/bug36/00_return_return_bug.py
@@ -2,7 +2,7 @@
 #Not detecting 2nd return is outside of
 # if/then. Fix was to ensure COME_FROM
 def return_return_bug(foo):
-    if foo =='say_hello':
+    if foo == 'say_hello':
         return "hello"
     return "world"
 
diff --git a/test/simple_source/stmts/01_augmented_assign.py b/test/simple_source/stmts/01_augmented_assign.py
index 5d279816..f92aa2c5 100644
--- a/test/simple_source/stmts/01_augmented_assign.py
+++ b/test/simple_source/stmts/01_augmented_assign.py
@@ -20,7 +20,7 @@
 a *= b; # print a # a = a*b = 2
 a -= a; # print a # a = a-a = 0
 a += 7*3; # print a # == 21
-l= [1,2,3]
+l = [1,2,3]
 l[1] *= 3; # print l[1]; # 6
 l[1][2][3] = 7
 l[1][2][3] *= 3;
diff --git a/test/simple_source/stmts/10_del.py b/test/simple_source/stmts/10_del.py
index 338c4124..f7d23524 100644
--- a/test/simple_source/stmts/10_del.py
+++ b/test/simple_source/stmts/10_del.py
@@ -28,7 +28,7 @@ def foo():
 
 z = {}
 def a():
-    b =1
+    b = 1
     global z
     del z
 def b(y):
diff --git a/test/test_pyenvlib.py b/test/test_pyenvlib.py
index 7ab6497b..4870b654 100755
--- a/test/test_pyenvlib.py
+++ b/test/test_pyenvlib.py
@@ -36,7 +36,7 @@ python_versions = [v for v in magics.python_versions if
 
 # FIXME: we should remove Python versions that we don't support.
 # These include Jython, and Python bytecode changes pre release.
-TEST_VERSIONS=(
+TEST_VERSIONS = (
     'pypy-2.4.0', 'pypy-2.6.1',
     'pypy-5.0.1', 'pypy-5.3.1', 'pypy3.5-5.7.1-beta',
     'native') + tuple(python_versions)
diff --git a/uncompyle6/parsers/parse2.py b/uncompyle6/parsers/parse2.py
index 3a675f66..2263d827 100644
--- a/uncompyle6/parsers/parse2.py
+++ b/uncompyle6/parsers/parse2.py
@@ -467,7 +467,7 @@ class Python2Parser(PythonParser):
                     pass
                 self.add_unique_rules([
                     ('mkfunc ::= %s load_closure LOAD_CONST %s' %
-                     ('expr '* token.attr, opname))], customize)
+                     ('expr ' * token.attr, opname))], customize)
 
                 if self.version >= 2.7:
                     if i > 0:
diff --git a/uncompyle6/parsers/parse3.py b/uncompyle6/parsers/parse3.py
index 98321047..cd891056 100644
--- a/uncompyle6/parsers/parse3.py
+++ b/uncompyle6/parsers/parse3.py
@@ -874,12 +874,12 @@ class Python3Parser(PythonParser):
                 j = 2
             if is_pypy or (i >= j and tokens[i-j] == 'LOAD_LAMBDA'):
                 rule_pat = ('mklambda ::= %sload_closure LOAD_LAMBDA %%s%s' %
-                            ('pos_arg '* args_pos, opname))
+                            ('pos_arg ' * args_pos, opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
             if has_get_iter_call_function1:
                 rule_pat = ("generator_exp ::= %sload_closure load_genexpr %%s%s expr "
-                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg '* args_pos, opname))
+                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg ' * args_pos, opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
             if has_get_iter_call_function1:
@@ -899,7 +899,7 @@ class Python3Parser(PythonParser):
             if (is_pypy or (i >= j and tokens[i-j] == 'LOAD_DICTCOMP')):
                 self.add_unique_rule('dict_comp ::= %sload_closure LOAD_DICTCOMP %s '
                                      'expr GET_ITER CALL_FUNCTION_1' %
-                                     ('pos_arg '* args_pos, opname),
+                                     ('pos_arg ' * args_pos, opname),
                                      opname, token.attr, customize)
 
             if args_kw > 0:
@@ -961,10 +961,10 @@ class Python3Parser(PythonParser):
 
             if has_get_iter_call_function1:
                 rule_pat = ("generator_exp ::= %sload_genexpr %%s%s expr "
-                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg '* args_pos, opname))
+                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg ' * args_pos, opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
                 rule_pat = ("generator_exp ::= %sload_closure load_genexpr %%s%s expr "
-                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg '* args_pos, opname))
+                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg ' * args_pos, opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
                 if is_pypy or (i >= 2 and tokens[i-2] == 'LOAD_LISTCOMP'):
                     if self.version >= 3.6:
@@ -980,8 +980,8 @@ class Python3Parser(PythonParser):
 
             if is_pypy or (i >= 2 and tokens[i-2] == 'LOAD_LAMBDA'):
                 rule_pat = ('mklambda ::= %s%sLOAD_LAMBDA %%s%s' %
-                            (('pos_arg '* args_pos),
-                             ('kwarg '* args_kw),
+                            (('pos_arg ' * args_pos),
+                             ('kwarg ' * args_kw),
                              opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
             continue
@@ -998,7 +998,7 @@ class Python3Parser(PythonParser):
 
             if has_get_iter_call_function1:
                 rule_pat = ("generator_exp ::= %sload_genexpr %%s%s expr "
-                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg '* args_pos, opname))
+                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg ' * args_pos, opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
             if is_pypy or (i >= j and tokens[i-j] == 'LOAD_LISTCOMP'):
@@ -1014,8 +1014,8 @@ class Python3Parser(PythonParser):
             # FIXME: Fold test into add_make_function_rule
             if is_pypy or (i >= j and tokens[i-j] == 'LOAD_LAMBDA'):
                 rule_pat = ('mklambda ::= %s%sLOAD_LAMBDA %%s%s' %
-                            (('pos_arg '* args_pos),
-                             ('kwarg '* args_kw),
+                            (('pos_arg ' * args_pos),
+                             ('kwarg ' * args_kw),
                              opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
diff --git a/uncompyle6/parsers/parse36.py b/uncompyle6/parsers/parse36.py
index ba38b5c8..f702f012 100644
--- a/uncompyle6/parsers/parse36.py
+++ b/uncompyle6/parsers/parse36.py
@@ -348,7 +348,7 @@ class Python36Parser(Python35Parser):
                    build_tuple_unpack_with_call
                    %s
                    CALL_FUNCTION_EX
-                   """ % 'expr '* token.attr, nop_func)
+                   """ % 'expr ' * token.attr, nop_func)
                 pass
 
             # FIXME: Is this right?
diff --git a/uncompyle6/scanner.py b/uncompyle6/scanner.py
index 43bdf151..60c5c443 100755
--- a/uncompyle6/scanner.py
+++ b/uncompyle6/scanner.py
@@ -271,7 +271,7 @@ class Scanner(object):
         code = self.code
         # Make sure requested positions do not go out of
         # code bounds
-        if not (start>=0 and end<=len(code)):
+        if not (start >= 0 and end <= len(code)):
             return None
 
         try:
@@ -435,7 +435,7 @@ class Scanner(object):
                 # j = self.stmts.index(inst.offset)
                 # self.lines[j] = offset
 
-                new_inst= inst._replace(starts_line=starts_line,
+                new_inst = inst._replace(starts_line=starts_line,
                                         is_jump_target=is_jump_target,
                                         offset=offset)
                 inst = new_inst
diff --git a/uncompyle6/scanners/scanner2.py b/uncompyle6/scanners/scanner2.py
index b3cf9df4..1e8ffa55 100644
--- a/uncompyle6/scanners/scanner2.py
+++ b/uncompyle6/scanners/scanner2.py
@@ -1179,7 +1179,7 @@ class Scanner2(Scanner):
 
         Return a list with indexes to them or [] if none found.
         """
-        assert(start>=0 and end<=len(self.code) and start <= end)
+        assert(start >= 0 and end <= len(self.code) and start <= end)
         try:    None in instr
         except: instr = [instr]
 
diff --git a/uncompyle6/scanners/scanner3.py b/uncompyle6/scanners/scanner3.py
index 9ad9c799..8da10a2d 100644
--- a/uncompyle6/scanners/scanner3.py
+++ b/uncompyle6/scanners/scanner3.py
@@ -1069,7 +1069,7 @@ class Scanner3(Scanner):
        optionally <target>ing specified offset, and return list found offsets
        which are not within any POP_JUMP_IF_TRUE jumps.
        """
-        assert(start>=0 and end<=len(self.code) and start <= end)
+        assert(start >= 0 and end <= len(self.code) and start <= end)
 
        # Find all offsets of requested instructions
        instr_offsets = self.inst_matches(start, end, instr, target,
diff --git a/uncompyle6/scanners/tok.py b/uncompyle6/scanners/tok.py
index c88c2ecd..4a4ee2f5 100644
--- a/uncompyle6/scanners/tok.py
+++ b/uncompyle6/scanners/tok.py
@@ -93,7 +93,7 @@ class Token():
             if not self.pattr.startswith('to '):
                 pattr = "to " + self.pattr
         elif self.op in self.opc.JABS_OPS:
-            self.pattr= str(self.pattr)
+            self.pattr = str(self.pattr)
             if not self.pattr.startswith('to '):
                 pattr = "to " + str(self.pattr)
             pass
diff --git a/uncompyle6/semantics/aligner.py b/uncompyle6/semantics/aligner.py
index 89458b54..0f93089c 100644
--- a/uncompyle6/semantics/aligner.py
+++ b/uncompyle6/semantics/aligner.py
@@ -30,7 +30,7 @@ class AligningWalker(SourceWalker, object):
         self.current_line_number = 0
 
     def println(self, *data):
-        if data and not(len(data) == 1 and data[0] ==''):
+        if data and not(len(data) == 1 and data[0] == ''):
             self.write(*data)
         self.pending_newlines = max(self.pending_newlines, 1)
 
diff --git a/uncompyle6/semantics/fragments.py b/uncompyle6/semantics/fragments.py
index 6d228664..968a69cc 100644
--- a/uncompyle6/semantics/fragments.py
+++ b/uncompyle6/semantics/fragments.py
@@ -586,7 +586,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
             n = n[0] # recurse one step
         if n == 'list_for': n = n[3]
         elif n == 'list_if': n = n[2]
-        elif n == 'list_if_not': n= n[2]
+        elif n == 'list_if_not': n = n[2]
         assert n == 'lc_body'
         if node[0].kind.startswith('BUILD_LIST'):
             start = len(self.f.getvalue())
diff --git a/uncompyle6/semantics/pysource.py b/uncompyle6/semantics/pysource.py
index 773e2307..f8daa4db 100644
--- a/uncompyle6/semantics/pysource.py
+++ b/uncompyle6/semantics/pysource.py
@@ -395,7 +395,7 @@ class SourceWalker(GenericASTTraversal, object):
         self.f.write(out)
 
     def println(self, *data):
-        if data and not(len(data) == 1 and data[0] ==''):
+        if data and not(len(data) == 1 and data[0] == ''):
             self.write(*data)
         self.pending_newlines = max(self.pending_newlines, 1)
 
@@ -855,7 +855,7 @@ class SourceWalker(GenericASTTraversal, object):
             n = n[0] # iterate one nesting deeper
         if n == 'list_for': n = n[3]
         elif n == 'list_if': n = n[2]
-        elif n == 'list_if_not': n= n[2]
+        elif n == 'list_if_not': n = n[2]
         assert n == 'lc_body'
         self.write( '[ ')
 
@@ -915,7 +915,7 @@ class SourceWalker(GenericASTTraversal, object):
             n = n[0] # iterate one nesting deeper
         if n == 'list_for': n = n[3]
         elif n == 'list_if': n = n[2]
-        elif n == 'list_if_not': n= n[2]
+        elif n == 'list_if_not': n = n[2]
         assert n == 'lc_body'
         self.write( '[ ')
 
@@ -1048,7 +1048,7 @@ class SourceWalker(GenericASTTraversal, object):
         while (len(ast) == 1
                or (ast in ('sstmt', 'return')
                    and ast[-1] in ('RETURN_LAST', 'RETURN_VALUE'))):
-            self.prec=100
+            self.prec = 100
             ast = ast[0]
 
         # Pick out important parts of the comprehension:
@@ -1188,7 +1188,7 @@ class SourceWalker(GenericASTTraversal, object):
         while (len(ast) == 1
                or (ast in ('sstmt', 'return')
                    and ast[-1] in ('RETURN_LAST', 'RETURN_VALUE'))):
-            self.prec=100
+            self.prec = 100
             ast = ast[0]
 
         n = ast[1]
diff --git a/uncompyle6/version.py b/uncompyle6/version.py
index f2c0ea90..a42c04c7 100644
--- a/uncompyle6/version.py
+++ b/uncompyle6/version.py
@@ -12,4 +12,4 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 # This file is suitable for sourcing inside bash as
 # well as importing into Python
-VERSION='3.2.6'
+VERSION='3.2.6' # noqa