mirror of https://github.com/rocky/python-uncompyle6.git
Travis CI: Run more f-string tests on Python 3.7
@@ -144,7 +144,7 @@ if PYTHON_VERSION > 2.6:
         run_test(fstring)


-    @pytest.mark.skipif(PYTHON_VERSION != 3.6, reason='need Python 3.6')
+    @pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need Python 3.6+')
     @pytest.mark.parametrize('fstring', [
         "f'{abc}{abc!s}'",
         "f'{abc}0'",
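This is the hunk that gives the commit its title: with `!= 3.6` the parametrized f-string tests ran only on Python 3.6, while `< 3.6` skips only interpreters that predate f-strings, so Python 3.7 now exercises them too. A minimal sketch of the pattern, assuming a stand-in PYTHON_VERSION constant and a dummy run_test helper (the project's real helper round-trips source through the decompiler):

import sys
import pytest

# Stand-in for the project's PYTHON_VERSION constant (a float such as 3.7,
# in the pre-3.10 major.minor convention).
PYTHON_VERSION = sys.version_info[0] + sys.version_info[1] / 10.0

def run_test(fstring):
    # Dummy helper for this sketch: just check the text compiles as an expression.
    compile(fstring, '<fstring>', 'eval')

# Skips only where f-strings do not exist, so 3.7 runs these cases as well.
@pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need Python 3.6+')
@pytest.mark.parametrize('fstring', [
    "f'{abc}{abc!s}'",
    "f'{abc}0'",
])
def test_fstring(fstring):
    run_test(fstring)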
@@ -7,7 +7,7 @@ def test_grammar():

     def check_tokens(tokens, opcode_set):
         remain_tokens = set(tokens) - opcode_set
-        remain_tokens = set([re.sub('_\d+$','', t) for t in remain_tokens])
+        remain_tokens = set([re.sub(r'_\d+$','', t) for t in remain_tokens])
         remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
         remain_tokens = set(remain_tokens) - opcode_set
         assert remain_tokens == set([]), \
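The only change in this hunk, and in the scanner hunks that follow, is the raw-string prefix on the `_\d+$` pattern: `\d` is not a valid string escape, and CPython 3.6 started emitting a DeprecationWarning for such escapes, so the `r` prefix is warning hygiene. What `check_tokens` itself does is strip synthetic suffixes from token names before comparing them against the opcode set; a self-contained sketch with illustrative token names (not taken from the project):

import re

# Illustrative token names: one numeric-suffixed specialization, one pseudo-token.
tokens = {'LOAD_CONST_2', 'RETURN_VALUE', 'COME_FROM'}
opcode_set = {'LOAD_CONST', 'RETURN_VALUE', 'COME_FROM'}

remain_tokens = set(tokens) - opcode_set
remain_tokens = set([re.sub(r'_\d+$', '', t) for t in remain_tokens])  # LOAD_CONST_2 -> LOAD_CONST
remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])  # FOO_CONT -> FOO
remain_tokens = set(remain_tokens) - opcode_set
assert remain_tokens == set([]), "unexpected grammar tokens: %s" % remain_tokens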
@@ -84,7 +84,7 @@ if __name__ == '__main__':
     """.split()))
     remain_tokens = set(tokens) - opcode_set
     import re
-    remain_tokens = set([re.sub('_\d+$', '', t) for t in remain_tokens])
+    remain_tokens = set([re.sub(r'_\d+$', '', t) for t in remain_tokens])
     remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
     remain_tokens = set(remain_tokens) - opcode_set
     print(remain_tokens)
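The same one-character edit recurs in the three hunks below. It does not change the compiled regex, because `\d` is left in the non-raw string as a literal backslash plus `d`; the raw prefix only avoids the invalid-escape warning. A quick equivalence check using nothing beyond the standard library:

import re

# A raw string and an explicitly escaped backslash spell the same pattern,
# so the substitution behaves identically either way.
assert r'_\d+$' == '_\\d+$'
assert re.sub(r'_\d+$', '', 'LOAD_CONST_3') == 'LOAD_CONST'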
@@ -267,7 +267,7 @@ if __name__ == '__main__':
     """.split()))
     remain_tokens = set(tokens) - opcode_set
     import re
-    remain_tokens = set([re.sub('_\d+$', '', t) for t in remain_tokens])
+    remain_tokens = set([re.sub(r'_\d+$', '', t) for t in remain_tokens])
     remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
     remain_tokens = set(remain_tokens) - opcode_set
     print(remain_tokens)
@@ -366,7 +366,7 @@ if __name__ == '__main__':
     """.split()))
     remain_tokens = set(tokens) - opcode_set
     import re
-    remain_tokens = set([re.sub('_\d+$', '', t) for t in remain_tokens])
+    remain_tokens = set([re.sub(r'_\d+$', '', t) for t in remain_tokens])
     remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
     remain_tokens = set(remain_tokens) - opcode_set
     print(remain_tokens)
@@ -104,7 +104,7 @@ if __name__ == '__main__':
     """.split()))
     remain_tokens = set(tokens) - opcode_set
     import re
-    remain_tokens = set([re.sub('_\d+$', '', t) for t in remain_tokens])
+    remain_tokens = set([re.sub(r'_\d+$', '', t) for t in remain_tokens])
     remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
     remain_tokens = set(remain_tokens) - opcode_set
     print(remain_tokens)
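The four hunks above all sit in a scanner module's `if __name__ == '__main__':` self-check, which prints any token names that are neither real opcodes nor recognized suffixed variants. A rough, self-contained rendition using `dis.opmap` in place of the scanner's own opcode table (the real blocks use the project's xdis-based opcode sets and scanner output):

import dis
import re

opcode_set = set(dis.opmap)                          # opcode names of the running interpreter
tokens = opcode_set | {'LOAD_CONST_2', 'COME_FROM'}  # pretend scanner output, with two extras

remain_tokens = set(tokens) - opcode_set
remain_tokens = set([re.sub(r'_\d+$', '', t) for t in remain_tokens])
remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
remain_tokens = set(remain_tokens) - opcode_set
print(remain_tokens)  # whatever prints is a token name the check does not recognize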
@@ -107,7 +107,7 @@ class Token():
                 pattr = self.opc.cmp_op[self.attr]
             # And so on. See xdis/bytecode.py get_instructions_bytes
             pass
-        elif re.search('_\d+$', self.kind):
+        elif re.search(r'_\d+$', self.kind):
             return "%s%s%s" % (prefix, offset_opname, argstr)
         else:
             pattr = ''
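In the `Token` formatting code, the same raw-string fix applies to the test that decides whether a token kind already encodes its argument as a numeric suffix; such tokens take the early-return branch, while plain opcode names fall through to the else and get an empty pattr. A tiny check of the pattern itself (token kinds here are illustrative):

import re

# Kinds with a trailing _<number> match; plain opcode names do not.
for kind in ('LOAD_CONST_2', 'CALL_FUNCTION_0', 'RETURN_VALUE'):
    print(kind, bool(re.search(r'_\d+$', kind)))
# LOAD_CONST_2 True
# CALL_FUNCTION_0 True
# RETURN_VALUE False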