Python3: remove "return None" at end of main for uncompyle. Fix up verify for Python 3. First automated Python 3.4 tests via "make check-3.4" in the test directory.

rocky
2015-12-17 18:49:16 -05:00
parent 29f02edf79
commit a309a77ea7
118 changed files with 129 additions and 4430 deletions
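
The verify item refers to the check that a decompiled module compiles back to equivalent bytecode. As a rough illustration only of that idea under Python 3 -- this is not uncompyle6's verify code, and the function name and arguments below are invented -- it looks roughly like this:

```python
# Illustrative sketch only -- not uncompyle6's actual verify implementation.
import dis
import io


def roughly_verifies(original_code, decompiled_source):
    """Recompile decompiled source under the running (Python 3) interpreter
    and compare its disassembly text with that of the original code object."""
    recompiled = compile(decompiled_source, "<decompiled>", "exec")

    def listing(code):
        out = io.StringIO()
        dis.dis(code, file=out)  # the file= keyword needs Python 3.4+
        return out.getvalue()

    # A real verifier also has to walk nested code objects and ignore
    # details such as object addresses; this only handles the flat case.
    return listing(original_code) == listing(recompiled)


# Example: a module with no nested code objects round-trips exactly.
src = "x = 1\ny = x + 2\n"
print(roughly_verifies(compile(src, "<orig>", "exec"), src))  # True
```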


@@ -6,4 +6,4 @@ python:
   - '2.7'
 script:
-  - python ./setup.py develop && COMPILE='--compile' make test
+  - python ./setup.py develop && COMPILE='--compile' make check-2.7


@@ -17,11 +17,16 @@ PHONY=check clean pytest dist distclean lint flake8 test test-unit test-function
 all: check
 #: Same as "check"
-test check: pytest check-short
+test check: pytest check-long
+#: Run tests
+check-long: pytest
+	$(MAKE) -C test check-2.7
 #: Run tests
 check-short: pytest
-	$(MAKE) -C test $@
+	$(MAKE) -C test check-short-2.7
 #: check that disassembly exactly matches Python lib's dis
 check-disasm:


@@ -9,4 +9,4 @@ dependencies:
     - pip install -r test-requirements.txt
 test:
   override:
-    - python ./setup.py develop && make check
+    - python ./setup.py develop && make check-2.7


@@ -13,11 +13,11 @@ os.chdir(src_dir)
 @pytest.mark.parametrize(("test_tuple", "function_to_test"), [
     (
-        ('../test/bytecode_2.5/test_import.pyc', 'testdata/test_import_25.right',),
+        ('../test/bytecode_2.7/if.pyc', 'testdata/if-2.7.right',),
         disassemble_file
     ),
     (
-        ('../test/bytecode_2.7/test1.pyc', 'testdata/test1.right',),
+        ('../test/bytecode_2.7/ifelse.pyc', 'testdata/ifelse-2.7.right',),
         disassemble_file
     ),
 ])
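
Each new case pairs a Python 2.7 bytecode fixture with an expected disassembly listing in pytest/testdata. The test body that consumes these parameters is not shown in this hunk; as a hedged sketch of what such a case checks (the helper name `check_disasm` and the assumption that `disassemble_file` accepts an output stream are mine, not the project's), it amounts to:

```python
# Sketch of one parametrized check, written in Python 3 style
# (under 2.7 one would use StringIO.StringIO instead of io.StringIO).
import io


def check_disasm(test_tuple, function_to_test):
    bytecode_path, expected_path = test_tuple
    out = io.StringIO()
    function_to_test(bytecode_path, out)  # assumed signature: (input path, output stream)
    with open(expected_path) as f:
        expected = f.read()
    assert out.getvalue() == expected
```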

pytest/testdata/if-2.7.right (new vendored file, 13 lines)

@@ -0,0 +1,13 @@
# Python 2.7
# Embedded file name: ./source_3.4/branching/if.py
1 0 LOAD_NAME 'a'
3 POP_JUMP_IF_FALSE '15'
2 6 LOAD_NAME 'c'
9 STORE_NAME 'b'
12 JUMP_FORWARD '15'
15_0 COME_FROM '12'
15 LOAD_CONST ''
18 RETURN_VALUE ''

pytest/testdata/ifelse-2.7.right (new vendored file, 16 lines)

@@ -0,0 +1,16 @@
# Python 2.7
# Embedded file name: ./source_3.4/branching/ifelse.py
1 0 LOAD_NAME 'a'
3 POP_JUMP_IF_FALSE '15'
2 6 LOAD_NAME 'c'
9 STORE_NAME 'b'
12 JUMP_FORWARD '21'
4 15 LOAD_NAME 'e'
18 STORE_NAME 'd'
21_0 COME_FROM '12'
21 LOAD_CONST ''
24 RETURN_VALUE ''
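
For context, the two fixtures above correspond to test sources along the following lines. This is a reconstruction from the disassembly; the actual source_3.4/branching files are not part of this diff, and the leading assignments exist only so the snippet runs on its own:

```python
# Reconstructed from the disassembly above; not the repository's test sources.
a, c, e = True, 1, 2   # placeholder bindings so the snippet is runnable

# if.py: LOAD_NAME 'a', POP_JUMP_IF_FALSE, then 'b = c'
if a:
    b = c

# ifelse.py: the same test plus an else branch storing 'e' into 'd'
if a:
    b = c
else:
    d = e
```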


@@ -1,88 +0,0 @@
# Python 2.7
# Embedded file name: /src/external-vcs/github/rocky/python-uncompyle6/__pkginfo__.py
1 0 LOAD_CONST 'uncompyle6 packaging information'
3 STORE_NAME '__doc__'
13 6 LOAD_CONST '\nCopyright (C) 2015 Rocky Bernstein <rocky@gnu.org>.\n'
9 STORE_NAME 'copyright'
15 12 LOAD_CONST 'Development Status :: 3 - Alpha'
16 15 LOAD_CONST 'Intended Audience :: Developers'
17 18 LOAD_CONST 'Operating System :: OS Independent'
18 21 LOAD_CONST 'Programming Language :: Python'
19 24 LOAD_CONST 'Topic :: Software Development :: Debuggers'
20 27 LOAD_CONST 'Topic :: Software Development :: Libraries :: Python Modules'
30 BUILD_LIST_6 ''
33 STORE_NAME 'classifiers'
24 36 LOAD_CONST 'Rocky Bernstein'
39 STORE_NAME 'author'
25 42 LOAD_CONST 'rb@dustyfeet.com'
45 STORE_NAME 'author_email'
26 48 LOAD_CONST ''
51 STORE_NAME 'ftp_url'
28 54 LOAD_CONST 'python-debugger@googlegroups.com'
57 STORE_NAME 'mailing_list'
29 60 LOAD_CONST 'uncompyle6'
63 STORE_NAME 'modname'
30 66 LOAD_CONST 'uncompyle6'
69 LOAD_CONST 'uncompyle6.opcodes'
72 BUILD_LIST_2 ''
75 STORE_NAME 'packages'
31 78 LOAD_CONST ''
81 STORE_NAME 'py_modules'
32 84 LOAD_CONST 'Python byte-code disassembler and source-code converter'
87 STORE_NAME 'short_desc'
33 90 LOAD_CONST 'bin/uncompyle6'
93 LOAD_CONST 'bin/pydisassemble'
96 BUILD_LIST_2 ''
99 STORE_NAME 'scripts'
35 102 LOAD_CONST -1
105 LOAD_CONST ''
108 IMPORT_NAME 'os.path'
111 STORE_NAME 'os'
38 114 LOAD_CONST '<code_object get_srcdir>'
117 MAKE_FUNCTION_0 ''
120 STORE_NAME 'get_srcdir'
43 123 BUILD_MAP ''
126 STORE_NAME 'ns'
44 129 LOAD_CONST '2.0'
132 STORE_NAME 'version'
45 135 LOAD_CONST 'https://github.com/rocky/python-uncompyle6/'
138 STORE_NAME 'web'
48 141 LOAD_NAME 'True'
144 STORE_NAME 'zip_safe'
51 147 LOAD_CONST '<code_object read>'
150 MAKE_FUNCTION_0 ''
153 STORE_NAME 'read'
54 156 LOAD_NAME 'read'
159 LOAD_CONST 'README.rst'
162 CALL_FUNCTION_1 ''
165 LOAD_CONST '\n'
168 BINARY_ADD ''
169 STORE_NAME 'long_description'
172 LOAD_CONST ''
175 RETURN_VALUE ''


@@ -3,11 +3,20 @@ PHONY=check clean dist distclean test test-unit test-functional rmChangeLog clea
 GIT2CL ?= git2cl
 PYTHON ?= python
+PYTHON_VERSION = $(shell $(PYTHON) -V | cut -d ' ' -f 2 | cut -d'.' -f1,2)
+NATIVE_CHECK = check-$(PYTHON_VERSION)
 # Set COMPILE='--compile' to force compilation before check
 COMPILE ?=
-#: Run all tests
-check: check-short check-2.7-ok
+#: Run working tests from Python 2.7
+check-2.7: check-short-2.7 check-bytecode check-2.7-ok
+#: Run working tests from Python 3.4
+check-3.4: check-short-2.7 check-bytecode check-native-short
+check:
+	@echo "For now, use check-2.7 or check.3.4" && false
 ## FIXME: there is a bug in our code that I don't
 ## find in uncompyle2 that causes this to fail.
@@ -24,9 +33,12 @@ check-disasm:
check-bytecode:
$(PYTHON) test_pythonlib.py --bytecode-2.5
#: Run quick tests
check-short:
$(PYTHON) test_pythonlib.py --base-2.7 --verify $(COMPILE)
#: short tests for bytecodes only for this version of Python
check-native-short:
$(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION) --verify $(COMPILE)
check-short-2.7:
$(PYTHON) test_pythonlib.py --bytecode-2.7 --verify $(COMPILE)
#: Run longer Python 2.7's lib files known to be okay
check-2.7-ok:
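
The PYTHON_VERSION variable added above shells out to `$(PYTHON) -V`, and check-native-short uses it to pick the bytecode directory matching the running interpreter. Purely for illustration (this is not code from the repository), the value that variable ends up holding, and the derived NATIVE_CHECK target name, can be computed in Python as:

```python
# Mirrors PYTHON_VERSION = $(shell $(PYTHON) -V | cut -d ' ' -f 2 | cut -d'.' -f1,2)
import sys

python_version = "%d.%d" % sys.version_info[:2]   # e.g. "3.4" or "2.7"
native_check = "check-" + python_version          # the Makefile's NATIVE_CHECK, e.g. "check-3.4"
print(native_check)
```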

(17 binary files changed; contents not shown)

test/bytecode_2.7/README (new file, 1 line)

@@ -0,0 +1 @@
These are byte-compiled programs compiled by Python 2.7

(5 binary files changed; contents not shown)

test/bytecode_3.4/README (new file, 1 line)

@@ -0,0 +1 @@
These are byte-compiled programs compiled by Python 3.4
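
These fixtures are ordinary CPython 3.4 bytecode files. A minimal sketch of producing one with the standard library -- the paths are placeholders, and the project may well use a different compile step:

```python
# Byte-compile one test source with the running interpreter; run under
# CPython 3.4 to get a bytecode_3.4 fixture. Paths are placeholders.
import py_compile

py_compile.compile("source_3.4/branching/if.py",
                   cfile="bytecode_3.4/if.pyc",
                   doraise=True)
```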

test/bytecode_3.4/add.pyc (new binary file; contents not shown)

test/bytecode_3.4/and.pyc (new binary file; contents not shown)

(2 more binary files changed; contents not shown)


@@ -1,455 +0,0 @@
#----------------------------------------------------------------------
# Copyright (c) 1999-2001, Digital Creations, Fredericksburg, VA, USA
# and Andrew Kuchling. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# o Redistributions of source code must retain the above copyright
# notice, this list of conditions, and the disclaimer that follows.
#
# o Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions, and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# o Neither the name of Digital Creations nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY DIGITAL CREATIONS AND CONTRIBUTORS *AS
# IS* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL DIGITAL
# CREATIONS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
#----------------------------------------------------------------------
"""Support for Berkeley DB 4.3 through 5.3 with a simple interface.
For the full featured object oriented interface use the bsddb.db module
instead. It mirrors the Oracle Berkeley DB C API.
"""
import sys
absolute_import = (sys.version_info[0] >= 3)
if (sys.version_info >= (2, 6)) and (sys.version_info < (3, 0)) :
import warnings
if sys.py3kwarning and (__name__ != 'bsddb3') :
warnings.warnpy3k("in 3.x, the bsddb module has been removed; "
"please use the pybsddb project instead",
DeprecationWarning, 2)
warnings.filterwarnings("ignore", ".*CObject.*", DeprecationWarning,
"bsddb.__init__")
try:
if __name__ == 'bsddb3':
# import _pybsddb binary as it should be the more recent version from
# a standalone pybsddb addon package than the version included with
# python as bsddb._bsddb.
if absolute_import :
# Because this syntaxis is not valid before Python 2.5
exec("from . import _pybsddb")
else :
import _pybsddb
_bsddb = _pybsddb
from bsddb3.dbutils import DeadlockWrap as _DeadlockWrap
else:
import _bsddb
from bsddb.dbutils import DeadlockWrap as _DeadlockWrap
except ImportError:
# Remove ourselves from sys.modules
import sys
del sys.modules[__name__]
raise
# bsddb3 calls it db, but provide _db for backwards compatibility
db = _db = _bsddb
__version__ = db.__version__
error = db.DBError # So bsddb.error will mean something...
#----------------------------------------------------------------------
import sys, os
from weakref import ref
if sys.version_info < (2, 6) :
import UserDict
MutableMapping = UserDict.DictMixin
else :
import collections
MutableMapping = collections.MutableMapping
class _iter_mixin(MutableMapping):
def _make_iter_cursor(self):
cur = _DeadlockWrap(self.db.cursor)
key = id(cur)
self._cursor_refs[key] = ref(cur, self._gen_cref_cleaner(key))
return cur
def _gen_cref_cleaner(self, key):
# use generate the function for the weakref callback here
# to ensure that we do not hold a strict reference to cur
# in the callback.
return lambda ref: self._cursor_refs.pop(key, None)
def __iter__(self):
self._kill_iteration = False
self._in_iter += 1
try:
try:
cur = self._make_iter_cursor()
# FIXME-20031102-greg: race condition. cursor could
# be closed by another thread before this call.
# since we're only returning keys, we call the cursor
# methods with flags=0, dlen=0, dofs=0
key = _DeadlockWrap(cur.first, 0,0,0)[0]
yield key
next = getattr(cur, "next")
while 1:
try:
key = _DeadlockWrap(next, 0,0,0)[0]
yield key
except _bsddb.DBCursorClosedError:
if self._kill_iteration:
raise RuntimeError('Database changed size '
'during iteration.')
cur = self._make_iter_cursor()
# FIXME-20031101-greg: race condition. cursor could
# be closed by another thread before this call.
_DeadlockWrap(cur.set, key,0,0,0)
next = getattr(cur, "next")
except _bsddb.DBNotFoundError:
pass
except _bsddb.DBCursorClosedError:
# the database was modified during iteration. abort.
pass
# When Python 2.4 not supported in bsddb3, we can change this to "finally"
except :
self._in_iter -= 1
raise
self._in_iter -= 1
def iteritems(self):
if not self.db:
return
self._kill_iteration = False
self._in_iter += 1
try:
try:
cur = self._make_iter_cursor()
# FIXME-20031102-greg: race condition. cursor could
# be closed by another thread before this call.
kv = _DeadlockWrap(cur.first)
key = kv[0]
yield kv
next = getattr(cur, "next")
while 1:
try:
kv = _DeadlockWrap(next)
key = kv[0]
yield kv
except _bsddb.DBCursorClosedError:
if self._kill_iteration:
raise RuntimeError('Database changed size '
'during iteration.')
cur = self._make_iter_cursor()
# FIXME-20031101-greg: race condition. cursor could
# be closed by another thread before this call.
_DeadlockWrap(cur.set, key,0,0,0)
next = getattr(cur, "next")
except _bsddb.DBNotFoundError:
pass
except _bsddb.DBCursorClosedError:
# the database was modified during iteration. abort.
pass
# When Python 2.4 not supported in bsddb3, we can change this to "finally"
except :
self._in_iter -= 1
raise
self._in_iter -= 1
class _DBWithCursor(_iter_mixin):
"""
A simple wrapper around DB that makes it look like the bsddbobject in
the old module. It uses a cursor as needed to provide DB traversal.
"""
def __init__(self, db):
self.db = db
self.db.set_get_returns_none(0)
# FIXME-20031101-greg: I believe there is still the potential
# for deadlocks in a multithreaded environment if someone
# attempts to use the any of the cursor interfaces in one
# thread while doing a put or delete in another thread. The
# reason is that _checkCursor and _closeCursors are not atomic
# operations. Doing our own locking around self.dbc,
# self.saved_dbc_key and self._cursor_refs could prevent this.
# TODO: A test case demonstrating the problem needs to be written.
# self.dbc is a DBCursor object used to implement the
# first/next/previous/last/set_location methods.
self.dbc = None
self.saved_dbc_key = None
# a collection of all DBCursor objects currently allocated
# by the _iter_mixin interface.
self._cursor_refs = {}
self._in_iter = 0
self._kill_iteration = False
def __del__(self):
self.close()
def _checkCursor(self):
if self.dbc is None:
self.dbc = _DeadlockWrap(self.db.cursor)
if self.saved_dbc_key is not None:
_DeadlockWrap(self.dbc.set, self.saved_dbc_key)
self.saved_dbc_key = None
# This method is needed for all non-cursor DB calls to avoid
# Berkeley DB deadlocks (due to being opened with DB_INIT_LOCK
# and DB_THREAD to be thread safe) when intermixing database
# operations that use the cursor internally with those that don't.
def _closeCursors(self, save=1):
if self.dbc:
c = self.dbc
self.dbc = None
if save:
try:
self.saved_dbc_key = _DeadlockWrap(c.current, 0,0,0)[0]
except db.DBError:
pass
_DeadlockWrap(c.close)
del c
for cref in self._cursor_refs.values():
c = cref()
if c is not None:
_DeadlockWrap(c.close)
def _checkOpen(self):
if self.db is None:
raise error, "BSDDB object has already been closed"
def isOpen(self):
return self.db is not None
def __len__(self):
self._checkOpen()
return _DeadlockWrap(lambda: len(self.db)) # len(self.db)
if sys.version_info >= (2, 6) :
def __repr__(self) :
if self.isOpen() :
return repr(dict(_DeadlockWrap(self.db.items)))
return repr(dict())
def __getitem__(self, key):
self._checkOpen()
return _DeadlockWrap(lambda: self.db[key]) # self.db[key]
def __setitem__(self, key, value):
self._checkOpen()
self._closeCursors()
if self._in_iter and key not in self:
self._kill_iteration = True
def wrapF():
self.db[key] = value
_DeadlockWrap(wrapF) # self.db[key] = value
def __delitem__(self, key):
self._checkOpen()
self._closeCursors()
if self._in_iter and key in self:
self._kill_iteration = True
def wrapF():
del self.db[key]
_DeadlockWrap(wrapF) # del self.db[key]
def close(self):
self._closeCursors(save=0)
if self.dbc is not None:
_DeadlockWrap(self.dbc.close)
v = 0
if self.db is not None:
v = _DeadlockWrap(self.db.close)
self.dbc = None
self.db = None
return v
def keys(self):
self._checkOpen()
return _DeadlockWrap(self.db.keys)
def has_key(self, key):
self._checkOpen()
return _DeadlockWrap(self.db.has_key, key)
def set_location(self, key):
self._checkOpen()
self._checkCursor()
return _DeadlockWrap(self.dbc.set_range, key)
def next(self): # Renamed by "2to3"
self._checkOpen()
self._checkCursor()
rv = _DeadlockWrap(getattr(self.dbc, "next"))
return rv
if sys.version_info[0] >= 3 : # For "2to3" conversion
next = __next__
def previous(self):
self._checkOpen()
self._checkCursor()
rv = _DeadlockWrap(self.dbc.prev)
return rv
def first(self):
self._checkOpen()
# fix 1725856: don't needlessly try to restore our cursor position
self.saved_dbc_key = None
self._checkCursor()
rv = _DeadlockWrap(self.dbc.first)
return rv
def last(self):
self._checkOpen()
# fix 1725856: don't needlessly try to restore our cursor position
self.saved_dbc_key = None
self._checkCursor()
rv = _DeadlockWrap(self.dbc.last)
return rv
def sync(self):
self._checkOpen()
return _DeadlockWrap(self.db.sync)
#----------------------------------------------------------------------
# Compatibility object factory functions
def hashopen(file, flag='c', mode=0666, pgsize=None, ffactor=None, nelem=None,
cachesize=None, lorder=None, hflags=0):
flags = _checkflag(flag, file)
e = _openDBEnv(cachesize)
d = db.DB(e)
d.set_flags(hflags)
if pgsize is not None: d.set_pagesize(pgsize)
if lorder is not None: d.set_lorder(lorder)
if ffactor is not None: d.set_h_ffactor(ffactor)
if nelem is not None: d.set_h_nelem(nelem)
d.open(file, db.DB_HASH, flags, mode)
return _DBWithCursor(d)
#----------------------------------------------------------------------
def btopen(file, flag='c', mode=0666,
btflags=0, cachesize=None, maxkeypage=None, minkeypage=None,
pgsize=None, lorder=None):
flags = _checkflag(flag, file)
e = _openDBEnv(cachesize)
d = db.DB(e)
if pgsize is not None: d.set_pagesize(pgsize)
if lorder is not None: d.set_lorder(lorder)
d.set_flags(btflags)
if minkeypage is not None: d.set_bt_minkey(minkeypage)
if maxkeypage is not None: d.set_bt_maxkey(maxkeypage)
d.open(file, db.DB_BTREE, flags, mode)
return _DBWithCursor(d)
#----------------------------------------------------------------------
def rnopen(file, flag='c', mode=0666,
rnflags=0, cachesize=None, pgsize=None, lorder=None,
rlen=None, delim=None, source=None, pad=None):
flags = _checkflag(flag, file)
e = _openDBEnv(cachesize)
d = db.DB(e)
if pgsize is not None: d.set_pagesize(pgsize)
if lorder is not None: d.set_lorder(lorder)
d.set_flags(rnflags)
if delim is not None: d.set_re_delim(delim)
if rlen is not None: d.set_re_len(rlen)
if source is not None: d.set_re_source(source)
if pad is not None: d.set_re_pad(pad)
d.open(file, db.DB_RECNO, flags, mode)
return _DBWithCursor(d)
#----------------------------------------------------------------------
def _openDBEnv(cachesize):
e = db.DBEnv()
if cachesize is not None:
if cachesize >= 20480:
e.set_cachesize(0, cachesize)
else:
raise error, "cachesize must be >= 20480"
e.set_lk_detect(db.DB_LOCK_DEFAULT)
e.open('.', db.DB_PRIVATE | db.DB_CREATE | db.DB_THREAD | db.DB_INIT_LOCK | db.DB_INIT_MPOOL)
return e
def _checkflag(flag, file):
if flag == 'r':
flags = db.DB_RDONLY
elif flag == 'rw':
flags = 0
elif flag == 'w':
flags = db.DB_CREATE
elif flag == 'c':
flags = db.DB_CREATE
elif flag == 'n':
flags = db.DB_CREATE
#flags = db.DB_CREATE | db.DB_TRUNCATE
# we used db.DB_TRUNCATE flag for this before but Berkeley DB
# 4.2.52 changed to disallowed truncate with txn environments.
if file is not None and os.path.isfile(file):
os.unlink(file)
else:
raise error, "flags should be one of 'r', 'w', 'c' or 'n'"
return flags | db.DB_THREAD
#----------------------------------------------------------------------
# This is a silly little hack that allows apps to continue to use the
# DB_THREAD flag even on systems without threads without freaking out
# Berkeley DB.
#
# This assumes that if Python was built with thread support then
# Berkeley DB was too.
try:
# 2to3 automatically changes "import thread" to "import _thread"
import thread as T
del T
except ImportError:
db.DB_THREAD = 0
#----------------------------------------------------------------------


@@ -1,60 +0,0 @@
#----------------------------------------------------------------------
# Copyright (c) 1999-2001, Digital Creations, Fredericksburg, VA, USA
# and Andrew Kuchling. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# o Redistributions of source code must retain the above copyright
# notice, this list of conditions, and the disclaimer that follows.
#
# o Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions, and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# o Neither the name of Digital Creations nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY DIGITAL CREATIONS AND CONTRIBUTORS *AS
# IS* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL DIGITAL
# CREATIONS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
#----------------------------------------------------------------------
# This module is just a placeholder for possible future expansion, in
# case we ever want to augment the stuff in _db in any way. For now
# it just simply imports everything from _db.
import sys
absolute_import = (sys.version_info[0] >= 3)
if not absolute_import :
if __name__.startswith('bsddb3.') :
# import _pybsddb binary as it should be the more recent version from
# a standalone pybsddb addon package than the version included with
# python as bsddb._bsddb.
from _pybsddb import *
from _pybsddb import __version__
else:
from _bsddb import *
from _bsddb import __version__
else :
# Because this syntaxis is not valid before Python 2.5
if __name__.startswith('bsddb3.') :
exec("from ._pybsddb import *")
exec("from ._pybsddb import __version__")
else :
exec("from ._bsddb import *")
exec("from ._bsddb import __version__")


@@ -1,266 +0,0 @@
#-------------------------------------------------------------------------
# This file contains real Python object wrappers for DB and DBEnv
# C "objects" that can be usefully subclassed. The previous SWIG
# based interface allowed this thanks to SWIG's shadow classes.
# -- Gregory P. Smith
#-------------------------------------------------------------------------
#
# (C) Copyright 2001 Autonomous Zone Industries
#
# License: This is free software. You may use this software for any
# purpose including modification/redistribution, so long as
# this header remains intact and that you do not claim any
# rights of ownership or authorship of this software. This
# software has been tested, but no warranty is expressed or
# implied.
#
#
# TODO it would be *really nice* to have an automatic shadow class populator
# so that new methods don't need to be added here manually after being
# added to _bsddb.c.
#
import sys
absolute_import = (sys.version_info[0] >= 3)
if absolute_import :
# Because this syntaxis is not valid before Python 2.5
exec("from . import db")
else :
import db
if sys.version_info < (2, 6) :
from UserDict import DictMixin as MutableMapping
else :
import collections
MutableMapping = collections.MutableMapping
class DBEnv:
def __init__(self, *args, **kwargs):
self._cobj = db.DBEnv(*args, **kwargs)
def close(self, *args, **kwargs):
return self._cobj.close(*args, **kwargs)
def open(self, *args, **kwargs):
return self._cobj.open(*args, **kwargs)
def remove(self, *args, **kwargs):
return self._cobj.remove(*args, **kwargs)
def set_shm_key(self, *args, **kwargs):
return self._cobj.set_shm_key(*args, **kwargs)
def set_cachesize(self, *args, **kwargs):
return self._cobj.set_cachesize(*args, **kwargs)
def set_data_dir(self, *args, **kwargs):
return self._cobj.set_data_dir(*args, **kwargs)
def set_flags(self, *args, **kwargs):
return self._cobj.set_flags(*args, **kwargs)
def set_lg_bsize(self, *args, **kwargs):
return self._cobj.set_lg_bsize(*args, **kwargs)
def set_lg_dir(self, *args, **kwargs):
return self._cobj.set_lg_dir(*args, **kwargs)
def set_lg_max(self, *args, **kwargs):
return self._cobj.set_lg_max(*args, **kwargs)
def set_lk_detect(self, *args, **kwargs):
return self._cobj.set_lk_detect(*args, **kwargs)
if db.version() < (4,5):
def set_lk_max(self, *args, **kwargs):
return self._cobj.set_lk_max(*args, **kwargs)
def set_lk_max_locks(self, *args, **kwargs):
return self._cobj.set_lk_max_locks(*args, **kwargs)
def set_lk_max_lockers(self, *args, **kwargs):
return self._cobj.set_lk_max_lockers(*args, **kwargs)
def set_lk_max_objects(self, *args, **kwargs):
return self._cobj.set_lk_max_objects(*args, **kwargs)
def set_mp_mmapsize(self, *args, **kwargs):
return self._cobj.set_mp_mmapsize(*args, **kwargs)
def set_timeout(self, *args, **kwargs):
return self._cobj.set_timeout(*args, **kwargs)
def set_tmp_dir(self, *args, **kwargs):
return self._cobj.set_tmp_dir(*args, **kwargs)
def txn_begin(self, *args, **kwargs):
return self._cobj.txn_begin(*args, **kwargs)
def txn_checkpoint(self, *args, **kwargs):
return self._cobj.txn_checkpoint(*args, **kwargs)
def txn_stat(self, *args, **kwargs):
return self._cobj.txn_stat(*args, **kwargs)
def set_tx_max(self, *args, **kwargs):
return self._cobj.set_tx_max(*args, **kwargs)
def set_tx_timestamp(self, *args, **kwargs):
return self._cobj.set_tx_timestamp(*args, **kwargs)
def lock_detect(self, *args, **kwargs):
return self._cobj.lock_detect(*args, **kwargs)
def lock_get(self, *args, **kwargs):
return self._cobj.lock_get(*args, **kwargs)
def lock_id(self, *args, **kwargs):
return self._cobj.lock_id(*args, **kwargs)
def lock_put(self, *args, **kwargs):
return self._cobj.lock_put(*args, **kwargs)
def lock_stat(self, *args, **kwargs):
return self._cobj.lock_stat(*args, **kwargs)
def log_archive(self, *args, **kwargs):
return self._cobj.log_archive(*args, **kwargs)
def set_get_returns_none(self, *args, **kwargs):
return self._cobj.set_get_returns_none(*args, **kwargs)
def log_stat(self, *args, **kwargs):
return self._cobj.log_stat(*args, **kwargs)
def dbremove(self, *args, **kwargs):
return self._cobj.dbremove(*args, **kwargs)
def dbrename(self, *args, **kwargs):
return self._cobj.dbrename(*args, **kwargs)
def set_encrypt(self, *args, **kwargs):
return self._cobj.set_encrypt(*args, **kwargs)
if db.version() >= (4,4):
def fileid_reset(self, *args, **kwargs):
return self._cobj.fileid_reset(*args, **kwargs)
def lsn_reset(self, *args, **kwargs):
return self._cobj.lsn_reset(*args, **kwargs)
class DB(MutableMapping):
def __init__(self, dbenv, *args, **kwargs):
# give it the proper DBEnv C object that its expecting
self._cobj = db.DB(*((dbenv._cobj,) + args), **kwargs)
# TODO are there other dict methods that need to be overridden?
def __len__(self):
return len(self._cobj)
def __getitem__(self, arg):
return self._cobj[arg]
def __setitem__(self, key, value):
self._cobj[key] = value
def __delitem__(self, arg):
del self._cobj[arg]
if sys.version_info >= (2, 6) :
def __iter__(self) :
return self._cobj.__iter__()
def append(self, *args, **kwargs):
return self._cobj.append(*args, **kwargs)
def associate(self, *args, **kwargs):
return self._cobj.associate(*args, **kwargs)
def close(self, *args, **kwargs):
return self._cobj.close(*args, **kwargs)
def consume(self, *args, **kwargs):
return self._cobj.consume(*args, **kwargs)
def consume_wait(self, *args, **kwargs):
return self._cobj.consume_wait(*args, **kwargs)
def cursor(self, *args, **kwargs):
return self._cobj.cursor(*args, **kwargs)
def delete(self, *args, **kwargs):
return self._cobj.delete(*args, **kwargs)
def fd(self, *args, **kwargs):
return self._cobj.fd(*args, **kwargs)
def get(self, *args, **kwargs):
return self._cobj.get(*args, **kwargs)
def pget(self, *args, **kwargs):
return self._cobj.pget(*args, **kwargs)
def get_both(self, *args, **kwargs):
return self._cobj.get_both(*args, **kwargs)
def get_byteswapped(self, *args, **kwargs):
return self._cobj.get_byteswapped(*args, **kwargs)
def get_size(self, *args, **kwargs):
return self._cobj.get_size(*args, **kwargs)
def get_type(self, *args, **kwargs):
return self._cobj.get_type(*args, **kwargs)
def join(self, *args, **kwargs):
return self._cobj.join(*args, **kwargs)
def key_range(self, *args, **kwargs):
return self._cobj.key_range(*args, **kwargs)
def has_key(self, *args, **kwargs):
return self._cobj.has_key(*args, **kwargs)
def items(self, *args, **kwargs):
return self._cobj.items(*args, **kwargs)
def keys(self, *args, **kwargs):
return self._cobj.keys(*args, **kwargs)
def open(self, *args, **kwargs):
return self._cobj.open(*args, **kwargs)
def put(self, *args, **kwargs):
return self._cobj.put(*args, **kwargs)
def remove(self, *args, **kwargs):
return self._cobj.remove(*args, **kwargs)
def rename(self, *args, **kwargs):
return self._cobj.rename(*args, **kwargs)
def set_bt_minkey(self, *args, **kwargs):
return self._cobj.set_bt_minkey(*args, **kwargs)
def set_bt_compare(self, *args, **kwargs):
return self._cobj.set_bt_compare(*args, **kwargs)
def set_cachesize(self, *args, **kwargs):
return self._cobj.set_cachesize(*args, **kwargs)
def set_dup_compare(self, *args, **kwargs) :
return self._cobj.set_dup_compare(*args, **kwargs)
def set_flags(self, *args, **kwargs):
return self._cobj.set_flags(*args, **kwargs)
def set_h_ffactor(self, *args, **kwargs):
return self._cobj.set_h_ffactor(*args, **kwargs)
def set_h_nelem(self, *args, **kwargs):
return self._cobj.set_h_nelem(*args, **kwargs)
def set_lorder(self, *args, **kwargs):
return self._cobj.set_lorder(*args, **kwargs)
def set_pagesize(self, *args, **kwargs):
return self._cobj.set_pagesize(*args, **kwargs)
def set_re_delim(self, *args, **kwargs):
return self._cobj.set_re_delim(*args, **kwargs)
def set_re_len(self, *args, **kwargs):
return self._cobj.set_re_len(*args, **kwargs)
def set_re_pad(self, *args, **kwargs):
return self._cobj.set_re_pad(*args, **kwargs)
def set_re_source(self, *args, **kwargs):
return self._cobj.set_re_source(*args, **kwargs)
def set_q_extentsize(self, *args, **kwargs):
return self._cobj.set_q_extentsize(*args, **kwargs)
def stat(self, *args, **kwargs):
return self._cobj.stat(*args, **kwargs)
def sync(self, *args, **kwargs):
return self._cobj.sync(*args, **kwargs)
def type(self, *args, **kwargs):
return self._cobj.type(*args, **kwargs)
def upgrade(self, *args, **kwargs):
return self._cobj.upgrade(*args, **kwargs)
def values(self, *args, **kwargs):
return self._cobj.values(*args, **kwargs)
def verify(self, *args, **kwargs):
return self._cobj.verify(*args, **kwargs)
def set_get_returns_none(self, *args, **kwargs):
return self._cobj.set_get_returns_none(*args, **kwargs)
def set_encrypt(self, *args, **kwargs):
return self._cobj.set_encrypt(*args, **kwargs)
class DBSequence:
def __init__(self, *args, **kwargs):
self._cobj = db.DBSequence(*args, **kwargs)
def close(self, *args, **kwargs):
return self._cobj.close(*args, **kwargs)
def get(self, *args, **kwargs):
return self._cobj.get(*args, **kwargs)
def get_dbp(self, *args, **kwargs):
return self._cobj.get_dbp(*args, **kwargs)
def get_key(self, *args, **kwargs):
return self._cobj.get_key(*args, **kwargs)
def init_value(self, *args, **kwargs):
return self._cobj.init_value(*args, **kwargs)
def open(self, *args, **kwargs):
return self._cobj.open(*args, **kwargs)
def remove(self, *args, **kwargs):
return self._cobj.remove(*args, **kwargs)
def stat(self, *args, **kwargs):
return self._cobj.stat(*args, **kwargs)
def set_cachesize(self, *args, **kwargs):
return self._cobj.set_cachesize(*args, **kwargs)
def set_flags(self, *args, **kwargs):
return self._cobj.set_flags(*args, **kwargs)
def set_range(self, *args, **kwargs):
return self._cobj.set_range(*args, **kwargs)
def get_cachesize(self, *args, **kwargs):
return self._cobj.get_cachesize(*args, **kwargs)
def get_flags(self, *args, **kwargs):
return self._cobj.get_flags(*args, **kwargs)
def get_range(self, *args, **kwargs):
return self._cobj.get_range(*args, **kwargs)


@@ -1,190 +0,0 @@
"""
File-like objects that read from or write to a bsddb record.
This implements (nearly) all stdio methods.
f = DBRecIO(db, key, txn=None)
f.close() # explicitly release resources held
flag = f.isatty() # always false
pos = f.tell() # get current position
f.seek(pos) # set current position
f.seek(pos, mode) # mode 0: absolute; 1: relative; 2: relative to EOF
buf = f.read() # read until EOF
buf = f.read(n) # read up to n bytes
f.truncate([size]) # truncate file at to at most size (default: current pos)
f.write(buf) # write at current position
f.writelines(list) # for line in list: f.write(line)
Notes:
- fileno() is left unimplemented so that code which uses it triggers
an exception early.
- There's a simple test set (see end of this file) - not yet updated
for DBRecIO.
- readline() is not implemented yet.
From:
Itamar Shtull-Trauring <itamar@maxnm.com>
"""
import errno
import string
class DBRecIO:
def __init__(self, db, key, txn=None):
self.db = db
self.key = key
self.txn = txn
self.len = None
self.pos = 0
self.closed = 0
self.softspace = 0
def close(self):
if not self.closed:
self.closed = 1
del self.db, self.txn
def isatty(self):
if self.closed:
raise ValueError, "I/O operation on closed file"
return 0
def seek(self, pos, mode = 0):
if self.closed:
raise ValueError, "I/O operation on closed file"
if mode == 1:
pos = pos + self.pos
elif mode == 2:
pos = pos + self.len
self.pos = max(0, pos)
def tell(self):
if self.closed:
raise ValueError, "I/O operation on closed file"
return self.pos
def read(self, n = -1):
if self.closed:
raise ValueError, "I/O operation on closed file"
if n < 0:
newpos = self.len
else:
newpos = min(self.pos+n, self.len)
dlen = newpos - self.pos
r = self.db.get(self.key, txn=self.txn, dlen=dlen, doff=self.pos)
self.pos = newpos
return r
__fixme = """
def readline(self, length=None):
if self.closed:
raise ValueError, "I/O operation on closed file"
if self.buflist:
self.buf = self.buf + string.joinfields(self.buflist, '')
self.buflist = []
i = string.find(self.buf, '\n', self.pos)
if i < 0:
newpos = self.len
else:
newpos = i+1
if length is not None:
if self.pos + length < newpos:
newpos = self.pos + length
r = self.buf[self.pos:newpos]
self.pos = newpos
return r
def readlines(self, sizehint = 0):
total = 0
lines = []
line = self.readline()
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline()
return lines
"""
def truncate(self, size=None):
if self.closed:
raise ValueError, "I/O operation on closed file"
if size is None:
size = self.pos
elif size < 0:
raise IOError(errno.EINVAL,
"Negative size not allowed")
elif size < self.pos:
self.pos = size
self.db.put(self.key, "", txn=self.txn, dlen=self.len-size, doff=size)
def write(self, s):
if self.closed:
raise ValueError, "I/O operation on closed file"
if not s: return
if self.pos > self.len:
self.buflist.append('\0'*(self.pos - self.len))
self.len = self.pos
newpos = self.pos + len(s)
self.db.put(self.key, s, txn=self.txn, dlen=len(s), doff=self.pos)
self.pos = newpos
def writelines(self, list):
self.write(string.joinfields(list, ''))
def flush(self):
if self.closed:
raise ValueError, "I/O operation on closed file"
"""
# A little test suite
def _test():
import sys
if sys.argv[1:]:
file = sys.argv[1]
else:
file = '/etc/passwd'
lines = open(file, 'r').readlines()
text = open(file, 'r').read()
f = StringIO()
for line in lines[:-2]:
f.write(line)
f.writelines(lines[-2:])
if f.getvalue() != text:
raise RuntimeError, 'write failed'
length = f.tell()
print 'File length =', length
f.seek(len(lines[0]))
f.write(lines[1])
f.seek(0)
print 'First line =', repr(f.readline())
here = f.tell()
line = f.readline()
print 'Second line =', repr(line)
f.seek(-len(line), 1)
line2 = f.read(len(line))
if line != line2:
raise RuntimeError, 'bad result after seek back'
f.seek(len(line2), 1)
list = f.readlines()
line = list[-1]
f.seek(f.tell() - len(line))
line2 = f.read()
if line != line2:
raise RuntimeError, 'bad result after seek back from EOF'
print 'Read', len(list), 'more lines'
print 'File length =', f.tell()
if f.tell() != length:
raise RuntimeError, 'bad length'
f.close()
if __name__ == '__main__':
_test()
"""


@@ -1,381 +0,0 @@
#------------------------------------------------------------------------
# Copyright (c) 1997-2001 by Total Control Software
# All Rights Reserved
#------------------------------------------------------------------------
#
# Module Name: dbShelve.py
#
# Description: A reimplementation of the standard shelve.py that
# forces the use of cPickle, and DB.
#
# Creation Date: 11/3/97 3:39:04PM
#
# License: This is free software. You may use this software for any
# purpose including modification/redistribution, so long as
# this header remains intact and that you do not claim any
# rights of ownership or authorship of this software. This
# software has been tested, but no warranty is expressed or
# implied.
#
# 13-Dec-2000: Updated to be used with the new bsddb3 package.
# Added DBShelfCursor class.
#
#------------------------------------------------------------------------
"""Manage shelves of pickled objects using bsddb database files for the
storage.
"""
#------------------------------------------------------------------------
import sys
absolute_import = (sys.version_info[0] >= 3)
if absolute_import :
# Because this syntaxis is not valid before Python 2.5
exec("from . import db")
else :
import db
if sys.version_info[0] >= 3 :
import cPickle # Will be converted to "pickle" by "2to3"
else :
if sys.version_info < (2, 6) :
import cPickle
else :
# When we drop support for python 2.4
# we could use: (in 2.5 we need a __future__ statement)
#
# with warnings.catch_warnings():
# warnings.filterwarnings(...)
# ...
#
# We can not use "with" as is, because it would be invalid syntax
# in python 2.4 and (with no __future__) 2.5.
# Here we simulate "with" following PEP 343 :
import warnings
w = warnings.catch_warnings()
w.__enter__()
try :
warnings.filterwarnings('ignore',
message='the cPickle module has been removed in Python 3.0',
category=DeprecationWarning)
import cPickle
finally :
w.__exit__()
del w
HIGHEST_PROTOCOL = cPickle.HIGHEST_PROTOCOL
def _dumps(object, protocol):
return cPickle.dumps(object, protocol=protocol)
if sys.version_info < (2, 6) :
from UserDict import DictMixin as MutableMapping
else :
import collections
MutableMapping = collections.MutableMapping
#------------------------------------------------------------------------
def open(filename, flags=db.DB_CREATE, mode=0660, filetype=db.DB_HASH,
dbenv=None, dbname=None):
"""
A simple factory function for compatibility with the standard
shleve.py module. It can be used like this, where key is a string
and data is a pickleable object:
from bsddb import dbshelve
db = dbshelve.open(filename)
db[key] = data
db.close()
"""
if type(flags) == type(''):
sflag = flags
if sflag == 'r':
flags = db.DB_RDONLY
elif sflag == 'rw':
flags = 0
elif sflag == 'w':
flags = db.DB_CREATE
elif sflag == 'c':
flags = db.DB_CREATE
elif sflag == 'n':
flags = db.DB_TRUNCATE | db.DB_CREATE
else:
raise db.DBError, "flags should be one of 'r', 'w', 'c' or 'n' or use the bsddb.db.DB_* flags"
d = DBShelf(dbenv)
d.open(filename, dbname, filetype, flags, mode)
return d
#---------------------------------------------------------------------------
class DBShelveError(db.DBError): pass
class DBShelf(MutableMapping):
"""A shelf to hold pickled objects, built upon a bsddb DB object. It
automatically pickles/unpickles data objects going to/from the DB.
"""
def __init__(self, dbenv=None):
self.db = db.DB(dbenv)
self._closed = True
if HIGHEST_PROTOCOL:
self.protocol = HIGHEST_PROTOCOL
else:
self.protocol = 1
def __del__(self):
self.close()
def __getattr__(self, name):
"""Many methods we can just pass through to the DB object.
(See below)
"""
return getattr(self.db, name)
#-----------------------------------
# Dictionary access methods
def __len__(self):
return len(self.db)
def __getitem__(self, key):
data = self.db[key]
return cPickle.loads(data)
def __setitem__(self, key, value):
data = _dumps(value, self.protocol)
self.db[key] = data
def __delitem__(self, key):
del self.db[key]
def keys(self, txn=None):
if txn is not None:
return self.db.keys(txn)
else:
return self.db.keys()
if sys.version_info >= (2, 6) :
def __iter__(self) : # XXX: Load all keys in memory :-(
for k in self.db.keys() :
yield k
# Do this when "DB" support iteration
# Or is it enough to pass thru "getattr"?
#
# def __iter__(self) :
# return self.db.__iter__()
def open(self, *args, **kwargs):
self.db.open(*args, **kwargs)
self._closed = False
def close(self, *args, **kwargs):
self.db.close(*args, **kwargs)
self._closed = True
def __repr__(self):
if self._closed:
return '<DBShelf @ 0x%x - closed>' % (id(self))
else:
return repr(dict(self.iteritems()))
def items(self, txn=None):
if txn is not None:
items = self.db.items(txn)
else:
items = self.db.items()
newitems = []
for k, v in items:
newitems.append( (k, cPickle.loads(v)) )
return newitems
def values(self, txn=None):
if txn is not None:
values = self.db.values(txn)
else:
values = self.db.values()
return map(cPickle.loads, values)
#-----------------------------------
# Other methods
def __append(self, value, txn=None):
data = _dumps(value, self.protocol)
return self.db.append(data, txn)
def append(self, value, txn=None):
if self.get_type() == db.DB_RECNO:
return self.__append(value, txn=txn)
raise DBShelveError, "append() only supported when dbshelve opened with filetype=dbshelve.db.DB_RECNO"
def associate(self, secondaryDB, callback, flags=0):
def _shelf_callback(priKey, priData, realCallback=callback):
# Safe in Python 2.x because expresion short circuit
if sys.version_info[0] < 3 or isinstance(priData, bytes) :
data = cPickle.loads(priData)
else :
data = cPickle.loads(bytes(priData, "iso8859-1")) # 8 bits
return realCallback(priKey, data)
return self.db.associate(secondaryDB, _shelf_callback, flags)
#def get(self, key, default=None, txn=None, flags=0):
def get(self, *args, **kw):
# We do it with *args and **kw so if the default value wasn't
# given nothing is passed to the extension module. That way
# an exception can be raised if set_get_returns_none is turned
# off.
data = self.db.get(*args, **kw)
try:
return cPickle.loads(data)
except (EOFError, TypeError, cPickle.UnpicklingError):
return data # we may be getting the default value, or None,
# so it doesn't need unpickled.
def get_both(self, key, value, txn=None, flags=0):
data = _dumps(value, self.protocol)
data = self.db.get(key, data, txn, flags)
return cPickle.loads(data)
def cursor(self, txn=None, flags=0):
c = DBShelfCursor(self.db.cursor(txn, flags))
c.protocol = self.protocol
return c
def put(self, key, value, txn=None, flags=0):
data = _dumps(value, self.protocol)
return self.db.put(key, data, txn, flags)
def join(self, cursorList, flags=0):
raise NotImplementedError
#----------------------------------------------
# Methods allowed to pass-through to self.db
#
# close, delete, fd, get_byteswapped, get_type, has_key,
# key_range, open, remove, rename, stat, sync,
# upgrade, verify, and all set_* methods.
#---------------------------------------------------------------------------
class DBShelfCursor:
"""
"""
def __init__(self, cursor):
self.dbc = cursor
def __del__(self):
self.close()
def __getattr__(self, name):
"""Some methods we can just pass through to the cursor object. (See below)"""
return getattr(self.dbc, name)
#----------------------------------------------
def dup(self, flags=0):
c = DBShelfCursor(self.dbc.dup(flags))
c.protocol = self.protocol
return c
def put(self, key, value, flags=0):
data = _dumps(value, self.protocol)
return self.dbc.put(key, data, flags)
def get(self, *args):
count = len(args) # a method overloading hack
method = getattr(self, 'get_%d' % count)
method(*args)
def get_1(self, flags):
rec = self.dbc.get(flags)
return self._extract(rec)
def get_2(self, key, flags):
rec = self.dbc.get(key, flags)
return self._extract(rec)
def get_3(self, key, value, flags):
data = _dumps(value, self.protocol)
rec = self.dbc.get(key, flags)
return self._extract(rec)
def current(self, flags=0): return self.get_1(flags|db.DB_CURRENT)
def first(self, flags=0): return self.get_1(flags|db.DB_FIRST)
def last(self, flags=0): return self.get_1(flags|db.DB_LAST)
def next(self, flags=0): return self.get_1(flags|db.DB_NEXT)
def prev(self, flags=0): return self.get_1(flags|db.DB_PREV)
def consume(self, flags=0): return self.get_1(flags|db.DB_CONSUME)
def next_dup(self, flags=0): return self.get_1(flags|db.DB_NEXT_DUP)
def next_nodup(self, flags=0): return self.get_1(flags|db.DB_NEXT_NODUP)
def prev_nodup(self, flags=0): return self.get_1(flags|db.DB_PREV_NODUP)
def get_both(self, key, value, flags=0):
data = _dumps(value, self.protocol)
rec = self.dbc.get_both(key, flags)
return self._extract(rec)
def set(self, key, flags=0):
rec = self.dbc.set(key, flags)
return self._extract(rec)
def set_range(self, key, flags=0):
rec = self.dbc.set_range(key, flags)
return self._extract(rec)
def set_recno(self, recno, flags=0):
rec = self.dbc.set_recno(recno, flags)
return self._extract(rec)
set_both = get_both
def _extract(self, rec):
if rec is None:
return None
else:
key, data = rec
# Safe in Python 2.x because expresion short circuit
if sys.version_info[0] < 3 or isinstance(data, bytes) :
return key, cPickle.loads(data)
else :
return key, cPickle.loads(bytes(data, "iso8859-1")) # 8 bits
#----------------------------------------------
# Methods allowed to pass-through to self.dbc
#
# close, count, delete, get_recno, join_item
#---------------------------------------------------------------------------


@@ -1,83 +0,0 @@
#------------------------------------------------------------------------
#
# Copyright (C) 2000 Autonomous Zone Industries
#
# License: This is free software. You may use this software for any
# purpose including modification/redistribution, so long as
# this header remains intact and that you do not claim any
# rights of ownership or authorship of this software. This
# software has been tested, but no warranty is expressed or
# implied.
#
# Author: Gregory P. Smith <greg@krypto.org>
#
# Note: I don't know how useful this is in reality since when a
# DBLockDeadlockError happens the current transaction is supposed to be
# aborted. If it doesn't then when the operation is attempted again
# the deadlock is still happening...
# --Robin
#
#------------------------------------------------------------------------
#
# import the time.sleep function in a namespace safe way to allow
# "from bsddb.dbutils import *"
#
from time import sleep as _sleep
import sys
absolute_import = (sys.version_info[0] >= 3)
if absolute_import :
# Because this syntaxis is not valid before Python 2.5
exec("from . import db")
else :
import db
# always sleep at least N seconds between retrys
_deadlock_MinSleepTime = 1.0/128
# never sleep more than N seconds between retrys
_deadlock_MaxSleepTime = 3.14159
# Assign a file object to this for a "sleeping" message to be written to it
# each retry
_deadlock_VerboseFile = None
def DeadlockWrap(function, *_args, **_kwargs):
"""DeadlockWrap(function, *_args, **_kwargs) - automatically retries
function in case of a database deadlock.
This is a function intended to be used to wrap database calls such
that they perform retrys with exponentially backing off sleeps in
between when a DBLockDeadlockError exception is raised.
A 'max_retries' parameter may optionally be passed to prevent it
from retrying forever (in which case the exception will be reraised).
d = DB(...)
d.open(...)
DeadlockWrap(d.put, "foo", data="bar") # set key "foo" to "bar"
"""
sleeptime = _deadlock_MinSleepTime
max_retries = _kwargs.get('max_retries', -1)
if 'max_retries' in _kwargs:
del _kwargs['max_retries']
while True:
try:
return function(*_args, **_kwargs)
except db.DBLockDeadlockError:
if _deadlock_VerboseFile:
_deadlock_VerboseFile.write(
'dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)
_sleep(sleeptime)
# exponential backoff in the sleep time
sleeptime *= 2
if sleeptime > _deadlock_MaxSleepTime:
sleeptime = _deadlock_MaxSleepTime
max_retries -= 1
if max_retries == -1:
raise
#------------------------------------------------------------------------


@@ -1,31 +0,0 @@
"""Package for parsing and compiling Python source code
There are several functions defined at the top level that are imported
from modules contained in the package.
parse(buf, mode="exec") -> AST
Converts a string containing Python source code to an abstract
syntax tree (AST). The AST is defined in compiler.ast.
parseFile(path) -> AST
The same as parse(open(path))
walk(ast, visitor, verbose=None)
Does a pre-order walk over the ast using the visitor instance.
See compiler.visitor for details.
compile(source, filename, mode, flags=None, dont_inherit=None)
Returns a code object. A replacement for the builtin compile() function.
compileFile(filename)
Generates a .pyc file by compiling filename.
"""
import warnings
warnings.warn("The compiler package is deprecated and removed in Python 3.x.",
DeprecationWarning, stacklevel=2)
from compiler.transformer import parse, parseFile
from compiler.visitor import walk
from compiler.pycodegen import compile, compileFile

File diff suppressed because it is too large.


@@ -1,23 +0,0 @@
# operation flags
OP_ASSIGN = 'OP_ASSIGN'
OP_DELETE = 'OP_DELETE'
OP_APPLY = 'OP_APPLY'
SC_LOCAL = 1
SC_GLOBAL_IMPLICIT = 2
SC_GLOBAL_EXPLICIT = 3
SC_FREE = 4
SC_CELL = 5
SC_UNKNOWN = 6
CO_OPTIMIZED = 0x0001
CO_NEWLOCALS = 0x0002
CO_VARARGS = 0x0004
CO_VARKEYWORDS = 0x0008
CO_NESTED = 0x0010
CO_GENERATOR = 0x0020
CO_GENERATOR_ALLOWED = 0
CO_FUTURE_DIVISION = 0x2000
CO_FUTURE_ABSIMPORT = 0x4000
CO_FUTURE_WITH_STATEMENT = 0x8000
CO_FUTURE_PRINT_FUNCTION = 0x10000


@@ -1,74 +0,0 @@
"""Parser for future statements
"""
from compiler import ast, walk
def is_future(stmt):
"""Return true if statement is a well-formed future statement"""
if not isinstance(stmt, ast.From):
return 0
if stmt.modname == "__future__":
return 1
else:
return 0
class FutureParser:
features = ("nested_scopes", "generators", "division",
"absolute_import", "with_statement", "print_function",
"unicode_literals")
def __init__(self):
self.found = {} # set
def visitModule(self, node):
stmt = node.node
for s in stmt.nodes:
if not self.check_stmt(s):
break
def check_stmt(self, stmt):
if is_future(stmt):
for name, asname in stmt.names:
if name in self.features:
self.found[name] = 1
else:
raise SyntaxError, \
"future feature %s is not defined" % name
stmt.valid_future = 1
return 1
return 0
def get_features(self):
"""Return list of features enabled by future statements"""
return self.found.keys()
class BadFutureParser:
"""Check for invalid future statements"""
def visitFrom(self, node):
if hasattr(node, 'valid_future'):
return
if node.modname != "__future__":
return
raise SyntaxError, "invalid future statement " + repr(node)
def find_futures(node):
p1 = FutureParser()
p2 = BadFutureParser()
walk(node, p1)
walk(node, p2)
return p1.get_features()
if __name__ == "__main__":
import sys
from compiler import parseFile, walk
for file in sys.argv[1:]:
print file
tree = parseFile(file)
v = FutureParser()
walk(tree, v)
print v.found
print


@@ -1,73 +0,0 @@
def flatten(tup):
elts = []
for elt in tup:
if isinstance(elt, tuple):
elts = elts + flatten(elt)
else:
elts.append(elt)
return elts
class Set:
def __init__(self):
self.elts = {}
def __len__(self):
return len(self.elts)
def __contains__(self, elt):
return elt in self.elts
def add(self, elt):
self.elts[elt] = elt
def elements(self):
return self.elts.keys()
def has_elt(self, elt):
return elt in self.elts
def remove(self, elt):
del self.elts[elt]
def copy(self):
c = Set()
c.elts.update(self.elts)
return c
class Stack:
def __init__(self):
self.stack = []
self.pop = self.stack.pop
def __len__(self):
return len(self.stack)
def push(self, elt):
self.stack.append(elt)
def top(self):
return self.stack[-1]
def __getitem__(self, index): # needed by visitContinue()
return self.stack[index]
MANGLE_LEN = 256 # magic constant from compile.c
def mangle(name, klass):
if not name.startswith('__'):
return name
if len(name) + 2 >= MANGLE_LEN:
return name
if name.endswith('__'):
return name
try:
i = 0
while klass[i] == '_':
i = i + 1
except IndexError:
return name
klass = klass[i:]
tlen = len(klass) + len(name)
if tlen > MANGLE_LEN:
klass = klass[:MANGLE_LEN-tlen]
return "_%s%s" % (klass, name)
def set_filename(filename, tree):
"""Set the filename attribute to filename on every node in tree"""
worklist = [tree]
while worklist:
node = worklist.pop(0)
node.filename = filename
worklist.extend(node.getChildNodes())


@@ -1,46 +0,0 @@
"""Check for errs in the AST.
The Python parser does not catch all syntax errors. Others, like
assignments with invalid targets, are caught in the code generation
phase.
The compiler package catches some errors in the transformer module.
But it seems clearer to write checkers that use the AST to detect
errors.
"""
from compiler import ast, walk
def check(tree, multi=None):
v = SyntaxErrorChecker(multi)
walk(tree, v)
return v.errors
class SyntaxErrorChecker:
"""A visitor to find syntax errors in the AST."""
def __init__(self, multi=None):
"""Create new visitor object.
If optional argument multi is not None, then print messages
for each error rather than raising a SyntaxError for the
first.
"""
self.multi = multi
self.errors = 0
def error(self, node, msg):
self.errors = self.errors + 1
if self.multi is not None:
print "%s:%s: %s" % (node.filename, node.lineno, msg)
else:
raise SyntaxError, "%s (%s:%s)" % (msg, node.filename, node.lineno)
def visitAssign(self, node):
# the transformer module handles many of these
pass
## for target in node.nodes:
## if isinstance(target, ast.AssList):
## if target.lineno is None:
## target.lineno = node.lineno
## self.error(target, "can't assign to list comprehension")


@@ -1,64 +0,0 @@
######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################
import sys
from ctypes import *
_array_type = type(Array)
def _other_endian(typ):
"""Return the type with the 'other' byte order. Simple types like
c_int and so on already have __ctype_be__ and __ctype_le__
attributes which contain the types, for more complicated types
arrays and structures are supported.
"""
# check _OTHER_ENDIAN attribute (present if typ is primitive type)
if hasattr(typ, _OTHER_ENDIAN):
return getattr(typ, _OTHER_ENDIAN)
# if typ is array
if isinstance(typ, _array_type):
return _other_endian(typ._type_) * typ._length_
# if typ is structure
if issubclass(typ, Structure):
return typ
raise TypeError("This type does not support other endian: %s" % typ)
class _swapped_meta(type(Structure)):
def __setattr__(self, attrname, value):
if attrname == "_fields_":
fields = []
for desc in value:
name = desc[0]
typ = desc[1]
rest = desc[2:]
fields.append((name, _other_endian(typ)) + rest)
value = fields
super(_swapped_meta, self).__setattr__(attrname, value)
################################################################
# Note: The Structure metaclass checks for the *presence* (not the
# value!) of a _swapped_bytes_ attribute to determine the bit order in
# structures containing bit fields.
if sys.byteorder == "little":
_OTHER_ENDIAN = "__ctype_be__"
LittleEndianStructure = Structure
class BigEndianStructure(Structure):
"""Structure with big endian byte order"""
__metaclass__ = _swapped_meta
_swappedbytes_ = None
elif sys.byteorder == "big":
_OTHER_ENDIAN = "__ctype_le__"
BigEndianStructure = Structure
class LittleEndianStructure(Structure):
"""Structure with little endian byte order"""
__metaclass__ = _swapped_meta
_swappedbytes_ = None
else:
raise RuntimeError("Invalid byteorder")


@@ -1,287 +0,0 @@
######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################
import sys, os
# find_library(name) returns the pathname of a library, or None.
if os.name == "nt":
def _get_build_version():
"""Return the version of MSVC that was used to build Python.
For Python 2.3 and up, the version number is included in
sys.version. For earlier versions, assume the compiler is MSVC 6.
"""
# This function was copied from Lib/distutils/msvccompiler.py
prefix = "MSC v."
i = sys.version.find(prefix)
if i == -1:
return 6
i = i + len(prefix)
s, rest = sys.version[i:].split(" ", 1)
majorVersion = int(s[:-2]) - 6
minorVersion = int(s[2:3]) / 10.0
# I don't think paths are affected by minor version in version 6
if majorVersion == 6:
minorVersion = 0
if majorVersion >= 6:
return majorVersion + minorVersion
# else we don't know what version of the compiler this is
return None
def find_msvcrt():
"""Return the name of the VC runtime dll"""
version = _get_build_version()
if version is None:
# better be safe than sorry
return None
if version <= 6:
clibname = 'msvcrt'
else:
clibname = 'msvcr%d' % (version * 10)
# If python was built with in debug mode
import imp
if imp.get_suffixes()[0][0] == '_d.pyd':
clibname += 'd'
return clibname+'.dll'
def find_library(name):
if name in ('c', 'm'):
return find_msvcrt()
# See MSDN for the REAL search order.
for directory in os.environ['PATH'].split(os.pathsep):
fname = os.path.join(directory, name)
if os.path.isfile(fname):
return fname
if fname.lower().endswith(".dll"):
continue
fname = fname + ".dll"
if os.path.isfile(fname):
return fname
return None
if os.name == "ce":
# search path according to MSDN:
# - absolute path specified by filename
# - The .exe launch directory
# - the Windows directory
# - ROM dll files (where are they?)
# - OEM specified search path: HKLM\Loader\SystemPath
def find_library(name):
return name
if os.name == "posix" and sys.platform == "darwin":
from ctypes.macholib.dyld import dyld_find as _dyld_find
def find_library(name):
possible = ['lib%s.dylib' % name,
'%s.dylib' % name,
'%s.framework/%s' % (name, name)]
for name in possible:
try:
return _dyld_find(name)
except ValueError:
continue
return None
elif os.name == "posix":
# Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump
import re, tempfile, errno
def _findLib_gcc(name):
expr = r'[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name)
fdout, ccout = tempfile.mkstemp()
os.close(fdout)
cmd = 'if type gcc >/dev/null 2>&1; then CC=gcc; elif type cc >/dev/null 2>&1; then CC=cc;else exit 10; fi;' \
'LANG=C LC_ALL=C $CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name
try:
f = os.popen(cmd)
try:
trace = f.read()
finally:
rv = f.close()
finally:
try:
os.unlink(ccout)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if rv == 10:
raise OSError, 'gcc or cc command not found'
res = re.search(expr, trace)
if not res:
return None
return res.group(0)
if sys.platform == "sunos5":
# use /usr/ccs/bin/dump on solaris
def _get_soname(f):
if not f:
return None
cmd = "/usr/ccs/bin/dump -Lpv 2>/dev/null " + f
f = os.popen(cmd)
try:
data = f.read()
finally:
f.close()
res = re.search(r'\[.*\]\sSONAME\s+([^\s]+)', data)
if not res:
return None
return res.group(1)
else:
def _get_soname(f):
# assuming GNU binutils / ELF
if not f:
return None
cmd = 'if ! type objdump >/dev/null 2>&1; then exit 10; fi;' \
"objdump -p -j .dynamic 2>/dev/null " + f
f = os.popen(cmd)
dump = f.read()
rv = f.close()
if rv == 10:
raise OSError, 'objdump command not found'
f = os.popen(cmd)
try:
data = f.read()
finally:
f.close()
res = re.search(r'\sSONAME\s+([^\s]+)', data)
if not res:
return None
return res.group(1)
if (sys.platform.startswith("freebsd")
or sys.platform.startswith("openbsd")
or sys.platform.startswith("dragonfly")):
def _num_version(libname):
# "libxyz.so.MAJOR.MINOR" => [ MAJOR, MINOR ]
parts = libname.split(".")
nums = []
try:
while parts:
nums.insert(0, int(parts.pop()))
except ValueError:
pass
return nums or [ sys.maxint ]
def find_library(name):
ename = re.escape(name)
expr = r':-l%s\.\S+ => \S*/(lib%s\.\S+)' % (ename, ename)
f = os.popen('/sbin/ldconfig -r 2>/dev/null')
try:
data = f.read()
finally:
f.close()
res = re.findall(expr, data)
if not res:
return _get_soname(_findLib_gcc(name))
res.sort(cmp= lambda x,y: cmp(_num_version(x), _num_version(y)))
return res[-1]
elif sys.platform == "sunos5":
def _findLib_crle(name, is64):
if not os.path.exists('/usr/bin/crle'):
return None
if is64:
cmd = 'env LC_ALL=C /usr/bin/crle -64 2>/dev/null'
else:
cmd = 'env LC_ALL=C /usr/bin/crle 2>/dev/null'
for line in os.popen(cmd).readlines():
line = line.strip()
if line.startswith('Default Library Path (ELF):'):
paths = line.split()[4]
if not paths:
return None
for dir in paths.split(":"):
libfile = os.path.join(dir, "lib%s.so" % name)
if os.path.exists(libfile):
return libfile
return None
def find_library(name, is64 = False):
return _get_soname(_findLib_crle(name, is64) or _findLib_gcc(name))
else:
def _findSoname_ldconfig(name):
import struct
# XXX this code assumes that we know all unames and that a single
# ABI is supported per uname; instead we should find what the
# ABI is (e.g. check ABI of current process) or simply ask libc
# to load the library for us
uname = os.uname()[4]
# ARM has a variety of unames, e.g. armv7l
if uname.startswith("arm"):
uname = "arm"
if struct.calcsize('l') == 4:
machine = uname + '-32'
else:
machine = uname + '-64'
mach_map = {
'x86_64-64': 'libc6,x86-64',
'ppc64-64': 'libc6,64bit',
'sparc64-64': 'libc6,64bit',
's390x-64': 'libc6,64bit',
'ia64-64': 'libc6,IA-64',
# this actually breaks on biarch or multiarch as the first
# library wins; uname doesn't tell us which ABI we're using
'arm-32': 'libc6(,hard-float)?',
}
abi_type = mach_map.get(machine, 'libc6')
# XXX assuming GLIBC's ldconfig (with option -p)
expr = r'\s+(lib%s\.[^\s]+)\s+\(%s' % (re.escape(name), abi_type)
f = os.popen('/sbin/ldconfig -p 2>/dev/null')
try:
data = f.read()
finally:
f.close()
res = re.search(expr, data)
if not res:
return None
return res.group(1)
def find_library(name):
return _findSoname_ldconfig(name) or _get_soname(_findLib_gcc(name))
################################################################
# test code
def test():
from ctypes import cdll
if os.name == "nt":
print cdll.msvcrt
print cdll.load("msvcrt")
print find_library("msvcrt")
if os.name == "posix":
# find and load_version
print find_library("m")
print find_library("c")
print find_library("bz2")
# getattr
## print cdll.m
## print cdll.bz2
# load
if sys.platform == "darwin":
print cdll.LoadLibrary("libm.dylib")
print cdll.LoadLibrary("libcrypto.dylib")
print cdll.LoadLibrary("libSystem.dylib")
print cdll.LoadLibrary("System.framework/System")
else:
print cdll.LoadLibrary("libm.so")
print cdll.LoadLibrary("libcrypt.so")
print find_library("crypt")
if __name__ == "__main__":
test()
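
The deleted module above looks like the stdlib ctypes/util.py. A short, hedged example of its public entry point (POSIX; the exact string returned varies by platform and may be None):

from ctypes import CDLL
from ctypes.util import find_library

name = find_library("m")        # e.g. 'libm.so.6' on glibc Linux
if name is not None:
    libm = CDLL(name)           # load the resolved library with ctypes
    print name, libm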


@@ -1,185 +0,0 @@
######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################
# The most useful windows datatypes
from ctypes import *
BYTE = c_byte
WORD = c_ushort
DWORD = c_ulong
WCHAR = c_wchar
UINT = c_uint
INT = c_int
DOUBLE = c_double
FLOAT = c_float
BOOLEAN = BYTE
BOOL = c_long
from ctypes import _SimpleCData
class VARIANT_BOOL(_SimpleCData):
_type_ = "v"
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.value)
ULONG = c_ulong
LONG = c_long
USHORT = c_ushort
SHORT = c_short
# in the windows header files, these are structures.
_LARGE_INTEGER = LARGE_INTEGER = c_longlong
_ULARGE_INTEGER = ULARGE_INTEGER = c_ulonglong
LPCOLESTR = LPOLESTR = OLESTR = c_wchar_p
LPCWSTR = LPWSTR = c_wchar_p
LPCSTR = LPSTR = c_char_p
LPCVOID = LPVOID = c_void_p
# WPARAM is defined as UINT_PTR (unsigned type)
# LPARAM is defined as LONG_PTR (signed type)
if sizeof(c_long) == sizeof(c_void_p):
WPARAM = c_ulong
LPARAM = c_long
elif sizeof(c_longlong) == sizeof(c_void_p):
WPARAM = c_ulonglong
LPARAM = c_longlong
ATOM = WORD
LANGID = WORD
COLORREF = DWORD
LGRPID = DWORD
LCTYPE = DWORD
LCID = DWORD
################################################################
# HANDLE types
HANDLE = c_void_p # in the header files: void *
HACCEL = HANDLE
HBITMAP = HANDLE
HBRUSH = HANDLE
HCOLORSPACE = HANDLE
HDC = HANDLE
HDESK = HANDLE
HDWP = HANDLE
HENHMETAFILE = HANDLE
HFONT = HANDLE
HGDIOBJ = HANDLE
HGLOBAL = HANDLE
HHOOK = HANDLE
HICON = HANDLE
HINSTANCE = HANDLE
HKEY = HANDLE
HKL = HANDLE
HLOCAL = HANDLE
HMENU = HANDLE
HMETAFILE = HANDLE
HMODULE = HANDLE
HMONITOR = HANDLE
HPALETTE = HANDLE
HPEN = HANDLE
HRGN = HANDLE
HRSRC = HANDLE
HSTR = HANDLE
HTASK = HANDLE
HWINSTA = HANDLE
HWND = HANDLE
SC_HANDLE = HANDLE
SERVICE_STATUS_HANDLE = HANDLE
################################################################
# Some important structure definitions
class RECT(Structure):
_fields_ = [("left", c_long),
("top", c_long),
("right", c_long),
("bottom", c_long)]
tagRECT = _RECTL = RECTL = RECT
class _SMALL_RECT(Structure):
_fields_ = [('Left', c_short),
('Top', c_short),
('Right', c_short),
('Bottom', c_short)]
SMALL_RECT = _SMALL_RECT
class _COORD(Structure):
_fields_ = [('X', c_short),
('Y', c_short)]
class POINT(Structure):
_fields_ = [("x", c_long),
("y", c_long)]
tagPOINT = _POINTL = POINTL = POINT
class SIZE(Structure):
_fields_ = [("cx", c_long),
("cy", c_long)]
tagSIZE = SIZEL = SIZE
def RGB(red, green, blue):
return red + (green << 8) + (blue << 16)
class FILETIME(Structure):
_fields_ = [("dwLowDateTime", DWORD),
("dwHighDateTime", DWORD)]
_FILETIME = FILETIME
class MSG(Structure):
_fields_ = [("hWnd", HWND),
("message", c_uint),
("wParam", WPARAM),
("lParam", LPARAM),
("time", DWORD),
("pt", POINT)]
tagMSG = MSG
MAX_PATH = 260
class WIN32_FIND_DATAA(Structure):
_fields_ = [("dwFileAttributes", DWORD),
("ftCreationTime", FILETIME),
("ftLastAccessTime", FILETIME),
("ftLastWriteTime", FILETIME),
("nFileSizeHigh", DWORD),
("nFileSizeLow", DWORD),
("dwReserved0", DWORD),
("dwReserved1", DWORD),
("cFileName", c_char * MAX_PATH),
("cAlternateFileName", c_char * 14)]
class WIN32_FIND_DATAW(Structure):
_fields_ = [("dwFileAttributes", DWORD),
("ftCreationTime", FILETIME),
("ftLastAccessTime", FILETIME),
("ftLastWriteTime", FILETIME),
("nFileSizeHigh", DWORD),
("nFileSizeLow", DWORD),
("dwReserved0", DWORD),
("dwReserved1", DWORD),
("cFileName", c_wchar * MAX_PATH),
("cAlternateFileName", c_wchar * 14)]
__all__ = ['ATOM', 'BOOL', 'BOOLEAN', 'BYTE', 'COLORREF', 'DOUBLE', 'DWORD',
'FILETIME', 'FLOAT', 'HACCEL', 'HANDLE', 'HBITMAP', 'HBRUSH',
'HCOLORSPACE', 'HDC', 'HDESK', 'HDWP', 'HENHMETAFILE', 'HFONT',
'HGDIOBJ', 'HGLOBAL', 'HHOOK', 'HICON', 'HINSTANCE', 'HKEY',
'HKL', 'HLOCAL', 'HMENU', 'HMETAFILE', 'HMODULE', 'HMONITOR',
'HPALETTE', 'HPEN', 'HRGN', 'HRSRC', 'HSTR', 'HTASK', 'HWINSTA',
'HWND', 'INT', 'LANGID', 'LARGE_INTEGER', 'LCID', 'LCTYPE',
'LGRPID', 'LONG', 'LPARAM', 'LPCOLESTR', 'LPCSTR', 'LPCVOID',
'LPCWSTR', 'LPOLESTR', 'LPSTR', 'LPVOID', 'LPWSTR', 'MAX_PATH',
'MSG', 'OLESTR', 'POINT', 'POINTL', 'RECT', 'RECTL', 'RGB',
'SC_HANDLE', 'SERVICE_STATUS_HANDLE', 'SHORT', 'SIZE', 'SIZEL',
'SMALL_RECT', 'UINT', 'ULARGE_INTEGER', 'ULONG', 'USHORT',
'VARIANT_BOOL', 'WCHAR', 'WIN32_FIND_DATAA', 'WIN32_FIND_DATAW',
'WORD', 'WPARAM', '_COORD', '_FILETIME', '_LARGE_INTEGER',
'_POINTL', '_RECTL', '_SMALL_RECT', '_ULARGE_INTEGER', 'tagMSG',
'tagPOINT', 'tagRECT', 'tagSIZE']
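
The file above appears to be ctypes/wintypes.py. A small sketch of how these aliases are typically used (Windows-only, illustrative):

from ctypes import windll, byref
from ctypes.wintypes import POINT, RECT

pt = POINT()
if windll.user32.GetCursorPos(byref(pt)):       # fills pt.x / pt.y
    print pt.x, pt.y

rc = RECT(left=0, top=0, right=640, bottom=480)
print rc.right - rc.left, rc.bottom - rc.top    # 640 480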


@@ -1,59 +0,0 @@
"""curses
The main package for curses support for Python. Normally used by importing
the package, and perhaps a particular module inside it.
import curses
from curses import textpad
curses.initscr()
...
"""
__revision__ = "$Id$"
from _curses import *
from curses.wrapper import wrapper
import os as _os
import sys as _sys
# Some constants, most notably the ACS_* ones, are only added to the C
# _curses module's dictionary after initscr() is called. (Some
# versions of SGI's curses don't define values for those constants
# until initscr() has been called.) This wrapper function calls the
# underlying C initscr(), and then copies the constants from the
# _curses module to the curses package's dictionary. Don't do 'from
# curses import *' if you'll be needing the ACS_* constants.
def initscr():
import _curses, curses
# we call setupterm() here because it raises an error
# instead of calling exit() in error cases.
setupterm(term=_os.environ.get("TERM", "unknown"),
fd=_sys.__stdout__.fileno())
stdscr = _curses.initscr()
for key, value in _curses.__dict__.items():
if key[0:4] == 'ACS_' or key in ('LINES', 'COLS'):
setattr(curses, key, value)
return stdscr
# This is a similar wrapper for start_color(), which adds the COLORS and
# COLOR_PAIRS variables which are only available after start_color() is
# called.
def start_color():
import _curses, curses
retval = _curses.start_color()
if hasattr(_curses, 'COLORS'):
curses.COLORS = _curses.COLORS
if hasattr(_curses, 'COLOR_PAIRS'):
curses.COLOR_PAIRS = _curses.COLOR_PAIRS
return retval
# Import Python has_key() implementation if _curses doesn't contain has_key()
try:
has_key
except NameError:
from has_key import has_key
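
The module above looks like curses/__init__.py; its initscr() wrapper copies the ACS_* constants plus LINES and COLS into the curses package once the underlying C initscr() has run. A minimal interactive sketch (needs a real terminal):

import curses

stdscr = curses.initscr()       # after this, curses.LINES and curses.COLS exist
try:
    stdscr.addstr(0, 0, "screen is %dx%d" % (curses.COLS, curses.LINES))
    stdscr.refresh()
    stdscr.getch()
finally:
    curses.endwin()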


@@ -1,99 +0,0 @@
"""Constants and membership tests for ASCII characters"""
NUL = 0x00 # ^@
SOH = 0x01 # ^A
STX = 0x02 # ^B
ETX = 0x03 # ^C
EOT = 0x04 # ^D
ENQ = 0x05 # ^E
ACK = 0x06 # ^F
BEL = 0x07 # ^G
BS = 0x08 # ^H
TAB = 0x09 # ^I
HT = 0x09 # ^I
LF = 0x0a # ^J
NL = 0x0a # ^J
VT = 0x0b # ^K
FF = 0x0c # ^L
CR = 0x0d # ^M
SO = 0x0e # ^N
SI = 0x0f # ^O
DLE = 0x10 # ^P
DC1 = 0x11 # ^Q
DC2 = 0x12 # ^R
DC3 = 0x13 # ^S
DC4 = 0x14 # ^T
NAK = 0x15 # ^U
SYN = 0x16 # ^V
ETB = 0x17 # ^W
CAN = 0x18 # ^X
EM = 0x19 # ^Y
SUB = 0x1a # ^Z
ESC = 0x1b # ^[
FS = 0x1c # ^\
GS = 0x1d # ^]
RS = 0x1e # ^^
US = 0x1f # ^_
SP = 0x20 # space
DEL = 0x7f # delete
controlnames = [
"NUL", "SOH", "STX", "ETX", "EOT", "ENQ", "ACK", "BEL",
"BS", "HT", "LF", "VT", "FF", "CR", "SO", "SI",
"DLE", "DC1", "DC2", "DC3", "DC4", "NAK", "SYN", "ETB",
"CAN", "EM", "SUB", "ESC", "FS", "GS", "RS", "US",
"SP"
]
def _ctoi(c):
if type(c) == type(""):
return ord(c)
else:
return c
def isalnum(c): return isalpha(c) or isdigit(c)
def isalpha(c): return isupper(c) or islower(c)
def isascii(c): return _ctoi(c) <= 127 # ?
def isblank(c): return _ctoi(c) in (8,32)
def iscntrl(c): return _ctoi(c) <= 31
def isdigit(c): return _ctoi(c) >= 48 and _ctoi(c) <= 57
def isgraph(c): return _ctoi(c) >= 33 and _ctoi(c) <= 126
def islower(c): return _ctoi(c) >= 97 and _ctoi(c) <= 122
def isprint(c): return _ctoi(c) >= 32 and _ctoi(c) <= 126
def ispunct(c): return _ctoi(c) != 32 and not isalnum(c)
def isspace(c): return _ctoi(c) in (9, 10, 11, 12, 13, 32)
def isupper(c): return _ctoi(c) >= 65 and _ctoi(c) <= 90
def isxdigit(c): return isdigit(c) or \
(_ctoi(c) >= 65 and _ctoi(c) <= 70) or (_ctoi(c) >= 97 and _ctoi(c) <= 102)
def isctrl(c): return _ctoi(c) < 32
def ismeta(c): return _ctoi(c) > 127
def ascii(c):
if type(c) == type(""):
return chr(_ctoi(c) & 0x7f)
else:
return _ctoi(c) & 0x7f
def ctrl(c):
if type(c) == type(""):
return chr(_ctoi(c) & 0x1f)
else:
return _ctoi(c) & 0x1f
def alt(c):
if type(c) == type(""):
return chr(_ctoi(c) | 0x80)
else:
return _ctoi(c) | 0x80
def unctrl(c):
bits = _ctoi(c)
if bits == 0x7f:
rep = "^?"
elif isprint(bits & 0x7f):
rep = chr(bits & 0x7f)
else:
rep = "^" + chr(((bits & 0x7f) | 0x20) + 0x20)
if bits & 0x80:
return "!" + rep
return rep
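
The file above appears to be curses/ascii.py. A few quick checks of its predicates and helpers:

from curses import ascii

print ascii.isprint('a'), ascii.iscntrl(ascii.ESC)    # True True
print repr(ascii.ctrl('a')), ascii.unctrl('\x01')     # '\x01' ^A
print ascii.isblank(' '), ascii.isxdigit('f')         # True True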


@@ -1,192 +0,0 @@
#
# Emulation of has_key() function for platforms that don't use ncurses
#
import _curses
# Table mapping curses keys to the terminfo capability name
_capability_names = {
_curses.KEY_A1: 'ka1',
_curses.KEY_A3: 'ka3',
_curses.KEY_B2: 'kb2',
_curses.KEY_BACKSPACE: 'kbs',
_curses.KEY_BEG: 'kbeg',
_curses.KEY_BTAB: 'kcbt',
_curses.KEY_C1: 'kc1',
_curses.KEY_C3: 'kc3',
_curses.KEY_CANCEL: 'kcan',
_curses.KEY_CATAB: 'ktbc',
_curses.KEY_CLEAR: 'kclr',
_curses.KEY_CLOSE: 'kclo',
_curses.KEY_COMMAND: 'kcmd',
_curses.KEY_COPY: 'kcpy',
_curses.KEY_CREATE: 'kcrt',
_curses.KEY_CTAB: 'kctab',
_curses.KEY_DC: 'kdch1',
_curses.KEY_DL: 'kdl1',
_curses.KEY_DOWN: 'kcud1',
_curses.KEY_EIC: 'krmir',
_curses.KEY_END: 'kend',
_curses.KEY_ENTER: 'kent',
_curses.KEY_EOL: 'kel',
_curses.KEY_EOS: 'ked',
_curses.KEY_EXIT: 'kext',
_curses.KEY_F0: 'kf0',
_curses.KEY_F1: 'kf1',
_curses.KEY_F10: 'kf10',
_curses.KEY_F11: 'kf11',
_curses.KEY_F12: 'kf12',
_curses.KEY_F13: 'kf13',
_curses.KEY_F14: 'kf14',
_curses.KEY_F15: 'kf15',
_curses.KEY_F16: 'kf16',
_curses.KEY_F17: 'kf17',
_curses.KEY_F18: 'kf18',
_curses.KEY_F19: 'kf19',
_curses.KEY_F2: 'kf2',
_curses.KEY_F20: 'kf20',
_curses.KEY_F21: 'kf21',
_curses.KEY_F22: 'kf22',
_curses.KEY_F23: 'kf23',
_curses.KEY_F24: 'kf24',
_curses.KEY_F25: 'kf25',
_curses.KEY_F26: 'kf26',
_curses.KEY_F27: 'kf27',
_curses.KEY_F28: 'kf28',
_curses.KEY_F29: 'kf29',
_curses.KEY_F3: 'kf3',
_curses.KEY_F30: 'kf30',
_curses.KEY_F31: 'kf31',
_curses.KEY_F32: 'kf32',
_curses.KEY_F33: 'kf33',
_curses.KEY_F34: 'kf34',
_curses.KEY_F35: 'kf35',
_curses.KEY_F36: 'kf36',
_curses.KEY_F37: 'kf37',
_curses.KEY_F38: 'kf38',
_curses.KEY_F39: 'kf39',
_curses.KEY_F4: 'kf4',
_curses.KEY_F40: 'kf40',
_curses.KEY_F41: 'kf41',
_curses.KEY_F42: 'kf42',
_curses.KEY_F43: 'kf43',
_curses.KEY_F44: 'kf44',
_curses.KEY_F45: 'kf45',
_curses.KEY_F46: 'kf46',
_curses.KEY_F47: 'kf47',
_curses.KEY_F48: 'kf48',
_curses.KEY_F49: 'kf49',
_curses.KEY_F5: 'kf5',
_curses.KEY_F50: 'kf50',
_curses.KEY_F51: 'kf51',
_curses.KEY_F52: 'kf52',
_curses.KEY_F53: 'kf53',
_curses.KEY_F54: 'kf54',
_curses.KEY_F55: 'kf55',
_curses.KEY_F56: 'kf56',
_curses.KEY_F57: 'kf57',
_curses.KEY_F58: 'kf58',
_curses.KEY_F59: 'kf59',
_curses.KEY_F6: 'kf6',
_curses.KEY_F60: 'kf60',
_curses.KEY_F61: 'kf61',
_curses.KEY_F62: 'kf62',
_curses.KEY_F63: 'kf63',
_curses.KEY_F7: 'kf7',
_curses.KEY_F8: 'kf8',
_curses.KEY_F9: 'kf9',
_curses.KEY_FIND: 'kfnd',
_curses.KEY_HELP: 'khlp',
_curses.KEY_HOME: 'khome',
_curses.KEY_IC: 'kich1',
_curses.KEY_IL: 'kil1',
_curses.KEY_LEFT: 'kcub1',
_curses.KEY_LL: 'kll',
_curses.KEY_MARK: 'kmrk',
_curses.KEY_MESSAGE: 'kmsg',
_curses.KEY_MOVE: 'kmov',
_curses.KEY_NEXT: 'knxt',
_curses.KEY_NPAGE: 'knp',
_curses.KEY_OPEN: 'kopn',
_curses.KEY_OPTIONS: 'kopt',
_curses.KEY_PPAGE: 'kpp',
_curses.KEY_PREVIOUS: 'kprv',
_curses.KEY_PRINT: 'kprt',
_curses.KEY_REDO: 'krdo',
_curses.KEY_REFERENCE: 'kref',
_curses.KEY_REFRESH: 'krfr',
_curses.KEY_REPLACE: 'krpl',
_curses.KEY_RESTART: 'krst',
_curses.KEY_RESUME: 'kres',
_curses.KEY_RIGHT: 'kcuf1',
_curses.KEY_SAVE: 'ksav',
_curses.KEY_SBEG: 'kBEG',
_curses.KEY_SCANCEL: 'kCAN',
_curses.KEY_SCOMMAND: 'kCMD',
_curses.KEY_SCOPY: 'kCPY',
_curses.KEY_SCREATE: 'kCRT',
_curses.KEY_SDC: 'kDC',
_curses.KEY_SDL: 'kDL',
_curses.KEY_SELECT: 'kslt',
_curses.KEY_SEND: 'kEND',
_curses.KEY_SEOL: 'kEOL',
_curses.KEY_SEXIT: 'kEXT',
_curses.KEY_SF: 'kind',
_curses.KEY_SFIND: 'kFND',
_curses.KEY_SHELP: 'kHLP',
_curses.KEY_SHOME: 'kHOM',
_curses.KEY_SIC: 'kIC',
_curses.KEY_SLEFT: 'kLFT',
_curses.KEY_SMESSAGE: 'kMSG',
_curses.KEY_SMOVE: 'kMOV',
_curses.KEY_SNEXT: 'kNXT',
_curses.KEY_SOPTIONS: 'kOPT',
_curses.KEY_SPREVIOUS: 'kPRV',
_curses.KEY_SPRINT: 'kPRT',
_curses.KEY_SR: 'kri',
_curses.KEY_SREDO: 'kRDO',
_curses.KEY_SREPLACE: 'kRPL',
_curses.KEY_SRIGHT: 'kRIT',
_curses.KEY_SRSUME: 'kRES',
_curses.KEY_SSAVE: 'kSAV',
_curses.KEY_SSUSPEND: 'kSPD',
_curses.KEY_STAB: 'khts',
_curses.KEY_SUNDO: 'kUND',
_curses.KEY_SUSPEND: 'kspd',
_curses.KEY_UNDO: 'kund',
_curses.KEY_UP: 'kcuu1'
}
def has_key(ch):
if isinstance(ch, str):
ch = ord(ch)
# Figure out the correct capability name for the keycode.
capability_name = _capability_names.get(ch)
if capability_name is None:
return False
#Check the current terminal description for that capability;
#if present, return true, else return false.
if _curses.tigetstr( capability_name ):
return True
else:
return False
if __name__ == '__main__':
# Compare the output of this implementation and the ncurses has_key,
# on platforms where has_key is already available
try:
L = []
_curses.initscr()
for key in _capability_names.keys():
system = key in _curses
python = has_key(key)
if system != python:
L.append( 'Mismatch for key %s, system=%i, Python=%i'
% (_curses.keyname( key ), system, python) )
finally:
_curses.endwin()
for i in L: print i
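
The file above appears to be curses/has_key.py, the pure-Python fallback used when ncurses' own has_key() is unavailable. A hedged sketch of calling it (a terminal is required, since tigetstr() needs setupterm() to have run):

import curses
from curses.has_key import has_key

curses.initscr()                       # initscr() also calls setupterm()
try:
    result = has_key(curses.KEY_LEFT)  # True if terminfo defines kcub1
finally:
    curses.endwin()
print result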


@@ -1,8 +0,0 @@
"""curses.panel
Module for using panels with curses.
"""
__revision__ = "$Id$"
from _curses_panel import *


@@ -1,50 +0,0 @@
"""curses.wrapper
Contains one function, wrapper(), which runs another function which
should be the rest of your curses-based application. If the
application raises an exception, wrapper() will restore the terminal
to a sane state so you can read the resulting traceback.
"""
import curses
def wrapper(func, *args, **kwds):
"""Wrapper function that initializes curses and calls another function,
restoring normal keyboard/screen behavior on error.
The callable object 'func' is then passed the main window 'stdscr'
as its first argument, followed by any other arguments passed to
wrapper().
"""
try:
# Initialize curses
stdscr = curses.initscr()
# Turn off echoing of keys, and enter cbreak mode,
# where no buffering is performed on keyboard input
curses.noecho()
curses.cbreak()
# In keypad mode, escape sequences for special keys
# (like the cursor keys) will be interpreted and
# a special value like curses.KEY_LEFT will be returned
stdscr.keypad(1)
# Start color, too. Harmless if the terminal doesn't have
# color; user can test with has_color() later on. The try/catch
# works around a minor bit of over-conscientiousness in the curses
# module -- the error return from C start_color() is ignorable.
try:
curses.start_color()
except:
pass
return func(stdscr, *args, **kwds)
finally:
# Set everything back to normal
if 'stdscr' in locals():
stdscr.keypad(0)
curses.echo()
curses.nocbreak()
curses.endwin()
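
The module above is curses.wrapper as described in its docstring. Typical use passes a callable plus any extra arguments; the terminal is restored even if the callable raises (the main() function and message below are illustrative):

import curses

def main(stdscr, message):
    stdscr.addstr(0, 0, message)
    stdscr.refresh()
    stdscr.getch()

curses.wrapper(main, "hello from wrapper()")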


@@ -1,41 +0,0 @@
"""Read and cache directory listings.
The listdir() routine returns a sorted list of the files in a directory,
using a cache to avoid reading the directory more often than necessary.
The annotate() routine appends slashes to directories."""
from warnings import warnpy3k
warnpy3k("the dircache module has been removed in Python 3.0", stacklevel=2)
del warnpy3k
import os
__all__ = ["listdir", "opendir", "annotate", "reset"]
cache = {}
def reset():
"""Reset the cache completely."""
global cache
cache = {}
def listdir(path):
"""List directory contents, using cache."""
try:
cached_mtime, list = cache[path]
del cache[path]
except KeyError:
cached_mtime, list = -1, []
mtime = os.stat(path).st_mtime
if mtime != cached_mtime:
list = os.listdir(path)
list.sort()
cache[path] = mtime, list
return list
opendir = listdir # XXX backward compatibility
def annotate(head, list):
"""Add '/' suffixes to directories."""
for i in range(len(list)):
if os.path.isdir(os.path.join(head, list[i])):
list[i] = list[i] + '/'
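
The file above appears to be the Python-2-only dircache module. A short example of the cached listing plus annotate():

import dircache

files = dircache.listdir('.')    # reads the directory and caches (mtime, list)
files = dircache.listdir('.')    # a repeat call is served from the cache
dircache.annotate('.', files)    # directories gain a trailing '/' in place
print files[:5]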


@@ -1,224 +0,0 @@
"""Disassembler of Python byte code into mnemonics."""
import sys
import types
from opcode import *
from opcode import __all__ as _opcodes_all
__all__ = ["dis", "disassemble", "distb", "disco",
"findlinestarts", "findlabels"] + _opcodes_all
del _opcodes_all
_have_code = (types.MethodType, types.FunctionType, types.CodeType,
types.ClassType, type)
def dis(x=None):
"""Disassemble classes, methods, functions, or code.
With no argument, disassemble the last traceback.
"""
if x is None:
distb()
return
if isinstance(x, types.InstanceType):
x = x.__class__
if hasattr(x, 'im_func'):
x = x.im_func
if hasattr(x, 'func_code'):
x = x.func_code
if hasattr(x, '__dict__'):
items = x.__dict__.items()
items.sort()
for name, x1 in items:
if isinstance(x1, _have_code):
print "Disassembly of %s:" % name
try:
dis(x1)
except TypeError, msg:
print "Sorry:", msg
print
elif hasattr(x, 'co_code'):
disassemble(x)
elif isinstance(x, str):
disassemble_string(x)
else:
raise TypeError, \
"don't know how to disassemble %s objects" % \
type(x).__name__
def distb(tb=None):
"""Disassemble a traceback (default: last traceback)."""
if tb is None:
try:
tb = sys.last_traceback
except AttributeError:
raise RuntimeError, "no last traceback to disassemble"
while tb.tb_next: tb = tb.tb_next
disassemble(tb.tb_frame.f_code, tb.tb_lasti)
def disassemble(co, lasti=-1):
"""Disassemble a code object."""
code = co.co_code
labels = findlabels(code)
linestarts = dict(findlinestarts(co))
n = len(code)
i = 0
extended_arg = 0
free = None
while i < n:
c = code[i]
op = ord(c)
if i in linestarts:
if i > 0:
print
print "%3d" % linestarts[i],
else:
print ' ',
if i == lasti: print '-->',
else: print ' ',
if i in labels: print '>>',
else: print ' ',
print repr(i).rjust(4),
print opname[op].ljust(20),
i = i+1
if op >= HAVE_ARGUMENT:
oparg = ord(code[i]) + ord(code[i+1])*256 + extended_arg
extended_arg = 0
i = i+2
if op == EXTENDED_ARG:
extended_arg = oparg*65536L
print repr(oparg).rjust(5),
if op in hasconst:
print '(' + repr(co.co_consts[oparg]) + ')',
elif op in hasname:
print '(' + co.co_names[oparg] + ')',
elif op in hasjrel:
print '(to ' + repr(i + oparg) + ')',
elif op in haslocal:
print '(' + co.co_varnames[oparg] + ')',
elif op in hascompare:
print '(' + cmp_op[oparg] + ')',
elif op in hasfree:
if free is None:
free = co.co_cellvars + co.co_freevars
print '(' + free[oparg] + ')',
print
def disassemble_string(code, lasti=-1, varnames=None, names=None,
constants=None):
labels = findlabels(code)
n = len(code)
i = 0
while i < n:
c = code[i]
op = ord(c)
if i == lasti: print '-->',
else: print ' ',
if i in labels: print '>>',
else: print ' ',
print repr(i).rjust(4),
print opname[op].ljust(15),
i = i+1
if op >= HAVE_ARGUMENT:
oparg = ord(code[i]) + ord(code[i+1])*256
i = i+2
print repr(oparg).rjust(5),
if op in hasconst:
if constants:
print '(' + repr(constants[oparg]) + ')',
else:
print '(%d)'%oparg,
elif op in hasname:
if names is not None:
print '(' + names[oparg] + ')',
else:
print '(%d)'%oparg,
elif op in hasjrel:
print '(to ' + repr(i + oparg) + ')',
elif op in haslocal:
if varnames:
print '(' + varnames[oparg] + ')',
else:
print '(%d)' % oparg,
elif op in hascompare:
print '(' + cmp_op[oparg] + ')',
print
disco = disassemble # XXX For backwards compatibility
def findlabels(code):
"""Detect all offsets in a byte code which are jump targets.
Return the list of offsets.
"""
labels = []
n = len(code)
i = 0
while i < n:
c = code[i]
op = ord(c)
i = i+1
if op >= HAVE_ARGUMENT:
oparg = ord(code[i]) + ord(code[i+1])*256
i = i+2
label = -1
if op in hasjrel:
label = i+oparg
elif op in hasjabs:
label = oparg
if label >= 0:
if label not in labels:
labels.append(label)
return labels
def findlinestarts(code):
"""Find the offsets in a byte code which are start of lines in the source.
Generate pairs (offset, lineno) as described in Python/compile.c.
"""
byte_increments = [ord(c) for c in code.co_lnotab[0::2]]
line_increments = [ord(c) for c in code.co_lnotab[1::2]]
lastlineno = None
lineno = code.co_firstlineno
addr = 0
for byte_incr, line_incr in zip(byte_increments, line_increments):
if byte_incr:
if lineno != lastlineno:
yield (addr, lineno)
lastlineno = lineno
addr += byte_incr
lineno += line_incr
if lineno != lastlineno:
yield (addr, lineno)
def _test():
"""Simple test program to disassemble a file."""
if sys.argv[1:]:
if sys.argv[2:]:
sys.stderr.write("usage: python dis.py [-|file]\n")
sys.exit(2)
fn = sys.argv[1]
if not fn or fn == "-":
fn = None
else:
fn = None
if fn is None:
f = sys.stdin
else:
f = open(fn)
source = f.read()
if fn is not None:
f.close()
else:
fn = "<stdin>"
code = compile(source, fn, "exec")
dis(code)
if __name__ == "__main__":
_test()
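
The module above is the Python 2 dis disassembler. A short example of its main entry point (the function f is illustrative):

import dis

def f(a):
    b = a + 1
    return b

dis.dis(f)      # prints LOAD_FAST, LOAD_CONST, BINARY_ADD, STORE_FAST, ... one op per line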

test/ok_lib2.6/README Normal file

@@ -0,0 +1,2 @@
These are programs from the Python 2.6 standard library that are known
to decompile ok.

test/ok_lib2.7/README Normal file

@@ -0,0 +1,2 @@
These are programs from the Python 2.7 standard library that are known
to decompile ok.

BIN  test/ok_lib2.7/aifc.pyc  Normal file
BIN  test/ok_lib2.7/anydbm.pyc  Normal file
BIN  test/ok_lib2.7/asynchat.pyc  Normal file
BIN  test/ok_lib2.7/atexit.pyc  Normal file
BIN  test/ok_lib2.7/audiodev.pyc  Normal file
BIN  test/ok_lib2.7/base64.pyc  Normal file
BIN  test/ok_lib2.7/binhex.pyc  Normal file
BIN  test/ok_lib2.7/bisect.pyc  Normal file
BIN  test/ok_lib2.7/calendar.pyc  Normal file
BIN  test/ok_lib2.7/cgitb.pyc  Normal file
BIN  test/ok_lib2.7/chunk.pyc  Normal file
BIN  test/ok_lib2.7/codeop.pyc  Normal file
BIN  test/ok_lib2.7/colorsys.pyc  Normal file
Some files were not shown because too many files have changed in this diff.