svn+ssh://pythondev@svn.python.org/python/branches/p3yk ................ r55837 | guido.van.rossum | 2007-06-08 16:04:42 -0700 (Fri, 08 Jun 2007) | 2 lines PEP 3119 -- the abc module. ................ r55838 | guido.van.rossum | 2007-06-08 17:38:55 -0700 (Fri, 08 Jun 2007) | 2 lines Implement part of PEP 3119 -- One Trick Ponies. ................ r55847 | guido.van.rossum | 2007-06-09 08:28:06 -0700 (Sat, 09 Jun 2007) | 2 lines Different way to do one trick ponies, allowing registration (per PEP strawman). ................ r55849 | guido.van.rossum | 2007-06-09 18:06:38 -0700 (Sat, 09 Jun 2007) | 3 lines Make sure that the magic looking for __hash__ (etc.) doesn't apply to real subclasses of Hashable. ................ r55852 | guido.van.rossum | 2007-06-10 08:29:51 -0700 (Sun, 10 Jun 2007) | 2 lines Add some more examples, e.g. generators and dict views. ................ r55853 | guido.van.rossum | 2007-06-10 08:31:59 -0700 (Sun, 10 Jun 2007) | 2 lines keys() and items() *are* containers -- just values() isn't. ................ r55864 | georg.brandl | 2007-06-10 15:29:40 -0700 (Sun, 10 Jun 2007) | 2 lines PEP 3127: new octal literals, binary literals. ................ r55865 | georg.brandl | 2007-06-10 15:31:37 -0700 (Sun, 10 Jun 2007) | 2 lines Some octal literal fixes in Tools. ................ r55866 | georg.brandl | 2007-06-10 15:37:43 -0700 (Sun, 10 Jun 2007) | 2 lines Tokenizer changes for PEP 3127. ................ r55867 | georg.brandl | 2007-06-10 15:37:55 -0700 (Sun, 10 Jun 2007) | 2 lines Some docs for PEP 3127. ................ r55868 | georg.brandl | 2007-06-10 15:44:39 -0700 (Sun, 10 Jun 2007) | 2 lines Missed a place in intobject.c. Is that used anymore anyway? ................ r55871 | neal.norwitz | 2007-06-10 18:31:49 -0700 (Sun, 10 Jun 2007) | 182 lines Merged revisions 55729-55868 via svnmerge from svn+ssh://pythondev@svn.python.org/python/trunk ........ 
r55731 | neal.norwitz | 2007-06-01 00:29:12 -0700 (Fri, 01 Jun 2007) | 7 lines SF 1668596/1720897: distutils now copies data files even if package_dir is empty. This needs to be backported. I'm too tired tonight. It would be great if someone backports this if the buildbots are ok with it. Otherwise, I will try to get to it tomorrow. ........ r55732 | georg.brandl | 2007-06-01 04:33:33 -0700 (Fri, 01 Jun 2007) | 2 lines Bug #1722484: remove docstrings again when running with -OO. ........ r55735 | georg.brandl | 2007-06-01 12:20:27 -0700 (Fri, 01 Jun 2007) | 2 lines Fix wrong issue number. ........ r55739 | brett.cannon | 2007-06-01 20:02:29 -0700 (Fri, 01 Jun 2007) | 3 lines Have configure raise an error when building on AtheOS. Code specific to AtheOS will be removed in Python 2.7. ........ r55746 | neal.norwitz | 2007-06-02 11:33:53 -0700 (Sat, 02 Jun 2007) | 1 line Update expected birthday of 2.6 ........ r55751 | neal.norwitz | 2007-06-03 13:32:50 -0700 (Sun, 03 Jun 2007) | 10 lines Backout the original 'fix' to 1721309 which had no effect. Different versions of Berkeley DB handle this differently. The comments and bug report should have the details. Memory is allocated in 4.4 (and presumably earlier), but not in 4.5. Thus 4.5 has the free error, but not earlier versions. Mostly update comments, plus make the free conditional. This fix was already applied to the 2.5 branch. ........ r55752 | brett.cannon | 2007-06-03 16:13:41 -0700 (Sun, 03 Jun 2007) | 6 lines Make _strptime.TimeRE().pattern() use ``\s+`` for matching whitespace instead of ``\s*``. This prevents patterns from "stealing" bits from other patterns in order to make a match work. Closes bug #1730389. Will be backported. ........ r55766 | hyeshik.chang | 2007-06-05 11:16:52 -0700 (Tue, 05 Jun 2007) | 4 lines Fix build on FreeBSD. Bluetooth HCI API in FreeBSD is quite different from Linux's. Just fix the build for now but the code doesn't support the complete capability of HCI on FreeBSD yet. ........ 
r55770 | hyeshik.chang | 2007-06-05 11:58:51 -0700 (Tue, 05 Jun 2007) | 4 lines Bug #1728403: Fix a bug that CJKCodecs StreamReader hangs when it reads a file that ends with incomplete sequence and sizehint argument for .read() is specified. ........ r55775 | hyeshik.chang | 2007-06-05 12:28:15 -0700 (Tue, 05 Jun 2007) | 2 lines Fix for Windows: close a temporary file before trying to delete it. ........ r55783 | guido.van.rossum | 2007-06-05 14:24:47 -0700 (Tue, 05 Jun 2007) | 2 lines Patch by Tim Delany (missing DECREF). SF #1731330. ........ r55785 | collin.winter | 2007-06-05 17:17:35 -0700 (Tue, 05 Jun 2007) | 3 lines Patch #1731049: make threading.py use a proper "raise" when checking internal state, rather than assert statements (which get stripped out by -O). ........ r55786 | facundo.batista | 2007-06-06 08:13:37 -0700 (Wed, 06 Jun 2007) | 4 lines FTP.ntransfercmd method now uses create_connection when passive, using the timeout received in connection time. ........ r55792 | facundo.batista | 2007-06-06 10:15:23 -0700 (Wed, 06 Jun 2007) | 7 lines Added an optional timeout parameter to function urllib2.urlopen, with tests in test_urllib2net.py (must have network resource enabled to execute them). Also modified test_urllib2.py because testing mock classes must take it into acount. Docs are also updated. ........ r55793 | thomas.heller | 2007-06-06 13:19:19 -0700 (Wed, 06 Jun 2007) | 1 line Build _ctypes and _ctypes_test in the ReleaseAMD64 configuration. ........ r55802 | georg.brandl | 2007-06-07 06:23:24 -0700 (Thu, 07 Jun 2007) | 3 lines Disallow function calls like foo(None=1). Backport from py3k rev. 55708 by Guido. ........ r55804 | georg.brandl | 2007-06-07 06:30:24 -0700 (Thu, 07 Jun 2007) | 2 lines Make reindent.py executable. ........ r55805 | georg.brandl | 2007-06-07 06:34:10 -0700 (Thu, 07 Jun 2007) | 2 lines Patch #1667860: Fix UnboundLocalError in urllib2. ........ 
r55821 | kristjan.jonsson | 2007-06-07 16:53:49 -0700 (Thu, 07 Jun 2007) | 1 line Fixing changes to getbuildinfo.c that broke linux builds ........ r55828 | thomas.heller | 2007-06-08 09:10:27 -0700 (Fri, 08 Jun 2007) | 1 line Make this test work with older Python releases where struct has no 't' format character. ........ r55829 | martin.v.loewis | 2007-06-08 10:29:20 -0700 (Fri, 08 Jun 2007) | 3 lines Bug #1733488: Fix compilation of bufferobject.c on AIX. Will backport to 2.5. ........ r55831 | thomas.heller | 2007-06-08 11:20:09 -0700 (Fri, 08 Jun 2007) | 2 lines [ 1715718 ] x64 clean compile patch for _ctypes, by Kristj?n Valur with small modifications. ........ r55832 | thomas.heller | 2007-06-08 12:01:06 -0700 (Fri, 08 Jun 2007) | 1 line Fix gcc warnings intruduced by passing Py_ssize_t to PyErr_Format calls. ........ r55833 | thomas.heller | 2007-06-08 12:08:31 -0700 (Fri, 08 Jun 2007) | 2 lines Fix wrong documentation, and correct the punktuation. Closes [1700455]. ........ r55834 | thomas.heller | 2007-06-08 12:14:23 -0700 (Fri, 08 Jun 2007) | 1 line Fix warnings by using proper function prototype. ........ r55839 | neal.norwitz | 2007-06-08 20:36:34 -0700 (Fri, 08 Jun 2007) | 7 lines Prevent expandtabs() on string and unicode objects from causing a segfault when a large width is passed on 32-bit platforms. Found by Google. It would be good for people to review this especially carefully and verify I don't have an off by one error and there is no other way to cause overflow. ........ r55841 | neal.norwitz | 2007-06-08 21:48:22 -0700 (Fri, 08 Jun 2007) | 1 line Use macro version of GET_SIZE to avoid Coverity warning (#150) about a possible error. ........ r55842 | martin.v.loewis | 2007-06-09 00:42:52 -0700 (Sat, 09 Jun 2007) | 3 lines Patch #1733960: Allow T_LONGLONG to accept ints. Will backport to 2.5. ........ r55843 | martin.v.loewis | 2007-06-09 00:58:05 -0700 (Sat, 09 Jun 2007) | 2 lines Fix Windows build. ........ 
r55845 | martin.v.loewis | 2007-06-09 03:10:26 -0700 (Sat, 09 Jun 2007) | 2 lines Provide LLONG_MAX for S390. ........ r55854 | thomas.heller | 2007-06-10 08:59:17 -0700 (Sun, 10 Jun 2007) | 4 lines First version of build scripts for Windows/AMD64 (no external components are built yet, and 'kill_python' is disabled). ........ r55855 | thomas.heller | 2007-06-10 10:55:51 -0700 (Sun, 10 Jun 2007) | 3 lines For now, disable the _bsddb, _sqlite3, _ssl, _testcapi, _tkinter modules in the ReleaseAMD64 configuration because they do not compile. ........ r55856 | thomas.heller | 2007-06-10 11:27:54 -0700 (Sun, 10 Jun 2007) | 1 line Need to set the environment variables, otherwise devenv.com is not found. ........ r55860 | thomas.heller | 2007-06-10 14:01:17 -0700 (Sun, 10 Jun 2007) | 1 line Revert commit 55855. ........ ................ r55880 | neal.norwitz | 2007-06-10 22:07:36 -0700 (Sun, 10 Jun 2007) | 5 lines Fix the refleak counter on test_collections. The ABC metaclass creates a registry which must be cleared on each run. Otherwise, there *seem* to be refleaks when there really aren't any. (The class is held within the registry even though it's no longer needed.) ................ r55884 | neal.norwitz | 2007-06-10 22:46:33 -0700 (Sun, 10 Jun 2007) | 1 line These tests have been removed, so they are no longer needed here ................ r55886 | georg.brandl | 2007-06-11 00:26:37 -0700 (Mon, 11 Jun 2007) | 3 lines Optimize access to True and False in the compiler (if True) and the peepholer (LOAD_NAME True). ................ r55905 | georg.brandl | 2007-06-11 10:02:26 -0700 (Mon, 11 Jun 2007) | 5 lines Remove __oct__ and __hex__ and use __index__ for converting non-ints before formatting in a base. Add a bin() builtin. ................ r55906 | georg.brandl | 2007-06-11 10:04:44 -0700 (Mon, 11 Jun 2007) | 2 lines int(x, 0) does not "guess". ................ 
r55907 | georg.brandl | 2007-06-11 10:05:47 -0700 (Mon, 11 Jun 2007) | 2 lines Add a comment to explain that nb_oct and nb_hex are nonfunctional. ................ r55908 | guido.van.rossum | 2007-06-11 10:49:18 -0700 (Mon, 11 Jun 2007) | 2 lines Get rid of unused imports and comment. ................ r55910 | guido.van.rossum | 2007-06-11 13:05:17 -0700 (Mon, 11 Jun 2007) | 2 lines _Abstract.__new__ now requires either no arguments or __init__ overridden. ................ r55911 | guido.van.rossum | 2007-06-11 13:07:49 -0700 (Mon, 11 Jun 2007) | 7 lines Move the collections ABCs to a separate file, _abcoll.py, in order to avoid needing to import _collections.so during the bootstrap (this will become apparent in the next submit of os.py). Add (plain and mutable) ABCs for Set, Mapping, Sequence. ................ r55912 | guido.van.rossum | 2007-06-11 13:09:31 -0700 (Mon, 11 Jun 2007) | 2 lines Rewrite the _Environ class to use the new collections ABCs. ................ r55913 | guido.van.rossum | 2007-06-11 13:59:45 -0700 (Mon, 11 Jun 2007) | 72 lines Merged revisions 55869-55912 via svnmerge from svn+ssh://pythondev@svn.python.org/python/trunk ........ r55869 | neal.norwitz | 2007-06-10 17:42:11 -0700 (Sun, 10 Jun 2007) | 1 line Add Atul Varma for patch # 1667860 ........ r55870 | neal.norwitz | 2007-06-10 18:22:03 -0700 (Sun, 10 Jun 2007) | 1 line Ignore valgrind problems on Ubuntu from ld ........ r55872 | neal.norwitz | 2007-06-10 18:48:46 -0700 (Sun, 10 Jun 2007) | 2 lines Ignore config.status.lineno which seems new (new autoconf?) ........ r55873 | neal.norwitz | 2007-06-10 19:14:39 -0700 (Sun, 10 Jun 2007) | 1 line Prevent these tests from running on Win64 since they don\'t apply there either ........ r55874 | neal.norwitz | 2007-06-10 19:16:10 -0700 (Sun, 10 Jun 2007) | 5 lines Fix a bug when there was a newline in the string expandtabs was called on. This also catches another condition that can overflow. Will backport. ........ 
r55879 | neal.norwitz | 2007-06-10 21:52:37 -0700 (Sun, 10 Jun 2007) | 1 line Prevent hang if the port cannot be opened. ........ r55881 | neal.norwitz | 2007-06-10 22:28:45 -0700 (Sun, 10 Jun 2007) | 4 lines Add all of the distuils modules that don't seem to have explicit tests. :-( Move an import in mworkscompiler so that this module can be imported on any platform. Hopefully this works on all platforms. ........ r55882 | neal.norwitz | 2007-06-10 22:35:10 -0700 (Sun, 10 Jun 2007) | 4 lines SF #1734732, lower case the module names per PEP 8. Will backport. ........ r55885 | neal.norwitz | 2007-06-10 23:16:48 -0700 (Sun, 10 Jun 2007) | 4 lines Not sure why this only fails sometimes on Unix machines. Better to disable it and only import msvccompiler on Windows since that's the only place it can work anyways. ........ r55887 | neal.norwitz | 2007-06-11 00:29:43 -0700 (Mon, 11 Jun 2007) | 4 lines Bug #1734723: Fix repr.Repr() so it doesn't ignore the maxtuple attribute. Will backport ........ r55889 | neal.norwitz | 2007-06-11 00:36:24 -0700 (Mon, 11 Jun 2007) | 1 line Reflow long line ........ r55896 | thomas.heller | 2007-06-11 08:58:33 -0700 (Mon, 11 Jun 2007) | 3 lines Use "O&" in calls to PyArg_Parse when we need a 'void*' instead of "k" or "K" codes. ........ r55901 | facundo.batista | 2007-06-11 09:27:08 -0700 (Mon, 11 Jun 2007) | 5 lines Added versionchanged flag to all the methods which received a new optional timeout parameter, and a versionadded flag to the socket.create_connection function. ........ ................ r55914 | guido.van.rossum | 2007-06-11 14:19:50 -0700 (Mon, 11 Jun 2007) | 3 lines New super() implementation, for PEP 3135 (though the PEP is not yet updated to this design, and small tweaks may still be made later). ................ 
r55923 | guido.van.rossum | 2007-06-11 21:15:24 -0700 (Mon, 11 Jun 2007) | 4 lines I'm guessing this module broke when Neal ripped out the types module -- it used 'list' both as a local variable and as the built-in list type. Renamed the local variable since that was easier. ................ r55924 | guido.van.rossum | 2007-06-11 21:20:05 -0700 (Mon, 11 Jun 2007) | 5 lines Change all occurrences of super(<thisclass>, <firstarg>) to super(). Seems to have worked, all the tests still pass. Exception: test_descr and test_descrtut, which have tons of these and are there to test the various usages. ................ r55939 | collin.winter | 2007-06-12 13:57:33 -0700 (Tue, 12 Jun 2007) | 1 line Patch #1735485: remove StandardError from the exception hierarchy. ................ r55954 | neal.norwitz | 2007-06-12 21:56:32 -0700 (Tue, 12 Jun 2007) | 51 lines Merged revisions 55913-55950 via svnmerge from svn+ssh://pythondev@svn.python.org/python/trunk ........ r55926 | marc-andre.lemburg | 2007-06-12 02:09:58 -0700 (Tue, 12 Jun 2007) | 3 lines Apply patch #1734945 to support TurboLinux as distribution. ........ r55927 | marc-andre.lemburg | 2007-06-12 02:26:49 -0700 (Tue, 12 Jun 2007) | 3 lines Add patch #1726668: Windows Vista support. ........ r55929 | thomas.heller | 2007-06-12 08:36:22 -0700 (Tue, 12 Jun 2007) | 1 line Checkout, but do not yet try to build, exernal sources. ........ r55930 | thomas.heller | 2007-06-12 09:08:27 -0700 (Tue, 12 Jun 2007) | 6 lines Add bufferoverflowU.lib to the libraries needed by _ssl (is this the right thing to do?). Set the /XP64 /RETAIL build enviroment in the makefile when building ReleaseAMD64. ........ r55931 | thomas.heller | 2007-06-12 09:23:19 -0700 (Tue, 12 Jun 2007) | 5 lines Revert this change, since it breaks the win32 build: Add bufferoverflowU.lib to the libraries needed by _ssl (is this the right thing to do?). ........ 
r55934 | thomas.heller | 2007-06-12 10:28:31 -0700 (Tue, 12 Jun 2007) | 3 lines Specify the bufferoverflowU.lib to the makefile on the command line (for ReleaseAMD64 builds). ........ r55937 | thomas.heller | 2007-06-12 12:02:59 -0700 (Tue, 12 Jun 2007) | 3 lines Add bufferoverflowU.lib to PCBuild\_bsddb.vcproj. Build sqlite3.dll and bsddb. ........ r55938 | thomas.heller | 2007-06-12 12:56:12 -0700 (Tue, 12 Jun 2007) | 2 lines Don't rebuild Berkeley DB if not needed (this was committed by accident). ........ r55948 | martin.v.loewis | 2007-06-12 20:42:19 -0700 (Tue, 12 Jun 2007) | 3 lines Provide PY_LLONG_MAX on all systems having long long. Will backport to 2.5. ........ ................ r55959 | guido.van.rossum | 2007-06-13 09:22:41 -0700 (Wed, 13 Jun 2007) | 2 lines Fix a compilation warning. ................
1164 lines
38 KiB
Python
1164 lines
38 KiB
Python
import gc
|
|
import sys
|
|
import unittest
|
|
import UserList
|
|
import weakref
|
|
|
|
from test import test_support
|
|
|
|
# Used in ReferencesTestCase.test_ref_created_during_del() .
|
|
ref_from_del = None
|
|
|
|
class C:
    """Minimal class whose instances and methods serve as weakref targets."""

    def method(self):
        """Do nothing; exists so bound/unbound methods can be weak-referenced."""
|
class Callable:
    """Callable helper that records the argument of its most recent call."""

    bar = None  # last value passed to __call__(); None until first call

    def __call__(self, x):
        """Remember *x* so tests can observe that the call went through."""
        self.bar = x
|
def create_function():
    """Return a brand-new no-op function (a weakref-able object)."""
    def f():
        pass
    return f
|
def create_bound_method():
    """Return a bound method of a fresh C instance (a weakref-able object)."""
    instance = C()
    return instance.method
|
def create_unbound_method():
    """Return C's method object itself, not bound to any instance."""
    method = C.method
    return method
|
class TestBase(unittest.TestCase):
    """Common fixture: counts how many times the weakref callback fired."""

    def setUp(self):
        # Reset the callback counter before every test.
        self.cbcalled = 0

    def callback(self, ref):
        """Weakref callback: bump the invocation counter."""
        self.cbcalled = self.cbcalled + 1
|
class ReferencesTestCase(TestBase):
|
|
|
|
def test_basic_ref(self):
    """Basic weakref behavior holds for every kind of weakref-able factory."""
    for factory in (C, create_function, create_bound_method,
                    create_unbound_method):
        self.check_basic_ref(factory)

    # Just make sure the tp_repr handler doesn't raise an exception,
    # whether the referent is alive or already gone.
    obj = C()
    wr = weakref.ref(obj)
    repr(wr)   # live reference
    del obj
    repr(wr)   # dead reference
|
|
|
|
def test_basic_callback(self):
    """The callback fires exactly once for every kind of referent."""
    for factory in (C, create_function, create_bound_method,
                    create_unbound_method):
        self.check_basic_callback(factory)
|
|
|
|
def test_multiple_callbacks(self):
    """Two refs with callbacks: both are invalidated and both callbacks run."""
    obj = C()
    first = weakref.ref(obj, self.callback)
    second = weakref.ref(obj, self.callback)
    del obj
    self.assert_(first() is None,
                 "expected reference to be invalidated")
    self.assert_(second() is None,
                 "expected reference to be invalidated")
    self.assert_(self.cbcalled == 2,
                 "callback not called the right number of times")
|
|
|
|
def test_multiple_selfref_callbacks(self):
    """All refs to an object are invalidated before any callback runs."""
    # Make sure all references are invalidated before callbacks are called
    #
    # What's important here is that we're using the first
    # reference in the callback invoked on the second reference
    # (the most recently created ref is cleaned up first). This
    # tests that all references to the object are invalidated
    # before any of the callbacks are invoked, so that we only
    # have one invocation of _weakref.c:cleanup_helper() active
    # for a particular object at a time.
    #
    def callback(object, self=self):
        # Dereference the first ref; it must already be cleared by now.
        self.ref()
    c = C()
    self.ref = weakref.ref(c, callback)
    ref1 = weakref.ref(c, callback)
    del c
|
|
|
|
def test_proxy_ref(self):
    """Dead proxies raise ReferenceError on any access."""
    target = C()
    target.bar = 1
    proxy_a = weakref.proxy(target, self.callback)
    proxy_b = weakref.proxy(target, self.callback)
    del target

    def touch(proxy):
        proxy.bar

    self.assertRaises(ReferenceError, touch, proxy_a)
    self.assertRaises(ReferenceError, touch, proxy_b)
    # A proxy whose referent died immediately is dead from the start.
    self.assertRaises(ReferenceError, bool, weakref.proxy(C()))
    self.assert_(self.cbcalled == 2)
|
|
|
|
def check_basic_ref(self, factory):
    """A weakref to a live object must dereference to that same object."""
    obj = factory()
    wr = weakref.ref(obj)
    self.assert_(wr() is not None,
                 "weak reference to live object should be live")
    deref = wr()
    self.assert_(obj is deref,
                 "<ref>() should return original object if live")
|
|
|
|
def check_basic_callback(self, factory):
    """Deleting the referent invalidates the ref and runs the callback once."""
    self.cbcalled = 0
    obj = factory()
    wr = weakref.ref(obj, self.callback)
    del obj
    self.assert_(self.cbcalled == 1,
                 "callback did not properly set 'cbcalled'")
    self.assert_(wr() is None,
                 "ref2 should be dead after deleting object reference")
|
|
|
|
def test_ref_reuse(self):
    """Callback-less refs to the same object are shared (cached)."""
    obj = C()
    first = weakref.ref(obj)
    # Create a proxy in between; the intervening creation should make
    # no difference to the ref-sharing behavior.
    proxy = weakref.proxy(obj)
    second = weakref.ref(obj)
    self.assert_(first is second,
                 "reference object w/out callback should be re-used")

    obj = C()
    proxy = weakref.proxy(obj)
    first = weakref.ref(obj)
    second = weakref.ref(obj)
    self.assert_(first is second,
                 "reference object w/out callback should be re-used")
    self.assert_(weakref.getweakrefcount(obj) == 2,
                 "wrong weak ref count for object")
    del proxy
    self.assert_(weakref.getweakrefcount(obj) == 1,
                 "wrong weak ref count for object after deleting proxy")
|
|
|
|
def test_proxy_reuse(self):
    """Callback-less proxies to the same object are shared (cached)."""
    obj = C()
    first = weakref.proxy(obj)
    wr = weakref.ref(obj)
    second = weakref.proxy(obj)
    self.assert_(first is second,
                 "proxy object w/out callback should have been re-used")
|
|
|
|
def test_basic_proxy(self):
    """Proxies forward attribute, item, slice and containment operations."""
    obj = C()
    self.check_proxy(obj, weakref.proxy(obj))

    lst = UserList.UserList()
    prox = weakref.proxy(lst)
    self.failIf(prox, "proxy for empty UserList should be false")
    prox.append(12)
    self.assertEqual(len(lst), 1)
    self.failUnless(prox, "proxy for non-empty UserList should be true")
    prox[:] = [2, 3]
    self.assertEqual(len(lst), 2)
    self.assertEqual(len(prox), 2)
    self.failUnless(3 in prox,
                    "proxy didn't support __contains__() properly")
    prox[1] = 5
    self.assertEqual(lst[1], 5)
    self.assertEqual(prox[1], 5)
    copy = UserList.UserList(lst)
    copy_prox = weakref.proxy(copy)
    self.assertEqual(prox, copy_prox)
    ## self.assertEqual(repr(copy), repr(copy_prox))
    ten = UserList.UserList(range(10))
    ten_prox = weakref.proxy(ten)
    self.assertEqual(ten[:], ten_prox[:])
    self.assertEqual(ten[5:], ten_prox[5:])
    self.assertEqual(ten[:5], ten_prox[:5])
    self.assertEqual(ten[2:5], ten_prox[2:5])
|
|
|
|
# The PyWeakref_* C API is documented as allowing either NULL or
# None as the value for the callback, where either means "no
# callback". For as long as they exist, the "no callback" ref and
# proxy objects are supposed to be shared by all callers. In
# Python 2.3.3 and earlier, this guarantee was not honored, and
# was broken in different ways for PyWeakref_NewRef() and
# PyWeakref_NewProxy(). (Two tests.)
|
|
|
|
def test_shared_ref_without_callback(self):
    """weakref.ref objects created without callbacks must be shared."""
    factory = weakref.ref
    self.check_shared_without_callback(factory)
|
|
|
|
def test_shared_proxy_without_callback(self):
    """weakref.proxy objects created without callbacks must be shared."""
    factory = weakref.proxy
    self.check_shared_without_callback(factory)
|
|
|
|
def check_shared_without_callback(self, makeref):
    """NULL and an explicit None callback both mean 'no callback' and share."""
    o = Object(1)
    # Each pair below must yield the very same cached object. The extra
    # argument tuples distinguish passing None explicitly from omitting
    # the callback entirely (NULL at the C level). Order matches the
    # original test; the refs are dropped between cases.
    cases = [
        ((None,), (None,), "both callbacks were None in the C API"),
        ((), (None,), "callbacks were NULL, None in the C API"),
        ((), (), "both callbacks were NULL in the C API"),
        ((None,), (), "callbacks were None, NULL in the C API"),
    ]
    for extra1, extra2, msg in cases:
        p1 = makeref(o, *extra1)
        p2 = makeref(o, *extra2)
        self.assert_(p1 is p2, msg)
        del p1, p2
|
|
|
|
def test_callable_proxy(self):
    """A proxy to a callable is a CallableProxyType and forwards calls."""
    target = Callable()
    prox = weakref.proxy(target)

    self.check_proxy(target, prox)

    self.assert_(type(prox) is weakref.CallableProxyType,
                 "proxy is not of callable type")
    prox('twinkies!')
    self.assert_(target.bar == 'twinkies!',
                 "call through proxy not passed through to original")
    prox(x='Splat.')
    self.assert_(target.bar == 'Splat.',
                 "call through proxy not passed through to original")

    # Wrong arity must raise just like calling the target directly.
    self.assertRaises(TypeError, prox)           # too few args
    self.assertRaises(TypeError, prox, 1, 2, 3)  # too many args
|
|
|
|
def check_proxy(self, o, proxy):
    """The proxy must mirror attribute traffic with its referent, both ways."""
    # referent -> proxy direction
    o.foo = 1
    self.assert_(proxy.foo == 1,
                 "proxy does not reflect attribute addition")
    o.foo = 2
    self.assert_(proxy.foo == 2,
                 "proxy does not reflect attribute modification")
    del o.foo
    self.assert_(not hasattr(proxy, 'foo'),
                 "proxy does not reflect attribute removal")

    # proxy -> referent direction
    proxy.foo = 1
    self.assert_(o.foo == 1,
                 "object does not reflect attribute addition via proxy")
    proxy.foo = 2
    self.assert_(o.foo == 2,
                 "object does not reflect attribute modification via proxy")
    del proxy.foo
    self.assert_(not hasattr(o, 'foo'),
                 "object does not reflect attribute removal via proxy")
|
|
|
|
def test_proxy_deletion(self):
    """Regression test for SF bug #762891: ``del proxy[i]`` is forwarded."""
    class Sink:
        result = None
        def __delitem__(self, accessor):
            self.result = accessor
    target = Sink()
    prox = weakref.proxy(target)
    del prox[0]
    self.assertEqual(prox.result, 0)
|
|
|
|
def test_proxy_bool(self):
    """Regression test for SF bug #1170766: bool() of a list-subclass proxy."""
    class MyList(list):
        pass
    seq = MyList()
    self.assertEqual(bool(weakref.proxy(seq)), bool(seq))
|
|
|
|
def test_getweakrefcount(self):
    """getweakrefcount() counts refs and proxies; 0 for non-weakref-ables."""
    obj = C()
    r1 = weakref.ref(obj)
    r2 = weakref.ref(obj, self.callback)
    self.assert_(weakref.getweakrefcount(obj) == 2,
                 "got wrong number of weak reference objects")

    p1 = weakref.proxy(obj)
    p2 = weakref.proxy(obj, self.callback)
    self.assert_(weakref.getweakrefcount(obj) == 4,
                 "got wrong number of weak reference objects")

    del r1, r2, p1, p2
    self.assert_(weakref.getweakrefcount(obj) == 0,
                 "weak reference objects not unlinked from"
                 " referent when discarded.")

    # assumes ints do not support weakrefs
    self.assert_(weakref.getweakrefcount(1) == 0,
                 "got wrong number of weak reference objects for int")
|
|
|
|
def test_getweakrefs(self):
    """getweakrefs() returns exactly the refs that are still alive."""
    obj = C()
    r1 = weakref.ref(obj, self.callback)
    r2 = weakref.ref(obj, self.callback)
    del r1
    self.assert_(weakref.getweakrefs(obj) == [r2],
                 "list of refs does not match")

    obj = C()
    r1 = weakref.ref(obj, self.callback)
    r2 = weakref.ref(obj, self.callback)
    del r2
    self.assert_(weakref.getweakrefs(obj) == [r1],
                 "list of refs does not match")

    del r1
    self.assert_(weakref.getweakrefs(obj) == [],
                 "list of refs not cleared")

    # assumes ints do not support weakrefs
    self.assert_(weakref.getweakrefs(1) == [],
                 "list of refs does not match for int")
|
|
|
|
def test_newstyle_number_ops(self):
    """Arithmetic works with a proxy on either side of the operator."""
    class MyFloat(float):
        pass
    val = MyFloat(2.0)
    prox = weakref.proxy(val)
    self.assert_(prox + 1.0 == 3.0)
    self.assert_(1.0 + prox == 3.0)  # this used to SEGV
|
|
|
|
def test_callbacks_protected(self):
    """Callbacks run with an exception pending must not clobber it.

    Regression test for SF bug #478534.
    """
    class BogusError(Exception):
        pass
    data = {}

    def remove(k):
        del data[k]

    def encapsulate():
        f = lambda: ()
        data[weakref.ref(f, remove)] = None
        raise BogusError

    # The original test ran the exact same try/except block twice;
    # a loop keeps the two invocations identical.
    for _ in range(2):
        try:
            encapsulate()
        except BogusError:
            pass
        else:
            self.fail("exception not properly restored")
|
|
|
|
def test_sf_bug_840829(self):
    """Regression test for SF bug #840829 (weakref callbacks + gc)."""
    # "weakref callbacks and gc corrupt memory"
    # subtype_dealloc erroneously exposed a new-style instance
    # already in the process of getting deallocated to gc,
    # causing double-deallocation if the instance had a weakref
    # callback that triggered gc.
    # If the bug exists, there probably won't be an obvious symptom
    # in a release build. In a debug build, a segfault will occur
    # when the second attempt to remove the instance from the "list
    # of all objects" occurs.

    import gc

    class C(object):
        pass

    c = C()
    wr = weakref.ref(c, lambda ignore: gc.collect())
    del c

    # There endeth the first part. It gets worse.
    del wr

    c1 = C()
    c1.i = C()
    wr = weakref.ref(c1.i, lambda ignore: gc.collect())

    c2 = C()
    c2.c1 = c1
    del c1  # still alive because c2 points to it

    # Now when subtype_dealloc gets called on c2, it's not enough just
    # that c2 is immune from gc while the weakref callbacks associated
    # with c2 execute (there are none in this 2nd half of the test, btw).
    # subtype_dealloc goes on to call the base classes' deallocs too,
    # so any gc triggered by weakref callbacks associated with anything
    # torn down by a base class dealloc can also trigger double
    # deallocation of c2.
    del c2
|
|
|
|
def test_callback_in_cycle_1(self):
    """Callback on a class whose tp_clear already ran (once a segfault)."""
    import gc

    class J(object):
        pass

    class II(object):
        def acallback(self, ignore):
            self.J

    I = II()
    I.J = J
    I.wr = weakref.ref(J, I.acallback)

    # Now J and II are each in a self-cycle (as all new-style class
    # objects are, since their __mro__ points back to them). I holds
    # both a weak reference (I.wr) and a strong reference (I.J) to class
    # J. I is also in a cycle (I.wr points to a weakref that references
    # I.acallback). When we del these three, they all become trash, but
    # the cycles prevent any of them from getting cleaned up immediately.
    # Instead they have to wait for cyclic gc to deduce that they're
    # trash.
    #
    # gc used to call tp_clear on all of them, and the order in which
    # it does that is pretty accidental. The exact order in which we
    # built up these things manages to provoke gc into running tp_clear
    # in just the right order (I last). Calling tp_clear on II leaves
    # behind an insane class object (its __mro__ becomes NULL). Calling
    # tp_clear on J breaks its self-cycle, but J doesn't get deleted
    # just then because of the strong reference from I.J. Calling
    # tp_clear on I starts to clear I's __dict__, and just happens to
    # clear I.J first -- I.wr is still intact. That removes the last
    # reference to J, which triggers the weakref callback. The callback
    # tries to do "self.J", and instances of new-style classes look up
    # attributes ("J") in the class dict first. The class (II) wants to
    # search II.__mro__, but that's NULL. The result was a segfault in
    # a release build, and an assert failure in a debug build.
    del I, J, II
    gc.collect()
|
|
|
|
def test_callback_in_cycle_2(self):
    """Same as test_callback_in_cycle_1 but with an old-style II class."""
    import gc

    # This is just like test_callback_in_cycle_1, except that II is an
    # old-style class. The symptom is different then: an instance of an
    # old-style class looks in its own __dict__ first. 'J' happens to
    # get cleared from I.__dict__ before 'wr', and 'J' was never in II's
    # __dict__, so the attribute isn't found. The difference is that
    # the old-style II doesn't have a NULL __mro__ (it doesn't have any
    # __mro__), so no segfault occurs. Instead it got:
    #     test_callback_in_cycle_2 (__main__.ReferencesTestCase) ...
    #     Exception exceptions.AttributeError:
    #       "II instance has no attribute 'J'" in <bound method II.acallback
    #       of <?.II instance at 0x00B9B4B8>> ignored

    class J(object):
        pass

    class II:
        def acallback(self, ignore):
            self.J

    I = II()
    I.J = J
    I.wr = weakref.ref(J, I.acallback)

    del I, J, II
    gc.collect()
|
|
|
|
def test_callback_in_cycle_3(self):
    """Callback's referents are not reachable from the dying object."""
    import gc

    # This one broke the first patch that fixed the last two. In this
    # case, the objects reachable from the callback aren't also reachable
    # from the object (c1) *triggering* the callback: you can get to
    # c1 from c2, but not vice-versa. The result was that c2's __dict__
    # got tp_clear'ed by the time the c2.cb callback got invoked.

    class C:
        def cb(self, ignore):
            self.me
            self.c1
            self.wr

    c1, c2 = C(), C()

    c2.me = c2
    c2.c1 = c1
    c2.wr = weakref.ref(c1, c2.cb)

    del c1, c2
    gc.collect()
|
|
|
|
def test_callback_in_cycle_4(self):
    """Like test_callback_in_cycle_3, but c1 and c2 have different classes."""
    import gc

    # Like test_callback_in_cycle_3, except c2 and c1 have different
    # classes. c2's class (C) isn't reachable from c1 then, so protecting
    # objects reachable from the dying object (c1) isn't enough to stop
    # c2's class (C) from getting tp_clear'ed before c2.cb is invoked.
    # The result was a segfault (C.__mro__ was NULL when the callback
    # tried to look up self.me).

    class C(object):
        def cb(self, ignore):
            self.me
            self.c1
            self.wr

    class D:
        pass

    c1, c2 = D(), C()

    c2.me = c2
    c2.c1 = c1
    c2.wr = weakref.ref(c1, c2.cb)

    del c1, c2, C, D
    gc.collect()
|
|
|
|
def test_callback_in_cycle_resurrection(self):
    # Weakref callbacks that are themselves part of cyclic trash must be
    # cleared (not invoked), so they cannot resurrect objects from dead
    # cycles; a weakref held outside the trash still fires normally.
    import gc

    # Do something nasty in a weakref callback: resurrect objects
    # from dead cycles. For this to be attempted, the weakref and
    # its callback must also be part of the cyclic trash (else the
    # objects reachable via the callback couldn't be in cyclic trash
    # to begin with -- the callback would act like an external root).
    # But gc clears trash weakrefs with callbacks early now, which
    # disables the callbacks, so the callbacks shouldn't get called
    # at all (and so nothing actually gets resurrected).

    alist = []
    class C(object):
        def __init__(self, value):
            self.attribute = value

        def acallback(self, ignore):
            # Would resurrect the partner object if ever invoked.
            alist.append(self.c)

    c1, c2 = C(1), C(2)
    c1.c = c2
    c2.c = c1
    c1.wr = weakref.ref(c2, c1.acallback)
    c2.wr = weakref.ref(c1, c2.acallback)

    def C_went_away(ignore):
        alist.append("C went away")
    wr = weakref.ref(C, C_went_away)

    del c1, c2, C   # make them all trash
    self.assertEqual(alist, [])  # del isn't enough to reclaim anything

    gc.collect()
    # c1.wr and c2.wr were part of the cyclic trash, so should have
    # been cleared without their callbacks executing. OTOH, the weakref
    # to C is bound to a function local (wr), and wasn't trash, so that
    # callback should have been invoked when C went away.
    self.assertEqual(alist, ["C went away"])
    # The remaining weakref should be dead now (its callback ran).
    self.assertEqual(wr(), None)

    del alist[:]
    gc.collect()
    self.assertEqual(alist, [])
|
def test_callbacks_on_callback(self):
    # A weakref callback can itself be the target of other weakrefs;
    # trash weakrefs in the cycle must be cleared silently while a
    # weakref held outside the cycle still fires.
    import gc

    # Set up weakref callbacks *on* weakref callbacks.
    alist = []
    def safe_callback(ignore):
        alist.append("safe_callback called")

    class C(object):
        def cb(self, ignore):
            alist.append("cb called")

    c, d = C(), C()
    c.other = d
    d.other = c
    callback = c.cb                    # bound method: part of the cycle below
    c.wr = weakref.ref(d, callback)    # this won't trigger
    d.wr = weakref.ref(callback, d.cb) # ditto
    external_wr = weakref.ref(callback, safe_callback) # but this will
    self.assert_(external_wr() is callback)

    # The weakrefs attached to c and d should get cleared, so that
    # C.cb is never called. But external_wr isn't part of the cyclic
    # trash, and no cyclic trash is reachable from it, so safe_callback
    # should get invoked when the bound method object callback (c.cb)
    # -- which is itself a callback, and also part of the cyclic trash --
    # gets reclaimed at the end of gc.

    del callback, c, d, C
    self.assertEqual(alist, [])  # del isn't enough to clean up cycles
    gc.collect()
    self.assertEqual(alist, ["safe_callback called"])
    self.assertEqual(external_wr(), None)

    del alist[:]
    gc.collect()
    self.assertEqual(alist, [])
|
def test_gc_during_ref_creation(self):
    # Constructing a plain weakref.ref must survive a garbage
    # collection triggered mid-construction.
    factory = weakref.ref
    self.check_gc_during_creation(factory)
|
def test_gc_during_proxy_creation(self):
    # Constructing a weakref.proxy must survive a garbage collection
    # triggered mid-construction.
    factory = weakref.proxy
    self.check_gc_during_creation(factory)
|
def check_gc_during_creation(self, makeref):
    """Provoke a garbage collection while a weak reference is being built.

    Lowers the GC thresholds to their minimum so that the allocations
    done while constructing the reference can themselves trigger a
    collection, then restores the previous thresholds.
    """
    old_thresholds = gc.get_threshold()
    gc.set_threshold(1, 1, 1)
    gc.collect()

    class A:
        pass

    def callback(*args):
        pass

    referenced = A()

    a = A()
    a.a = a                     # self-cycle: candidate cyclic trash
    a.wr = makeref(referenced)

    try:
        # now make sure the object and the ref get labeled as
        # cyclic trash:
        a = A()
        weakref.ref(referenced, callback)
    finally:
        gc.set_threshold(*old_thresholds)
|
def test_ref_created_during_del(self):
|
|
# Bug #1377858
|
|
# A weakref created in an object's __del__() would crash the
|
|
# interpreter when the weakref was cleaned up since it would refer to
|
|
# non-existent memory. This test should not segfault the interpreter.
|
|
class Target(object):
|
|
def __del__(self):
|
|
global ref_from_del
|
|
ref_from_del = weakref.ref(self)
|
|
|
|
w = Target()
|
|
|
|
|
|
class SubclassableWeakrefTestCase(unittest.TestCase):
    """Tests for subclasses of weakref.ref."""

    def test_subclass_refs(self):
        # A subclass may carry extra state and override __call__; both
        # must work before and after the referent is collected.
        class MyRef(weakref.ref):
            def __init__(self, ob, callback=None, value=42):
                self.value = value
                super().__init__(ob, callback)
            def __call__(self):
                self.called = True
                return super().__call__()
        o = Object("foo")
        mr = MyRef(o, value=24)
        self.assert_(mr() is o)
        self.assert_(mr.called)
        self.assertEqual(mr.value, 24)
        del o
        self.assert_(mr() is None)
        self.assert_(mr.called)

    def test_subclass_refs_dont_replace_standard_refs(self):
        # A subclass ref and a plain ref to the same object must remain
        # distinct; plain refs sort first in getweakrefs().
        class MyRef(weakref.ref):
            pass
        o = Object(42)
        r1 = MyRef(o)
        r2 = weakref.ref(o)
        self.assert_(r1 is not r2)
        self.assertEqual(weakref.getweakrefs(o), [r2, r1])
        self.assertEqual(weakref.getweakrefcount(o), 2)
        r3 = MyRef(o)
        self.assertEqual(weakref.getweakrefcount(o), 3)
        refs = weakref.getweakrefs(o)
        self.assertEqual(len(refs), 3)
        self.assert_(r2 is refs[0])
        self.assert_(r1 in refs[1:])
        self.assert_(r3 in refs[1:])

    def test_subclass_refs_dont_conflate_callbacks(self):
        # Two subclass refs with different callbacks are distinct objects.
        class MyRef(weakref.ref):
            pass
        o = Object(42)
        r1 = MyRef(o, id)
        r2 = MyRef(o, str)
        self.assert_(r1 is not r2)
        refs = weakref.getweakrefs(o)
        self.assert_(r1 in refs)
        self.assert_(r2 in refs)

    def test_subclass_refs_with_slots(self):
        # __slots__ subclasses work and allocate no instance __dict__.
        class MyRef(weakref.ref):
            __slots__ = "slot1", "slot2"
            # Renamed first parameter from 'type' to 'cls' so it no
            # longer shadows the builtin.
            def __new__(cls, ob, callback, slot1, slot2):
                return weakref.ref.__new__(cls, ob, callback)
            def __init__(self, ob, callback, slot1, slot2):
                self.slot1 = slot1
                self.slot2 = slot2
            def meth(self):
                return self.slot1 + self.slot2
        o = Object(42)
        r = MyRef(o, None, "abc", "def")
        self.assertEqual(r.slot1, "abc")
        self.assertEqual(r.slot2, "def")
        self.assertEqual(r.meth(), "abcdef")
        self.failIf(hasattr(r, "__dict__"))
|
|
|
class Object:
    """A hashable, orderable test fixture wrapping a single value.

    Instances compare with ``<`` by their wrapped ``arg`` (other types
    get NotImplemented) and hash like ``arg``; equality stays the
    default identity semantics.
    """

    def __init__(self, arg):
        self.arg = arg

    def __repr__(self):
        return "<Object %r>" % self.arg

    def __lt__(self, other):
        # Only order against other Object instances.
        if not isinstance(other, Object):
            return NotImplemented
        return self.arg < other.arg

    def __hash__(self):
        return hash(self.arg)
|
|
class MappingTestCase(TestBase):
    """Behavior tests for WeakValueDictionary and WeakKeyDictionary.

    Results of items()/keys()/values() and map() are wrapped in list()
    or sorted() before being indexed, mutated or compared to lists, so
    the tests work whether those calls return lists or iterators/views.
    """

    COUNT = 10  # number of entries built by the make_weak_*_dict helpers

    def test_weak_values(self):
        #
        # This exercises d.copy(), d.items(), d[], del d[], len(d).
        #
        dict, objects = self.make_weak_valued_dict()
        for o in objects:
            self.assertEqual(weakref.getweakrefcount(o), 1)
            self.assert_(o is dict[o.arg],
                         "wrong object returned by weak dict!")
        # sorted() both materializes and orders the pairs (keys are
        # unique ints, so Object values are never compared).
        items1 = sorted(dict.items())
        items2 = sorted(dict.copy().items())
        self.assert_(items1 == items2,
                     "cloning of weak-valued dictionary did not work!")
        del items1, items2
        self.assert_(len(dict) == self.COUNT)
        del objects[0]
        self.assert_(len(dict) == (self.COUNT - 1),
                     "deleting object did not cause dictionary update")
        del objects, o
        self.assert_(len(dict) == 0,
                     "deleting the values did not clear the dictionary")
        # regression on SF bug #447152:
        dict = weakref.WeakValueDictionary()
        self.assertRaises(KeyError, dict.__getitem__, 1)
        dict[2] = C()
        self.assertRaises(KeyError, dict.__getitem__, 2)

    def test_weak_keys(self):
        #
        # This exercises d.copy(), d.items(), d[] = v, d[], del d[],
        # len(d), k in d.
        #
        dict, objects = self.make_weak_keyed_dict()
        for o in objects:
            self.assert_(weakref.getweakrefcount(o) == 1,
                         "wrong number of weak references to %r!" % o)
            self.assert_(o.arg is dict[o],
                         "wrong object returned by weak dict!")
        items1 = dict.items()
        items2 = dict.copy().items()
        self.assert_(set(items1) == set(items2),
                     "cloning of weak-keyed dictionary did not work!")
        del items1, items2
        self.assert_(len(dict) == self.COUNT)
        del objects[0]
        self.assert_(len(dict) == (self.COUNT - 1),
                     "deleting object did not cause dictionary update")
        del objects, o
        self.assert_(len(dict) == 0,
                     "deleting the keys did not clear the dictionary")
        o = Object(42)
        dict[o] = "What is the meaning of the universe?"
        self.assert_(o in dict)
        self.assert_(34 not in dict)

    def test_weak_keyed_iters(self):
        dict, objects = self.make_weak_keyed_dict()
        self.check_iters(dict)

        # Test keyrefs()
        refs = dict.keyrefs()
        self.assertEqual(len(refs), len(objects))
        objects2 = list(objects)
        for wr in refs:
            ob = wr()
            self.assert_(ob in dict)
            self.assert_(ob in dict)
            self.assertEqual(ob.arg, dict[ob])
            objects2.remove(ob)
        self.assertEqual(len(objects2), 0)

        # Test iterkeyrefs()
        objects2 = list(objects)
        self.assertEqual(len(list(dict.iterkeyrefs())), len(objects))
        for wr in dict.iterkeyrefs():
            ob = wr()
            self.assert_(ob in dict)
            self.assert_(ob in dict)
            self.assertEqual(ob.arg, dict[ob])
            objects2.remove(ob)
        self.assertEqual(len(objects2), 0)

    def test_weak_valued_iters(self):
        dict, objects = self.make_weak_valued_dict()
        self.check_iters(dict)

        # Test valuerefs()
        refs = dict.valuerefs()
        self.assertEqual(len(refs), len(objects))
        objects2 = list(objects)
        for wr in refs:
            ob = wr()
            self.assertEqual(ob, dict[ob.arg])
            self.assertEqual(ob.arg, dict[ob.arg].arg)
            objects2.remove(ob)
        self.assertEqual(len(objects2), 0)

        # Test itervaluerefs()
        objects2 = list(objects)
        self.assertEqual(len(list(dict.itervaluerefs())), len(objects))
        for wr in dict.itervaluerefs():
            ob = wr()
            self.assertEqual(ob, dict[ob.arg])
            self.assertEqual(ob.arg, dict[ob.arg].arg)
            objects2.remove(ob)
        self.assertEqual(len(objects2), 0)

    def check_iters(self, dict):
        # item iterator (list() so .remove() is available):
        items = list(dict.items())
        for item in dict.items():
            items.remove(item)
        self.assert_(len(items) == 0, "items() did not touch all items")

        # key iterator, via __iter__():
        keys = list(dict.keys())
        for k in dict:
            keys.remove(k)
        self.assert_(len(keys) == 0, "__iter__() did not touch all keys")

        # key iterator, via iterkeys():
        keys = list(dict.keys())
        for k in dict.keys():
            keys.remove(k)
        self.assert_(len(keys) == 0, "iterkeys() did not touch all keys")

        # value iterator:
        values = list(dict.values())
        for v in dict.values():
            values.remove(v)
        self.assert_(len(values) == 0,
                     "itervalues() did not touch all values")

    def test_make_weak_keyed_dict_from_dict(self):
        o = Object(3)
        dict = weakref.WeakKeyDictionary({o:364})
        self.assert_(dict[o] == 364)

    def test_make_weak_keyed_dict_from_weak_keyed_dict(self):
        o = Object(3)
        dict = weakref.WeakKeyDictionary({o:364})
        dict2 = weakref.WeakKeyDictionary(dict)
        self.assert_(dict[o] == 364)
        # The copy constructed from the weak-keyed dict must carry the
        # same entry (previously dict2 was never checked).
        self.assert_(dict2[o] == 364)

    def make_weak_keyed_dict(self):
        """Return (WeakKeyDictionary, list of the strongly-held keys)."""
        dict = weakref.WeakKeyDictionary()
        # A real list (not a map iterator) so callers can index and
        # delete individual entries.
        objects = [Object(i) for i in range(self.COUNT)]
        for o in objects:
            dict[o] = o.arg
        return dict, objects

    def make_weak_valued_dict(self):
        """Return (WeakValueDictionary, list of the strongly-held values)."""
        dict = weakref.WeakValueDictionary()
        objects = [Object(i) for i in range(self.COUNT)]
        for o in objects:
            dict[o.arg] = o
        return dict, objects

    def check_popitem(self, klass, key1, value1, key2, value2):
        # popitem() must shrink the dict and hand back a matching pair;
        # the order the two pairs come back in is unspecified.
        weakdict = klass()
        weakdict[key1] = value1
        weakdict[key2] = value2
        self.assert_(len(weakdict) == 2)
        k, v = weakdict.popitem()
        self.assert_(len(weakdict) == 1)
        if k is key1:
            self.assert_(v is value1)
        else:
            self.assert_(v is value2)
        k, v = weakdict.popitem()
        self.assert_(len(weakdict) == 0)
        if k is key1:
            self.assert_(v is value1)
        else:
            self.assert_(v is value2)

    def test_weak_valued_dict_popitem(self):
        self.check_popitem(weakref.WeakValueDictionary,
                           "key1", C(), "key2", C())

    def test_weak_keyed_dict_popitem(self):
        self.check_popitem(weakref.WeakKeyDictionary,
                           C(), "value 1", C(), "value 2")

    def check_setdefault(self, klass, key, value1, value2):
        self.assert_(value1 is not value2,
                     "invalid test"
                     " -- value parameters must be distinct objects")
        weakdict = klass()
        o = weakdict.setdefault(key, value1)
        self.assert_(o is value1)
        self.assert_(key in weakdict)
        self.assert_(weakdict.get(key) is value1)
        self.assert_(weakdict[key] is value1)

        # A second setdefault() must not overwrite the stored value.
        o = weakdict.setdefault(key, value2)
        self.assert_(o is value1)
        self.assert_(key in weakdict)
        self.assert_(weakdict.get(key) is value1)
        self.assert_(weakdict[key] is value1)

    def test_weak_valued_dict_setdefault(self):
        self.check_setdefault(weakref.WeakValueDictionary,
                              "key", C(), C())

    def test_weak_keyed_dict_setdefault(self):
        self.check_setdefault(weakref.WeakKeyDictionary,
                              C(), "value 1", "value 2")

    def check_update(self, klass, dict):
        #
        # This exercises d.update(), len(d), d.keys(), k in d,
        # d.get(), d[].
        #
        weakdict = klass()
        weakdict.update(dict)
        self.assert_(len(weakdict) == len(dict))
        for k in weakdict.keys():
            self.assert_(k in dict,
                         "mysterious new key appeared in weak dict")
            v = dict.get(k)
            self.assert_(v is weakdict[k])
            self.assert_(v is weakdict.get(k))
        for k in dict.keys():
            self.assert_(k in weakdict,
                         "original key disappeared in weak dict")
            v = dict[k]
            self.assert_(v is weakdict[k])
            self.assert_(v is weakdict.get(k))

    def test_weak_valued_dict_update(self):
        self.check_update(weakref.WeakValueDictionary,
                          {1: C(), 'a': C(), C(): C()})

    def test_weak_keyed_dict_update(self):
        self.check_update(weakref.WeakKeyDictionary,
                          {C(): 1, C(): 2, C(): 3})

    def test_weak_keyed_delitem(self):
        d = weakref.WeakKeyDictionary()
        o1 = Object('1')
        o2 = Object('2')
        d[o1] = 'something'
        d[o2] = 'something'
        self.assert_(len(d) == 2)
        del d[o1]
        self.assert_(len(d) == 1)
        # list() so the comparison against a list holds for view/iterator
        # keys() results too.
        self.assert_(list(d.keys()) == [o2])

    def test_weak_valued_delitem(self):
        d = weakref.WeakValueDictionary()
        o1 = Object('1')
        o2 = Object('2')
        d['something'] = o1
        d['something else'] = o2
        self.assert_(len(d) == 2)
        del d['something']
        self.assert_(len(d) == 1)
        self.assert_(list(d.items()) == [('something else', o2)])

    def test_weak_keyed_bad_delitem(self):
        d = weakref.WeakKeyDictionary()
        o = Object('1')
        # An attempt to delete an object that isn't there should raise
        # KeyError. It didn't before 2.3.
        self.assertRaises(KeyError, d.__delitem__, o)
        self.assertRaises(KeyError, d.__getitem__, o)

        # If a key isn't of a weakly referencable type, __getitem__ and
        # __setitem__ raise TypeError. __delitem__ should too.
        self.assertRaises(TypeError, d.__delitem__, 13)
        self.assertRaises(TypeError, d.__getitem__, 13)
        self.assertRaises(TypeError, d.__setitem__, 13, 13)

    def test_weak_keyed_cascading_deletes(self):
        # SF bug 742860. For some reason, before 2.3 __delitem__ iterated
        # over the keys via self.data.iterkeys(). If things vanished from
        # the dict during this (or got added), that caused a RuntimeError.

        d = weakref.WeakKeyDictionary()
        mutate = False

        class C(object):
            def __init__(self, i):
                self.value = i
            def __hash__(self):
                return hash(self.value)
            def __eq__(self, other):
                if mutate:
                    # Side effect that mutates the dict, by removing the
                    # last strong reference to a key.
                    del objs[-1]
                return self.value == other.value

        objs = [C(i) for i in range(4)]
        for o in objs:
            d[o] = o.value
        del o   # now the only strong references to keys are in objs
        # Find the order in which iterkeys sees the keys (list() so we
        # get a snapshot we can reverse and iterate safely).
        objs = list(d.keys())
        # Reverse it, so that the iteration implementation of __delitem__
        # has to keep looping to find the first object we delete.
        objs.reverse()

        # Turn on mutation in C.__eq__. The first time thru the loop,
        # under the iterkeys() business the first comparison will delete
        # the last item iterkeys() would see, and that causes a
        # RuntimeError: dictionary changed size during iteration
        # when the iterkeys() loop goes around to try comparing the next
        # key. After this was fixed, it just deletes the last object *our*
        # "for o in obj" loop would have gotten to.
        mutate = True
        count = 0
        for o in objs:
            count += 1
            del d[o]
        self.assertEqual(len(d), 0)
        self.assertEqual(count, 2)
|
from test import mapping_tests
|
|
|
|
class WeakValueDictionaryTestCase(mapping_tests.BasicTestMappingProtocol):
    """Check that WeakValueDictionary conforms to the mapping protocol"""
    __ref = {"key1": Object(1), "key2": Object(2), "key3": Object(3)}
    type2test = weakref.WeakValueDictionary

    def _reference(self):
        # Hand back a fresh shallow copy so callers cannot mutate the
        # shared reference mapping.
        return dict(self.__ref)
|
class WeakKeyDictionaryTestCase(mapping_tests.BasicTestMappingProtocol):
    """Check that WeakKeyDictionary conforms to the mapping protocol"""
    __ref = {Object("key1"): 1, Object("key2"): 2, Object("key3"): 3}
    type2test = weakref.WeakKeyDictionary

    def _reference(self):
        # Hand back a fresh shallow copy so callers cannot mutate the
        # shared reference mapping.
        return dict(self.__ref)
|
# Doctests mirroring the examples in the library reference
# (Doc/lib/libweakref.tex); executed via test_support.run_doctest()
# through the __test__ mapping below.
libreftest = """ Doctest for examples in the library reference: libweakref.tex

>>> import weakref
>>> class Dict(dict):
...     pass
...
>>> obj = Dict(red=1, green=2, blue=3)   # this object is weak referencable
>>> r = weakref.ref(obj)
>>> print(r() is obj)
True

>>> import weakref
>>> class Object:
...     pass
...
>>> o = Object()
>>> r = weakref.ref(o)
>>> o2 = r()
>>> o is o2
True
>>> del o, o2
>>> print(r())
None

>>> import weakref
>>> class ExtendedRef(weakref.ref):
...     def __init__(self, ob, callback=None, **annotations):
...         super().__init__(ob, callback)
...         self.__counter = 0
...         for k, v in annotations.items():
...             setattr(self, k, v)
...     def __call__(self):
...         '''Return a pair containing the referent and the number of
...         times the reference has been called.
...         '''
...         ob = super().__call__()
...         if ob is not None:
...             self.__counter += 1
...             ob = (ob, self.__counter)
...         return ob
...
>>> class A:   # not in docs from here, just testing the ExtendedRef
...     pass
...
>>> a = A()
>>> r = ExtendedRef(a, foo=1, bar="baz")
>>> r.foo
1
>>> r.bar
'baz'
>>> r()[1]
1
>>> r()[1]
2
>>> r()[0] is a
True


>>> import weakref
>>> _id2obj_dict = weakref.WeakValueDictionary()
>>> def remember(obj):
...     oid = id(obj)
...     _id2obj_dict[oid] = obj
...     return oid
...
>>> def id2obj(oid):
...     return _id2obj_dict[oid]
...
>>> a = A()   # from here, just testing
>>> a_id = remember(a)
>>> id2obj(a_id) is a
True
>>> del a
>>> try:
...     id2obj(a_id)
... except KeyError:
...     print('OK')
... else:
...     print('WeakValueDictionary error')
OK

"""

# Expose the doctest string to test_support.run_doctest().
__test__ = {'libreftest' : libreftest}
|
|
def test_main():
    """Run all weakref test cases, then the doctests in this module."""
    test_support.run_unittest(
        ReferencesTestCase,
        MappingTestCase,
        # Previously defined but never run -- it was missing from this list.
        SubclassableWeakrefTestCase,
        WeakValueDictionaryTestCase,
        WeakKeyDictionaryTestCase,
        )
    test_support.run_doctest(sys.modules[__name__])
|
|
|
|
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    test_main()