import contextlib
import errno
import importlib
import io
import logging
import os
import shutil
import signal
import socket
import stat
import subprocess
import sys
import sysconfig
import tempfile
import textwrap
import unittest
import warnings

from test import support
from test.support import import_helper
from test.support import os_helper
from test.support import script_helper
from test.support import socket_helper
from test.support import warnings_helper

TESTFN = os_helper.TESTFN


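# LogCaptureHandler and _caplog() capture log records emitted through the root
# logger, so the tests below can assert on messages that test.support helpers
# log rather than warn.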
class LogCaptureHandler(logging.StreamHandler):
    # Inspired by pytest's caplog
    def __init__(self):
        super().__init__(io.StringIO())
        self.records = []

    def emit(self, record) -> None:
        self.records.append(record)
        super().emit(record)

    def handleError(self, record):
        raise


@contextlib.contextmanager
def _caplog():
    handler = LogCaptureHandler()
    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    try:
        yield handler
    finally:
        root_logger.removeHandler(handler)


class TestSupport(unittest.TestCase):
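    # The class-level fixtures register two DeprecationWarning filters (one for
    # warnings_helper, one for this module) and verify the bookkeeping by
    # counting the installed warning filters before and after.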
    @classmethod
    def setUpClass(cls):
        orig_filter_len = len(warnings._get_filters())
        cls._warnings_helper_token = support.ignore_deprecations_from(
            "test.support.warnings_helper", like=".*used in test_support.*"
        )
        cls._test_support_token = support.ignore_deprecations_from(
            __name__, like=".*You should NOT be seeing this.*"
        )
        assert len(warnings._get_filters()) == orig_filter_len + 2

    @classmethod
    def tearDownClass(cls):
        orig_filter_len = len(warnings._get_filters())
        support.clear_ignored_deprecations(
            cls._warnings_helper_token,
            cls._test_support_token,
        )
        assert len(warnings._get_filters()) == orig_filter_len - 2

    def test_ignored_deprecations_are_silent(self):
        """Test support.ignore_deprecations_from() silences warnings"""
        with warnings.catch_warnings(record=True) as warning_objs:
            warnings_helper._warn_about_deprecation()
            warnings.warn("You should NOT be seeing this.", DeprecationWarning)
            messages = [str(w.message) for w in warning_objs]
        self.assertEqual(len(messages), 0, messages)

    def test_import_module(self):
        import_helper.import_module("ftplib")
        self.assertRaises(unittest.SkipTest,
                          import_helper.import_module, "foo")

    def test_import_fresh_module(self):
        import_helper.import_fresh_module("ftplib")

    def test_get_attribute(self):
        self.assertEqual(support.get_attribute(self, "test_get_attribute"),
                         self.test_get_attribute)
        self.assertRaises(unittest.SkipTest, support.get_attribute, self, "foo")

    @unittest.skip("failing buildbots")
    def test_get_original_stdout(self):
        self.assertEqual(support.get_original_stdout(), sys.stdout)

    def test_unload(self):
        import sched  # noqa: F401
        self.assertIn("sched", sys.modules)
        import_helper.unload("sched")
        self.assertNotIn("sched", sys.modules)

    def test_unlink(self):
        with open(TESTFN, "w", encoding="utf-8") as f:
            pass
        os_helper.unlink(TESTFN)
        self.assertFalse(os.path.exists(TESTFN))
        os_helper.unlink(TESTFN)

    def test_rmtree(self):
        dirpath = os_helper.TESTFN + 'd'
        subdirpath = os.path.join(dirpath, 'subdir')
        os.mkdir(dirpath)
        os.mkdir(subdirpath)
        os_helper.rmtree(dirpath)
        self.assertFalse(os.path.exists(dirpath))
        with support.swap_attr(support, 'verbose', 0):
            os_helper.rmtree(dirpath)

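        # rmtree() must also cope with a tree whose root directory has had its
        # write permission removed.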
        os.mkdir(dirpath)
        os.mkdir(subdirpath)
        os.chmod(dirpath, stat.S_IRUSR|stat.S_IXUSR)
        with support.swap_attr(support, 'verbose', 0):
            os_helper.rmtree(dirpath)
        self.assertFalse(os.path.exists(dirpath))

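        # ... and with a tree whose root directory has no permissions at all.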
        os.mkdir(dirpath)
        os.mkdir(subdirpath)
        os.chmod(dirpath, 0)
        with support.swap_attr(support, 'verbose', 0):
            os_helper.rmtree(dirpath)
        self.assertFalse(os.path.exists(dirpath))

    def test_forget(self):
        mod_filename = TESTFN + '.py'
        with open(mod_filename, 'w', encoding="utf-8") as f:
            print('foo = 1', file=f)
        sys.path.insert(0, os.curdir)
        importlib.invalidate_caches()
        try:
            mod = __import__(TESTFN)
            self.assertIn(TESTFN, sys.modules)

            import_helper.forget(TESTFN)
            self.assertNotIn(TESTFN, sys.modules)
        finally:
            del sys.path[0]
            os_helper.unlink(mod_filename)
            os_helper.rmtree('__pycache__')

    @support.requires_working_socket()
    def test_HOST(self):
        s = socket.create_server((socket_helper.HOST, 0))
        s.close()

    @support.requires_working_socket()
    def test_find_unused_port(self):
        port = socket_helper.find_unused_port()
        s = socket.create_server((socket_helper.HOST, port))
        s.close()

    @support.requires_working_socket()
    def test_bind_port(self):
        s = socket.socket()
        socket_helper.bind_port(s)
        s.listen()
        s.close()

    # Tests for temp_dir()

    def test_temp_dir(self):
        """Test that temp_dir() creates and destroys its directory."""
        parent_dir = tempfile.mkdtemp()
        parent_dir = os.path.realpath(parent_dir)

        try:
            path = os.path.join(parent_dir, 'temp')
            self.assertFalse(os.path.isdir(path))
            with os_helper.temp_dir(path) as temp_path:
                self.assertEqual(temp_path, path)
                self.assertTrue(os.path.isdir(path))
            self.assertFalse(os.path.isdir(path))
        finally:
            os_helper.rmtree(parent_dir)

    def test_temp_dir__path_none(self):
        """Test passing no path."""
        with os_helper.temp_dir() as temp_path:
            self.assertTrue(os.path.isdir(temp_path))
        self.assertFalse(os.path.isdir(temp_path))

    def test_temp_dir__existing_dir__quiet_default(self):
        """Test passing a directory that already exists."""
        def call_temp_dir(path):
            with os_helper.temp_dir(path) as temp_path:
                raise Exception("should not get here")

        path = tempfile.mkdtemp()
        path = os.path.realpath(path)
        try:
            self.assertTrue(os.path.isdir(path))
            self.assertRaises(FileExistsError, call_temp_dir, path)
            # Make sure temp_dir did not delete the original directory.
            self.assertTrue(os.path.isdir(path))
        finally:
            shutil.rmtree(path)

    def test_temp_dir__existing_dir__quiet_true(self):
        """Test passing a directory that already exists with quiet=True."""
        path = tempfile.mkdtemp()
        path = os.path.realpath(path)

        try:
            with warnings_helper.check_warnings() as recorder, _caplog() as caplog:
                with os_helper.temp_dir(path, quiet=True) as temp_path:
                    self.assertEqual(path, temp_path)
                warnings = [str(w.message) for w in recorder.warnings]
            # Make sure temp_dir did not delete the original directory.
            self.assertTrue(os.path.isdir(path))
        finally:
            shutil.rmtree(path)

        self.assertListEqual(warnings, [])
        self.assertEqual(len(caplog.records), 1)
        record = caplog.records[0]
        self.assertStartsWith(
            record.getMessage(),
            f'tests may fail, unable to create '
            f'temporary directory {path!r}: '
        )

    @support.requires_fork()
    def test_temp_dir__forked_child(self):
        """Test that a forked child process does not remove the directory."""
        # See bpo-30028 for details.
        # Run the test as an external script, because it uses fork.
        script_helper.assert_python_ok("-c", textwrap.dedent("""
            import os
            from test import support
            from test.support import os_helper
            with os_helper.temp_cwd() as temp_path:
                pid = os.fork()
                if pid != 0:
                    # parent process

                    # wait for the child to terminate
                    support.wait_process(pid, exitcode=0)

                    # Make sure that temp_path is still present. When the child
                    # process leaves the 'temp_cwd'-context, the __exit__()-
                    # method of the context must not remove the temporary
                    # directory.
                    if not os.path.isdir(temp_path):
                        raise AssertionError("Child removed temp_path.")
        """))

    # Tests for change_cwd()

    def test_change_cwd(self):
        original_cwd = os.getcwd()

        with os_helper.temp_dir() as temp_path:
            with os_helper.change_cwd(temp_path) as new_cwd:
                self.assertEqual(new_cwd, temp_path)
                self.assertEqual(os.getcwd(), new_cwd)

        self.assertEqual(os.getcwd(), original_cwd)

    def test_change_cwd__non_existent_dir(self):
        """Test passing a non-existent directory."""
        original_cwd = os.getcwd()

        def call_change_cwd(path):
            with os_helper.change_cwd(path) as new_cwd:
                raise Exception("should not get here")

        with os_helper.temp_dir() as parent_dir:
            non_existent_dir = os.path.join(parent_dir, 'does_not_exist')
            self.assertRaises(FileNotFoundError, call_change_cwd,
                              non_existent_dir)

        self.assertEqual(os.getcwd(), original_cwd)

    def test_change_cwd__non_existent_dir__quiet_true(self):
        """Test passing a non-existent directory with quiet=True."""
        original_cwd = os.getcwd()

        with os_helper.temp_dir() as parent_dir:
            bad_dir = os.path.join(parent_dir, 'does_not_exist')
            with warnings_helper.check_warnings() as recorder, _caplog() as caplog:
                with os_helper.change_cwd(bad_dir, quiet=True) as new_cwd:
                    self.assertEqual(new_cwd, original_cwd)
                    self.assertEqual(os.getcwd(), new_cwd)
                warnings = [str(w.message) for w in recorder.warnings]

        self.assertListEqual(warnings, [])
        self.assertEqual(len(caplog.records), 1)
        record = caplog.records[0]
        self.assertStartsWith(
            record.getMessage(),
            f'tests may fail, unable to change '
            f'the current working directory '
            f'to {bad_dir!r}: '
        )

    # Tests for change_cwd()

    def test_change_cwd__chdir_warning(self):
        """Check the warning message when os.chdir() fails."""
        path = TESTFN + '_does_not_exist'
        with warnings_helper.check_warnings() as recorder, _caplog() as caplog:
            with os_helper.change_cwd(path=path, quiet=True):
                pass
            messages = [str(w.message) for w in recorder.warnings]

        self.assertListEqual(messages, [])
        self.assertEqual(len(caplog.records), 1)
        record = caplog.records[0]
        self.assertStartsWith(
            record.getMessage(),
            f'tests may fail, unable to change '
            f'the current working directory '
            f'to {path!r}: ',
        )

    # Tests for temp_cwd()

    def test_temp_cwd(self):
        here = os.getcwd()
        with os_helper.temp_cwd(name=TESTFN):
            self.assertEqual(os.path.basename(os.getcwd()), TESTFN)
        self.assertFalse(os.path.exists(TESTFN))
        self.assertEqual(os.getcwd(), here)

    def test_temp_cwd__name_none(self):
        """Test passing None to temp_cwd()."""
        original_cwd = os.getcwd()
        with os_helper.temp_cwd(name=None) as new_cwd:
            self.assertNotEqual(new_cwd, original_cwd)
            self.assertTrue(os.path.isdir(new_cwd))
            self.assertEqual(os.getcwd(), new_cwd)
        self.assertEqual(os.getcwd(), original_cwd)

    def test_sortdict(self):
        self.assertEqual(support.sortdict({3:3, 2:2, 1:1}), "{1: 1, 2: 2, 3: 3}")

    def test_make_bad_fd(self):
        fd = os_helper.make_bad_fd()
        with self.assertRaises(OSError) as cm:
            os.write(fd, b"foo")
        self.assertEqual(cm.exception.errno, errno.EBADF)

    def test_check_syntax_error(self):
        support.check_syntax_error(self, "def class", lineno=1, offset=5)
        with self.assertRaises(AssertionError):
            support.check_syntax_error(self, "x=1")

    def test_CleanImport(self):
        import importlib
        with import_helper.CleanImport("pprint"):
            importlib.import_module("pprint")

    def test_DirsOnSysPath(self):
        with import_helper.DirsOnSysPath('foo', 'bar'):
            self.assertIn("foo", sys.path)
            self.assertIn("bar", sys.path)
        self.assertNotIn("foo", sys.path)
        self.assertNotIn("bar", sys.path)

    def test_captured_stdout(self):
        with support.captured_stdout() as stdout:
            print("hello")
        self.assertEqual(stdout.getvalue(), "hello\n")

    def test_captured_stderr(self):
        with support.captured_stderr() as stderr:
            print("hello", file=sys.stderr)
        self.assertEqual(stderr.getvalue(), "hello\n")

    def test_captured_stdin(self):
        with support.captured_stdin() as stdin:
            stdin.write('hello\n')
            stdin.seek(0)
            # call test code that consumes from sys.stdin
            captured = input()
        self.assertEqual(captured, "hello")

    def test_gc_collect(self):
        support.gc_collect()

    def test_python_is_optimized(self):
        self.assertIsInstance(support.python_is_optimized(), bool)

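    # swap_attr() and swap_item() yield the original value (or None when the
    # attribute/key did not exist) and restore the previous state on exit,
    # even if the value was deleted inside the block.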
    def test_swap_attr(self):
        class Obj:
            pass
        obj = Obj()
        obj.x = 1
        with support.swap_attr(obj, "x", 5) as x:
            self.assertEqual(obj.x, 5)
            self.assertEqual(x, 1)
        self.assertEqual(obj.x, 1)
        with support.swap_attr(obj, "y", 5) as y:
            self.assertEqual(obj.y, 5)
            self.assertIsNone(y)
        self.assertNotHasAttr(obj, 'y')
        with support.swap_attr(obj, "y", 5):
            del obj.y
        self.assertNotHasAttr(obj, 'y')

    def test_swap_item(self):
        D = {"x":1}
        with support.swap_item(D, "x", 5) as x:
            self.assertEqual(D["x"], 5)
            self.assertEqual(x, 1)
        self.assertEqual(D["x"], 1)
        with support.swap_item(D, "y", 5) as y:
            self.assertEqual(D["y"], 5)
            self.assertIsNone(y)
        self.assertNotIn("y", D)
        with support.swap_item(D, "y", 5):
            del D["y"]
        self.assertNotIn("y", D)

    class RefClass:
        attribute1 = None
        attribute2 = None
        _hidden_attribute1 = None
        __magic_1__ = None

    class OtherClass:
        attribute2 = None
        attribute3 = None
        __magic_1__ = None
        __magic_2__ = None

    def test_detect_api_mismatch(self):
        missing_items = support.detect_api_mismatch(self.RefClass,
                                                    self.OtherClass)
        self.assertEqual({'attribute1'}, missing_items)

        missing_items = support.detect_api_mismatch(self.OtherClass,
                                                    self.RefClass)
        self.assertEqual({'attribute3', '__magic_2__'}, missing_items)

    def test_detect_api_mismatch__ignore(self):
        ignore = ['attribute1', 'attribute3', '__magic_2__', 'not_in_either']

        missing_items = support.detect_api_mismatch(
            self.RefClass, self.OtherClass, ignore=ignore)
        self.assertEqual(set(), missing_items)

        missing_items = support.detect_api_mismatch(
            self.OtherClass, self.RefClass, ignore=ignore)
        self.assertEqual(set(), missing_items)

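    # check__all__() verifies a module's __all__: 'extra' lists names expected
    # in __all__ even though they would not be detected automatically, while
    # 'not_exported' lists public-looking names deliberately left out.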
    def test_check__all__(self):
        extra = {'tempdir'}
        not_exported = {'template'}
        support.check__all__(self,
                             tempfile,
                             extra=extra,
                             not_exported=not_exported)

        extra = {
            'TextTestResult',
            'installHandler',
        }
        not_exported = {'load_tests', "TestProgram", "BaseTestSuite"}
        support.check__all__(self,
                             unittest,
                             ("unittest.result", "unittest.case",
                              "unittest.suite", "unittest.loader",
                              "unittest.main", "unittest.runner",
                              "unittest.signals", "unittest.async_case"),
                             extra=extra,
                             not_exported=not_exported)

        self.assertRaises(AssertionError, support.check__all__, self, unittest)

    @unittest.skipUnless(hasattr(os, 'waitpid') and hasattr(os, 'WNOHANG'),
                         'need os.waitpid() and os.WNOHANG')
    @support.requires_fork()
    def test_reap_children(self):
        # Make sure that there is no other pending child process
        support.reap_children()

        # Create a child process
        pid = os.fork()
        if pid == 0:
            # child process: do nothing, just exit
            os._exit(0)

        was_altered = support.environment_altered
        try:
            support.environment_altered = False
            stderr = io.StringIO()

            for _ in support.sleeping_retry(support.SHORT_TIMEOUT):
                with support.swap_attr(support.print_warning, 'orig_stderr', stderr):
                    support.reap_children()

                # Use environment_altered to check if reap_children() found
                # the child process
                if support.environment_altered:
                    break

            msg = "Warning -- reap_children() reaped child process %s" % pid
            self.assertIn(msg, stderr.getvalue())
            self.assertTrue(support.environment_altered)
        finally:
            support.environment_altered = was_altered

        # Just in case, check again that there is no other
        # pending child process
        support.reap_children()

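    # check_options() runs a child interpreter with the given command-line
    # arguments (with PYTHON* environment variables stripped) and compares the
    # repr() printed by the named test.support helper against 'expected'.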
    @support.requires_subprocess()
    def check_options(self, args, func, expected=None):
        code = f'from test.support import {func}; print(repr({func}()))'
        cmd = [sys.executable, *args, '-c', code]
        env = {key: value for key, value in os.environ.items()
               if not key.startswith('PYTHON')}
        proc = subprocess.run(cmd,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL,
                              universal_newlines=True,
                              env=env)
        if expected is None:
            expected = args
        self.assertEqual(proc.stdout.rstrip(), repr(expected))
        self.assertEqual(proc.returncode, 0)

    @support.requires_resource('cpu')
    def test_args_from_interpreter_flags(self):
        # Test test.support.args_from_interpreter_flags()
        for opts in (
            # no option
            [],
            # single option
            ['-B'],
            ['-s'],
            ['-S'],
            ['-E'],
            ['-v'],
            ['-b'],
            ['-P'],
            ['-q'],
            ['-I'],
            # same option multiple times
            ['-bb'],
            ['-vvv'],
            # -W options
            ['-Wignore'],
            # -X options
            ['-X', 'dev'],
            ['-Wignore', '-X', 'dev'],
            ['-X', 'faulthandler'],
            ['-X', 'importtime'],
            ['-X', 'importtime=2'],
            ['-X', 'showrefcount'],
            ['-X', 'tracemalloc'],
            ['-X', 'tracemalloc=3'],
        ):
            with self.subTest(opts=opts):
                self.check_options(opts, 'args_from_interpreter_flags')

        self.check_options(['-I', '-E', '-s', '-P'],
                           'args_from_interpreter_flags',
                           ['-I'])

    def test_optim_args_from_interpreter_flags(self):
        # Test test.support.optim_args_from_interpreter_flags()
        for opts in (
            # no option
            [],
            ['-O'],
            ['-OO'],
            ['-OOOO'],
        ):
            with self.subTest(opts=opts):
                self.check_options(opts, 'optim_args_from_interpreter_flags')

    @unittest.skipIf(support.is_apple_mobile, "Unstable on Apple Mobile")
    @unittest.skipIf(support.is_wasi, "Unavailable on WASI")
    def test_fd_count(self):
        # We cannot test the absolute value of fd_count(): on old Linux kernel
        # or glibc versions, os.urandom() keeps a FD open on /dev/urandom
        # device and Python has 4 FD opens instead of 3. Test is unstable on
        # Emscripten and Apple Mobile platforms; these platforms start and stop
        # background threads that use pipes and epoll fds.
        start = os_helper.fd_count()
        fd = os.open(__file__, os.O_RDONLY)
        try:
            more = os_helper.fd_count()
        finally:
            os.close(fd)
        self.assertEqual(more - start, 1)

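    # print_warning() prefixes every line with "Warning -- " and writes to the
    # stream saved in support.print_warning.orig_stderr, which these tests swap
    # out to capture the output.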
    def check_print_warning(self, msg, expected):
        stderr = io.StringIO()
        with support.swap_attr(support.print_warning, 'orig_stderr', stderr):
            support.print_warning(msg)
        self.assertEqual(stderr.getvalue(), expected)

    def test_print_warning(self):
        self.check_print_warning("msg",
                                 "Warning -- msg\n")
        self.check_print_warning("a\nb",
                                 'Warning -- a\nWarning -- b\n')

    def test_has_strftime_extensions(self):
        if sys.platform == "win32":
            self.assertFalse(support.has_strftime_extensions)
        else:
            self.assertTrue(support.has_strftime_extensions)

    def test_get_recursion_depth(self):
        # test support.get_recursion_depth()
        code = textwrap.dedent("""
            from test import support
            import sys

            def check(cond):
                if not cond:
                    raise AssertionError("test failed")

            # depth 1
            check(support.get_recursion_depth() == 1)

            # depth 2
            def test_func():
                check(support.get_recursion_depth() == 2)
            test_func()

            def test_recursive(depth, limit):
                if depth >= limit:
                    # cannot call get_recursion_depth() at this depth,
                    # it can raise RecursionError
                    return
                get_depth = support.get_recursion_depth()
                print(f"test_recursive: {depth}/{limit}: "
                      f"get_recursion_depth() says {get_depth}")
                check(get_depth == depth)
                test_recursive(depth + 1, limit)

            # depth up to 25
            with support.infinite_recursion(max_depth=25):
                limit = sys.getrecursionlimit()
                print(f"test with sys.getrecursionlimit()={limit}")
                test_recursive(2, limit)

            # depth up to 500
            with support.infinite_recursion(max_depth=500):
                limit = sys.getrecursionlimit()
                print(f"test with sys.getrecursionlimit()={limit}")
                test_recursive(2, limit)
        """)
        script_helper.assert_python_ok("-c", code)

    def test_recursion(self):
        # Test infinite_recursion() and get_recursion_available() functions.
        def recursive_function(depth):
            if depth:
                recursive_function(depth - 1)

        for max_depth in (5, 25, 250, 2500):
            with support.infinite_recursion(max_depth):
                available = support.get_recursion_available()

                # Recursion up to 'available' additional frames should be OK.
                recursive_function(available)

                # Recursion up to 'available+1' additional frames must raise
                # RecursionError. Avoid self.assertRaises(RecursionError) which
                # can consume more than 3 frames and so raises RecursionError.
                try:
                    recursive_function(available + 1)
                except RecursionError:
                    pass
                else:
                    self.fail("RecursionError was not raised")

        # Test the bare minimum: max_depth=3
        with support.infinite_recursion(3):
            try:
                recursive_function(3)
            except RecursionError:
                pass
            else:
                self.fail("RecursionError was not raised")

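    # _parse_memlimit() accepts k/m/g/t suffixes as binary multiples (KiB, MiB,
    # GiB, TiB) and raises ValueError for malformed limits.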
    def test_parse_memlimit(self):
        parse = support._parse_memlimit
        KiB = 1024
        MiB = KiB * 1024
        GiB = MiB * 1024
        TiB = GiB * 1024
        self.assertEqual(parse('0k'), 0)
        self.assertEqual(parse('3k'), 3 * KiB)
        self.assertEqual(parse('2.4m'), int(2.4 * MiB))
        self.assertEqual(parse('4g'), int(4 * GiB))
        self.assertEqual(parse('1t'), TiB)

        for limit in ('', '3', '3.5.10k', '10x'):
            with self.subTest(limit=limit):
                with self.assertRaises(ValueError):
                    parse(limit)

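    # set_memlimit() caps support.max_memuse at sys.maxsize, while
    # support.real_max_memuse keeps the full requested value.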
    def test_set_memlimit(self):
        _4GiB = 4 * 1024 ** 3
        TiB = 1024 ** 4
        old_max_memuse = support.max_memuse
        old_real_max_memuse = support.real_max_memuse
        try:
            if sys.maxsize > 2**32:
                support.set_memlimit('4g')
                self.assertEqual(support.max_memuse, _4GiB)
                self.assertEqual(support.real_max_memuse, _4GiB)

                big = 2**100 // TiB
                support.set_memlimit(f'{big}t')
                self.assertEqual(support.max_memuse, sys.maxsize)
                self.assertEqual(support.real_max_memuse, big * TiB)
            else:
                support.set_memlimit('4g')
                self.assertEqual(support.max_memuse, sys.maxsize)
                self.assertEqual(support.real_max_memuse, _4GiB)
        finally:
            support.max_memuse = old_max_memuse
            support.real_max_memuse = old_real_max_memuse

    def test_copy_python_src_ignore(self):
        # Get source directory
        src_dir = sysconfig.get_config_var('abs_srcdir')
        if not src_dir:
            src_dir = sysconfig.get_config_var('srcdir')
        src_dir = os.path.abspath(src_dir)

        # Check that the source code is available
        if not os.path.exists(src_dir):
            self.skipTest(f"cannot access Python source code directory:"
                          f" {src_dir!r}")
        # Check that the landmark copy_python_src_ignore() expects is available
        # (Previously we looked for 'Lib\os.py', which is always present on Windows.)
        landmark = os.path.join(src_dir, 'Modules')
        if not os.path.exists(landmark):
            self.skipTest(f"cannot access Python source code directory:"
                          f" {landmark!r} landmark is missing")

        # Test support.copy_python_src_ignore()

        # Source code directory
        ignored = {'.git', '__pycache__'}
        names = os.listdir(src_dir)
        self.assertEqual(support.copy_python_src_ignore(src_dir, names),
                         ignored | {'build'})

        # Doc/ directory
        path = os.path.join(src_dir, 'Doc')
        self.assertEqual(support.copy_python_src_ignore(path, os.listdir(path)),
                         ignored | {'build', 'venv'})

        # Another directory
        path = os.path.join(src_dir, 'Objects')
        self.assertEqual(support.copy_python_src_ignore(path, os.listdir(path)),
                         ignored)

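    # get_signal_name() maps negative POSIX exit codes (-signum), shell-style
    # 128+signum codes, and Windows NTSTATUS values to a signal/status name.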
    def test_get_signal_name(self):
        for exitcode, expected in (
            (-int(signal.SIGINT), 'SIGINT'),
            (-int(signal.SIGSEGV), 'SIGSEGV'),
            (128 + int(signal.SIGABRT), 'SIGABRT'),
            (3221225477, "STATUS_ACCESS_VIOLATION"),
            (0xC00000FD, "STATUS_STACK_OVERFLOW"),
        ):
            self.assertEqual(support.get_signal_name(exitcode), expected,
                             exitcode)

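    # linked_to_musl() returns a false value when the interpreter is not
    # linked against musl, and a (major, minor, patch) version tuple when it
    # is; the result is cached.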
    def test_linked_to_musl(self):
        linked = support.linked_to_musl()
        self.assertIsNotNone(linked)
        if support.is_wasi or support.is_emscripten:
            self.assertTrue(linked)
        # The value is cached, so make sure it returns the same value again.
        self.assertIs(linked, support.linked_to_musl())
        # Unlike the libc version, the musl version is a triple.
        if linked:
            self.assertIsInstance(linked, tuple)
            self.assertEqual(3, len(linked))
            for v in linked:
                self.assertIsInstance(v, int)


    # XXX - a list of untested APIs follows:
    # make_legacy_pyc
    # is_resource_enabled
    # requires
    # fcmp
    # umask
    # findfile
    # check_warnings
    # EnvironmentVarGuard
    # transient_internet
    # run_with_locale
    # bigmemtest
    # precisionbigmemtest
    # bigaddrspacetest
    # requires_resource
    # threading_cleanup
    # reap_threads
    # can_symlink
    # skip_unless_symlink
    # SuppressCrashReport


if __name__ == '__main__':
    unittest.main()