2009-05-05 08:31:54 +00:00
|
|
|
""" Tests for the linecache module """
|
|
|
|
|
|
|
|
import linecache
|
|
|
|
import unittest
|
|
|
|
import os.path
|
2015-08-19 12:20:37 +12:00
|
|
|
import tempfile
|
2025-05-09 08:45:16 +02:00
|
|
|
import threading
|
2015-08-20 10:48:46 +12:00
|
|
|
import tokenize
|
2024-02-20 11:47:41 -05:00
|
|
|
from importlib.machinery import ModuleSpec
|
2009-05-05 08:31:54 +00:00
|
|
|
from test import support
|
2020-07-06 17:15:08 +08:00
|
|
|
from test.support import os_helper
|
2025-05-09 08:45:16 +02:00
|
|
|
from test.support import threading_helper
|
2025-03-10 21:54:05 +00:00
|
|
|
from test.support.script_helper import assert_python_ok
|
2009-05-05 08:31:54 +00:00
|
|
|
|
|
|
|
|
|
|
|
# The linecache module's own source file doubles as known-good test data:
# it is guaranteed to exist and to be readable Python source.
FILENAME = linecache.__file__
# A sibling path that is guaranteed not to exist on disk.
NONEXISTENT_FILENAME = FILENAME + '.missing'
# A syntactically invalid filename; lookups on it must fail gracefully.
INVALID_NAME = '!@$)(!@#_1'
# Both an "empty string" expectation and an empty-filename input.
EMPTY = ''
TEST_PATH = os.path.dirname(__file__)
# Stdlib modules whose .py files are read and compared against the cache.
MODULES = "linecache abc".split()
MODULE_PATH = os.path.dirname(FILENAME)

# Sample sources written to temp files by the cache-consistency tests.
SOURCE_1 = '''
" Docstring "

def function():
    return result

'''

SOURCE_2 = '''
def f():
    return 1 + 1

a = f()
'''

SOURCE_3 = '''
def f():
    return 3''' # No ending newline
2015-08-19 12:20:37 +12:00
|
|
|
class TempFile:
|
|
|
|
|
|
|
|
def setUp(self):
|
|
|
|
super().setUp()
|
|
|
|
with tempfile.NamedTemporaryFile(delete=False) as fp:
|
|
|
|
self.file_name = fp.name
|
|
|
|
fp.write(self.file_byte_string)
|
2020-07-06 17:15:08 +08:00
|
|
|
self.addCleanup(os_helper.unlink, self.file_name)
|
2015-08-19 12:20:37 +12:00
|
|
|
|
|
|
|
|
|
|
|
class GetLineTestsGoodData(TempFile):
    """Shared checks for files whose decoded lines equal ``file_list``.

    Subclasses provide the expected content, e.g.::

        file_list = ['list\\n', 'of\\n', 'good\\n', 'strings\\n']
    """

    def setUp(self):
        # Encode the expected lines so TempFile can write them out.
        encoded = ''.join(self.file_list).encode('utf-8')
        self.file_byte_string = encoded
        super().setUp()

    def test_getline(self):
        # Compare every physical line of the file against the cached one.
        # linecache is expected to supply a trailing newline even when the
        # underlying file lacks one, so normalize before comparing.
        with tokenize.open(self.file_name) as fp:
            for lineno, expected in enumerate(fp, start=1):
                if not expected.endswith('\n'):
                    expected += '\n'
                cached = linecache.getline(self.file_name, lineno)
                self.assertEqual(expected, cached)

    def test_getlines(self):
        # The whole-file lookup must round-trip the original line list.
        self.assertEqual(linecache.getlines(self.file_name), self.file_list)
|
class GetLineTestsBadData(TempFile):
    """Shared checks for undecodable files: lookups come back empty.

    Subclasses provide the raw bytes, e.g.::

        file_byte_string = b'Bad data goes here'
    """

    def test_getline(self):
        # A single-line lookup on broken data must yield '' rather than raise.
        line = linecache.getline(self.file_name, 1)
        self.assertEqual(line, '')

    def test_getlines(self):
        # Likewise, the whole-file lookup must yield an empty list.
        lines = linecache.getlines(self.file_name)
        self.assertEqual(lines, [])
|
class EmptyFile(GetLineTestsGoodData, unittest.TestCase):
    """A zero-byte file."""

    file_list = []

    def test_getlines(self):
        # Overrides the base check: this test expects an empty file to be
        # cached as a single synthesized newline rather than as [].
        self.assertEqual(linecache.getlines(self.file_name), ['\n'])
|
|
class SingleEmptyLine(GetLineTestsGoodData, unittest.TestCase):
    # A file containing exactly one blank line.
    file_list = ['\n']
|
class GoodUnicode(GetLineTestsGoodData, unittest.TestCase):
    # Well-formed UTF-8 content including non-ASCII characters.
    file_list = ['á\n', 'b\n', 'abcdef\n', 'ááááá\n']
2022-06-30 10:18:18 +01:00
|
|
|
class BadUnicode_NoDeclaration(GetLineTestsBadData, unittest.TestCase):
    # Invalid UTF-8 (lone 0x80 byte) with no coding declaration.
    file_byte_string = b'\n\x80abc'
|
2022-06-30 10:18:18 +01:00
|
|
|
class BadUnicode_WithDeclaration(GetLineTestsBadData, unittest.TestCase):
    # Invalid byte for the declared utf-8 encoding.
    file_byte_string = b'# coding=utf-8\n\x80abc'
|
2024-02-20 11:47:41 -05:00
|
|
|
class FakeLoader:
    """Stand-in loader whose get_source() fabricates source for any name."""

    def get_source(self, fullname):
        # Deterministic fake source keyed on the requested module name,
        # so tests can predict exactly what linecache will cache.
        fabricated = f'source for {fullname}'
        return fabricated
|
|
class NoSourceLoader:
    """Stand-in loader that reports no source is available."""

    def get_source(self, fullname):
        # None is the loader-protocol signal for "source unavailable",
        # regardless of which module was asked for.
        return None
2009-05-05 08:31:54 +00:00
|
|
|
class LineCacheTests(unittest.TestCase):
    """Core behavior of the linecache API: getline, getlines, clearcache,
    checkcache, lazycache, and loader/spec-driven source retrieval."""

    def test_getline(self):
        """getline() returns '' for bad inputs and real lines for real files."""
        getline = linecache.getline

        # Bad values for line number should return an empty string
        self.assertEqual(getline(FILENAME, 2**15), EMPTY)
        self.assertEqual(getline(FILENAME, -1), EMPTY)

        # Float values currently raise TypeError, should it?
        self.assertRaises(TypeError, getline, FILENAME, 1.1)

        # Bad filenames should return an empty string
        self.assertEqual(getline(EMPTY, 1), EMPTY)
        self.assertEqual(getline(INVALID_NAME, 1), EMPTY)

        # Check module loading
        for entry in MODULES:
            filename = os.path.join(MODULE_PATH, entry) + '.py'
            with open(filename, encoding='utf-8') as file:
                for index, line in enumerate(file):
                    self.assertEqual(line, getline(filename, index + 1))

        # Check that bogus data isn't returned (issue #1309567)
        empty = linecache.getlines('a/b/c/__init__.py')
        self.assertEqual(empty, [])

    def test_no_ending_newline(self):
        """A file without a final newline still caches a newline-terminated
        last line."""
        self.addCleanup(os_helper.unlink, os_helper.TESTFN)
        with open(os_helper.TESTFN, "w", encoding='utf-8') as fp:
            fp.write(SOURCE_3)
        lines = linecache.getlines(os_helper.TESTFN)
        self.assertEqual(lines, ["\n", "def f():\n", "    return 3\n"])

    def test_clearcache(self):
        """clearcache() removes every previously cached file."""
        cached = []
        for entry in MODULES:
            filename = os.path.join(MODULE_PATH, entry) + '.py'
            cached.append(filename)
            linecache.getline(filename, 1)

        # Are all files cached?
        self.assertNotEqual(cached, [])
        cached_empty = [fn for fn in cached if fn not in linecache.cache]
        self.assertEqual(cached_empty, [])

        # Can we clear the cache?
        linecache.clearcache()
        cached_empty = [fn for fn in cached if fn in linecache.cache]
        self.assertEqual(cached_empty, [])

    def test_checkcache(self):
        """checkcache() leaves stale entries alone until asked, then
        refreshes the named entry from disk."""
        getline = linecache.getline
        # Create a source file and cache its contents
        source_name = os_helper.TESTFN + '.py'
        self.addCleanup(os_helper.unlink, source_name)
        with open(source_name, 'w', encoding='utf-8') as source:
            source.write(SOURCE_1)
        getline(source_name, 1)

        # Keep a copy of the old contents
        source_list = []
        with open(source_name, encoding='utf-8') as source:
            for index, line in enumerate(source):
                self.assertEqual(line, getline(source_name, index + 1))
                source_list.append(line)

        # Rewrite the file on disk; the cache is now stale.
        with open(source_name, 'w', encoding='utf-8') as source:
            source.write(SOURCE_2)

        # Try to update a bogus cache entry
        linecache.checkcache('dummy')

        # Check that the cache matches the old contents
        for index, line in enumerate(source_list):
            self.assertEqual(line, getline(source_name, index + 1))

        # Update the cache and check whether it matches the new source file
        linecache.checkcache(source_name)
        with open(source_name, encoding='utf-8') as source:
            for index, line in enumerate(source):
                self.assertEqual(line, getline(source_name, index + 1))
                source_list.append(line)

    def test_lazycache_no_globals(self):
        """lazycache() refuses to register when no module globals are given."""
        lines = linecache.getlines(FILENAME)
        linecache.clearcache()
        self.assertEqual(False, linecache.lazycache(FILENAME, None))
        self.assertEqual(lines, linecache.getlines(FILENAME))

    def test_lazycache_smoke(self):
        """A lazily registered entry resolves to the real lines on lookup."""
        lines = linecache.getlines(NONEXISTENT_FILENAME, globals())
        linecache.clearcache()
        self.assertEqual(
            True, linecache.lazycache(NONEXISTENT_FILENAME, globals()))
        # A lazy entry is a 1-tuple until first resolved.
        self.assertEqual(1, len(linecache.cache[NONEXISTENT_FILENAME]))
        # Note here that we're looking up a nonexistent filename with no
        # globals: this would error if the lazy value wasn't resolved.
        self.assertEqual(lines, linecache.getlines(NONEXISTENT_FILENAME))

    def test_lazycache_provide_after_failed_lookup(self):
        """Registering lazily after a failed lookup lets updatecache() work."""
        linecache.clearcache()
        lines = linecache.getlines(NONEXISTENT_FILENAME, globals())
        linecache.clearcache()
        linecache.getlines(NONEXISTENT_FILENAME)
        linecache.lazycache(NONEXISTENT_FILENAME, globals())
        self.assertEqual(lines, linecache.updatecache(NONEXISTENT_FILENAME))

    def test_lazycache_check(self):
        """checkcache() must tolerate (skip over) unresolved lazy entries."""
        linecache.clearcache()
        linecache.lazycache(NONEXISTENT_FILENAME, globals())
        linecache.checkcache()

    def test_lazycache_bad_filename(self):
        """Empty and angle-bracket pseudo-filenames are rejected."""
        linecache.clearcache()
        self.assertEqual(False, linecache.lazycache('', globals()))
        self.assertEqual(False, linecache.lazycache('<foo>', globals()))

    def test_lazycache_already_cached(self):
        """lazycache() is a no-op for a filename already fully cached."""
        linecache.clearcache()
        lines = linecache.getlines(NONEXISTENT_FILENAME, globals())
        self.assertEqual(
            False,
            linecache.lazycache(NONEXISTENT_FILENAME, globals()))
        # A fully resolved entry is a 4-tuple, not the lazy 1-tuple.
        self.assertEqual(4, len(linecache.cache[NONEXISTENT_FILENAME]))

    def test_memoryerror(self):
        """A MemoryError inside updatecache() degrades gracefully:
        cached lines are kept; an empty cache yields []."""
        lines = linecache.getlines(FILENAME)
        self.assertTrue(lines)

        def raise_memoryerror(*args, **kwargs):
            raise MemoryError

        # With the entry already cached, the failure is invisible.
        with support.swap_attr(linecache, 'updatecache', raise_memoryerror):
            lines2 = linecache.getlines(FILENAME)
            self.assertEqual(lines2, lines)

        # With a cold cache, the failure results in an empty list...
        linecache.clearcache()
        with support.swap_attr(linecache, 'updatecache', raise_memoryerror):
            lines3 = linecache.getlines(FILENAME)
            self.assertEqual(lines3, [])
        # ...and a later, unpatched lookup recovers the real lines.
        self.assertEqual(linecache.getlines(FILENAME), lines)

    def test_loader(self):
        """Source retrieval consults __spec__/__loader__.get_source(),
        preferring the spec's name over __name__ when available."""
        filename = 'scheme://path'

        # Loaders without a usable get_source() yield no lines.
        for loader in (None, object(), NoSourceLoader()):
            linecache.clearcache()
            module_globals = {'__name__': 'a.b.c', '__loader__': loader}
            self.assertEqual(linecache.getlines(filename, module_globals), [])

        linecache.clearcache()
        module_globals = {'__name__': 'a.b.c', '__loader__': FakeLoader()}
        self.assertEqual(linecache.getlines(filename, module_globals),
                         ['source for a.b.c\n'])

        # Unusable specs fall back to __loader__ + __name__.
        for spec in (None, object(), ModuleSpec('', FakeLoader())):
            linecache.clearcache()
            module_globals = {'__name__': 'a.b.c', '__loader__': FakeLoader(),
                              '__spec__': spec}
            self.assertEqual(linecache.getlines(filename, module_globals),
                             ['source for a.b.c\n'])

        # A valid spec wins: its name ('x.y.z') is used, not __name__.
        linecache.clearcache()
        spec = ModuleSpec('x.y.z', FakeLoader())
        module_globals = {'__name__': 'a.b.c', '__loader__': spec.loader,
                          '__spec__': spec}
        self.assertEqual(linecache.getlines(filename, module_globals),
                         ['source for x.y.z\n'])

    def test_frozen(self):
        """A '<frozen ...>' name resolves only via module_globals' __file__,
        and the result is then cached under the frozen name."""
        filename = '<frozen fakemodule>'
        module_globals = {'__file__': FILENAME}
        empty = linecache.getlines(filename)
        self.assertEqual(empty, [])
        lines = linecache.getlines(filename, module_globals)
        self.assertGreater(len(lines), 0)
        lines_cached = linecache.getlines(filename)
        self.assertEqual(lines, lines_cached)
        linecache.clearcache()
        empty = linecache.getlines(filename)
        self.assertEqual(empty, [])

    def test_invalid_names(self):
        """Filenames that make os.stat() raise (NUL bytes, surrogates,
        over-long paths) must not poison the cache or escape as errors."""
        for name, desc in [
            ('\x00', 'NUL bytes filename'),
            (__file__ + '\x00', 'filename with embedded NUL bytes'),
            # A filename with surrogate codes. A UnicodeEncodeError is raised
            # by os.stat() upon querying, which is a subclass of ValueError.
            ("\uD834\uDD1E.py", 'surrogate codes (MUSICAL SYMBOL G CLEF)'),
            # For POSIX platforms, an OSError will be raised but for Windows
            # platforms, a ValueError is raised due to the path_t converter.
            # See: https://github.com/python/cpython/issues/122170
            ('a' * 1_000_000, 'very long filename'),
        ]:
            with self.subTest(f'updatecache: {desc}'):
                linecache.clearcache()
                lines = linecache.updatecache(name)
                self.assertListEqual(lines, [])
                self.assertNotIn(name, linecache.cache)

            # hack into the cache (it shouldn't be allowed
            # but we never know what people do...)
            for key, fullname in [(name, 'ok'), ('key', name), (name, name)]:
                with self.subTest(f'checkcache: {desc}',
                                  key=key, fullname=fullname):
                    linecache.clearcache()
                    linecache.cache[key] = (0, 1234, [], fullname)
                    linecache.checkcache(key)
                    self.assertNotIn(key, linecache.cache)

        # just to be sure that we did not mess with cache
        linecache.clearcache()

    def test_linecache_python_string(self):
        """Importing linecache in a fresh interpreter leaves its cache empty
        (no self-registration side effects at import time)."""
        cmdline = "import linecache;assert len(linecache.cache) == 0"
        retcode, stdout, stderr = assert_python_ok('-c', cmdline)
        self.assertEqual(retcode, 0)
        self.assertEqual(stdout, b'')
        self.assertEqual(stderr, b'')
|
2021-05-18 17:56:52 +09:00
|
|
|
class LineCacheInvalidationTests(unittest.TestCase):
|
|
|
|
def setUp(self):
|
|
|
|
super().setUp()
|
|
|
|
linecache.clearcache()
|
|
|
|
self.deleted_file = os_helper.TESTFN + '.1'
|
|
|
|
self.modified_file = os_helper.TESTFN + '.2'
|
|
|
|
self.unchanged_file = os_helper.TESTFN + '.3'
|
|
|
|
|
|
|
|
for fname in (self.deleted_file,
|
|
|
|
self.modified_file,
|
|
|
|
self.unchanged_file):
|
|
|
|
self.addCleanup(os_helper.unlink, fname)
|
|
|
|
with open(fname, 'w', encoding='utf-8') as source:
|
|
|
|
source.write(f'print("I am {fname}")')
|
|
|
|
|
|
|
|
self.assertNotIn(fname, linecache.cache)
|
|
|
|
linecache.getlines(fname)
|
|
|
|
self.assertIn(fname, linecache.cache)
|
|
|
|
|
|
|
|
os.remove(self.deleted_file)
|
|
|
|
with open(self.modified_file, 'w', encoding='utf-8') as source:
|
|
|
|
source.write('print("was modified")')
|
|
|
|
|
|
|
|
def test_checkcache_for_deleted_file(self):
|
|
|
|
linecache.checkcache(self.deleted_file)
|
|
|
|
self.assertNotIn(self.deleted_file, linecache.cache)
|
|
|
|
self.assertIn(self.modified_file, linecache.cache)
|
|
|
|
self.assertIn(self.unchanged_file, linecache.cache)
|
|
|
|
|
|
|
|
def test_checkcache_for_modified_file(self):
|
|
|
|
linecache.checkcache(self.modified_file)
|
|
|
|
self.assertIn(self.deleted_file, linecache.cache)
|
|
|
|
self.assertNotIn(self.modified_file, linecache.cache)
|
|
|
|
self.assertIn(self.unchanged_file, linecache.cache)
|
|
|
|
|
|
|
|
def test_checkcache_with_no_parameter(self):
|
|
|
|
linecache.checkcache()
|
|
|
|
self.assertNotIn(self.deleted_file, linecache.cache)
|
|
|
|
self.assertNotIn(self.modified_file, linecache.cache)
|
|
|
|
self.assertIn(self.unchanged_file, linecache.cache)
|
|
|
|
|
|
|
|
|
2025-05-09 08:45:16 +02:00
|
|
|
class MultiThreadingTest(unittest.TestCase):
|
|
|
|
@threading_helper.reap_threads
|
|
|
|
@threading_helper.requires_working_threading()
|
|
|
|
def test_read_write_safety(self):
|
|
|
|
|
|
|
|
with tempfile.TemporaryDirectory() as tmpdirname:
|
|
|
|
filenames = []
|
|
|
|
for i in range(10):
|
|
|
|
name = os.path.join(tmpdirname, f"test_{i}.py")
|
|
|
|
with open(name, "w") as h:
|
|
|
|
h.write("import time\n")
|
|
|
|
h.write("import system\n")
|
|
|
|
filenames.append(name)
|
|
|
|
|
|
|
|
def linecache_get_line(b):
|
|
|
|
b.wait()
|
|
|
|
for _ in range(100):
|
|
|
|
for name in filenames:
|
|
|
|
linecache.getline(name, 1)
|
|
|
|
|
|
|
|
def check(funcs):
|
|
|
|
barrier = threading.Barrier(len(funcs))
|
|
|
|
threads = []
|
|
|
|
|
|
|
|
for func in funcs:
|
|
|
|
thread = threading.Thread(target=func, args=(barrier,))
|
|
|
|
|
|
|
|
threads.append(thread)
|
|
|
|
|
|
|
|
with threading_helper.start_threads(threads):
|
|
|
|
pass
|
|
|
|
|
|
|
|
check([linecache_get_line] * 20)
|
|
|
|
|
|
|
|
|
2009-05-05 08:31:54 +00:00
|
|
|
# Allow running this test file directly: python test_linecache.py
if __name__ == "__main__":
    unittest.main()