Remove obsolete stuff
This commit is contained in:
43
bin/python/python-3.13/Lib/test/test_tools/__init__.py
Normal file
43
bin/python/python-3.13/Lib/test/test_tools/__init__.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""Support functions for testing scripts in the Tools directory."""
|
||||
import contextlib
|
||||
import importlib
|
||||
import os.path
|
||||
import unittest
|
||||
from test import support
|
||||
from test.support import import_helper
|
||||
|
||||
|
||||
# The tests in this package spawn the interpreter (and sometimes build it),
# so skip the whole package on platforms without subprocess support.
if not support.has_subprocess_support:
    raise unittest.SkipTest("test module requires subprocess")
# Root of the source/installation tree, computed by walking four levels up
# from this file: test_tools -> test -> Lib -> <src/install dir>.
basepath = __file__
for _ in range(4):
    basepath = os.path.dirname(basepath)
basepath = os.path.normpath(basepath)

# Location of the Tools directory and its scripts subdirectory.
toolsdir = os.path.join(basepath, 'Tools')
scriptsdir = os.path.join(toolsdir, 'scripts')
def skip_if_missing(tool=None):
    """Skip the calling test module unless the given Tools subdirectory
    (default: Tools/scripts) exists in this checkout/installation.
    """
    if not tool:
        # Default to the general-purpose scripts directory.
        tool = 'scripts'
        tooldir = scriptsdir
    else:
        tooldir = os.path.join(toolsdir, tool)
    if not os.path.isdir(tooldir):
        raise unittest.SkipTest(f'{tool} directory could not be found')
@contextlib.contextmanager
def imports_under_tool(name, *subdirs):
    """Context manager: temporarily put Tools/<name>/<subdirs...> on sys.path
    so modules shipped with that tool can be imported.
    """
    tool_path = os.path.join(toolsdir, name, *subdirs)
    with import_helper.DirsOnSysPath(tool_path) as cm:
        yield cm
def import_tool(toolname):
    """Import and return *toolname* with Tools/scripts on sys.path."""
    with import_helper.DirsOnSysPath(scriptsdir):
        module = importlib.import_module(toolname)
    return module
def load_tests(*args):
    """unittest load_tests hook: delegate to the shared package loader."""
    pkg_dir = os.path.dirname(__file__)
    return support.load_package_tests(pkg_dir, *args)
4
bin/python/python-3.13/Lib/test/test_tools/__main__.py
Normal file
4
bin/python/python-3.13/Lib/test/test_tools/__main__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
# Entry point for "python -m test.test_tools": load_tests comes from the
# package __init__, and unittest.main() intentionally runs at import time.
from test.test_tools import load_tests
import unittest

unittest.main()
37
bin/python/python-3.13/Lib/test/test_tools/test_freeze.py
Normal file
37
bin/python/python-3.13/Lib/test/test_tools/test_freeze.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""Sanity-check tests for the "freeze" tool."""
|
||||
|
||||
import sys
|
||||
import textwrap
|
||||
import unittest
|
||||
|
||||
from test import support
|
||||
from test.support import os_helper
|
||||
from test.test_tools import imports_under_tool, skip_if_missing
|
||||
|
||||
# Skip unless Tools/freeze is present, then import its test helper module
# with Tools/freeze/test temporarily on sys.path.
skip_if_missing('freeze')
with imports_under_tool('freeze', 'test'):
    import freeze as helper
@support.requires_zlib()
@unittest.skipIf(sys.platform.startswith('win'), 'not supported on Windows')
@unittest.skipIf(sys.platform == 'darwin' and sys._framework,
                 'not supported for frameworks builds on macOS')
@support.skip_if_buildbot('not all buildbots have enough space')
# gh-103053: Skip test if Python is built with Profile Guided Optimization
# (PGO), since the test is just too slow in this case.
@unittest.skipIf(support.check_cflags_pgo(),
                 'test is too slow with PGO')
class TestFreeze(unittest.TestCase):

    @support.requires_resource('cpu')  # Building Python is slow
    def test_freeze_simple_script(self):
        """Freeze a trivial script and check the frozen binary runs it."""
        script = textwrap.dedent("""
            import sys
            print('running...')
            sys.exit(0)
            """)
        with os_helper.temp_dir() as tmpdir:
            # prepare() returns the working dir, script path and a python
            # binary suitable for freezing.
            builddir, scriptfile, python = helper.prepare(script, tmpdir)
            executable = helper.freeze(python, scriptfile, builddir)
            self.assertEqual(helper.run(executable), 'running...')
338
bin/python/python-3.13/Lib/test/test_tools/test_i18n.py
Normal file
338
bin/python/python-3.13/Lib/test/test_tools/test_i18n.py
Normal file
@@ -0,0 +1,338 @@
|
||||
"""Tests to cover the Tools/i18n package"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
from textwrap import dedent
|
||||
|
||||
from test.support.script_helper import assert_python_ok
|
||||
from test.test_tools import skip_if_missing, toolsdir
|
||||
from test.support.os_helper import temp_cwd, temp_dir
|
||||
|
||||
|
||||
# The i18n tests run Tools/i18n/pygettext.py; skip without a Tools dir.
skip_if_missing()
class Test_pygettext(unittest.TestCase):
    """Tests for the pygettext.py tool"""

    # Path to the script under test; every test spawns it in a subprocess.
    script = os.path.join(toolsdir,'i18n', 'pygettext.py')

    def get_header(self, data):
        """ utility: return the header of a .po file as a dictionary """
        headers = {}
        for line in data.split('\n'):
            # Skip blanks, comments and the msgid/msgstr framing lines;
            # only the quoted "Key: value" lines belong to the header.
            if not line or line.startswith(('#', 'msgid','msgstr')):
                continue
            line = line.strip('"')
            key, val = line.split(':',1)
            headers[key] = val.strip()
        return headers

    def get_msgids(self, data):
        """ utility: return all msgids in .po file as a list of strings """
        msgids = []
        reading_msgid = False
        cur_msgid = []
        for line in data.split('\n'):
            if reading_msgid:
                if line.startswith('"'):
                    # Continuation line of a multi-line msgid.
                    cur_msgid.append(line.strip('"'))
                else:
                    # End of the current msgid: flush it.
                    msgids.append('\n'.join(cur_msgid))
                    cur_msgid = []
                    reading_msgid = False
                continue
            if line.startswith('msgid '):
                line = line[len('msgid '):]
                cur_msgid.append(line.strip('"'))
                reading_msgid = True
        else:
            # for-else: flush a msgid still open at end of input.
            if reading_msgid:
                msgids.append('\n'.join(cur_msgid))

        return msgids

    def extract_docstrings_from_str(self, module_content):
        """ utility: return all msgids extracted from module_content """
        filename = 'test_docstrings.py'
        with temp_cwd(None) as cwd:
            with open(filename, 'w', encoding='utf-8') as fp:
                fp.write(module_content)
            # -D extracts docstrings; output goes to messages.pot in cwd.
            assert_python_ok(self.script, '-D', filename)
            with open('messages.pot', encoding='utf-8') as fp:
                data = fp.read()
        return self.get_msgids(data)

    def test_header(self):
        """Make sure the required fields are in the header, according to:
           http://www.gnu.org/software/gettext/manual/gettext.html#Header-Entry
        """
        with temp_cwd(None) as cwd:
            assert_python_ok(self.script)
            with open('messages.pot', encoding='utf-8') as fp:
                data = fp.read()
            header = self.get_header(data)

            self.assertIn("Project-Id-Version", header)
            self.assertIn("POT-Creation-Date", header)
            self.assertIn("PO-Revision-Date", header)
            self.assertIn("Last-Translator", header)
            self.assertIn("Language-Team", header)
            self.assertIn("MIME-Version", header)
            self.assertIn("Content-Type", header)
            self.assertIn("Content-Transfer-Encoding", header)
            self.assertIn("Generated-By", header)

            # not clear if these should be required in POT (template) files
            #self.assertIn("Report-Msgid-Bugs-To", header)
            #self.assertIn("Language", header)

            #"Plural-Forms" is optional

    @unittest.skipIf(sys.platform.startswith('aix'),
                     'bpo-29972: broken test on AIX')
    def test_POT_Creation_Date(self):
        """ Match the date format from xgettext for POT-Creation-Date """
        from datetime import datetime
        with temp_cwd(None) as cwd:
            assert_python_ok(self.script)
            with open('messages.pot', encoding='utf-8') as fp:
                data = fp.read()
            header = self.get_header(data)
            creationDate = header['POT-Creation-Date']

            # peel off the escaped newline at the end of string
            if creationDate.endswith('\\n'):
                creationDate = creationDate[:-len('\\n')]

            # This will raise if the date format does not exactly match.
            datetime.strptime(creationDate, '%Y-%m-%d %H:%M%z')

    def test_funcdocstring(self):
        # All string-prefix forms of a plain docstring must be extracted.
        for doc in ('"""doc"""', "r'''doc'''", "R'doc'", 'u"doc"'):
            with self.subTest(doc):
                msgids = self.extract_docstrings_from_str(dedent('''\
                def foo(bar):
                    %s
                ''' % doc))
                self.assertIn('doc', msgids)

    def test_funcdocstring_bytes(self):
        # A bytes literal is not a docstring; it must not be extracted.
        msgids = self.extract_docstrings_from_str(dedent('''\
        def foo(bar):
            b"""doc"""
        '''))
        self.assertFalse([msgid for msgid in msgids if 'doc' in msgid])

    def test_funcdocstring_fstring(self):
        # An f-string is not a docstring; it must not be extracted.
        msgids = self.extract_docstrings_from_str(dedent('''\
        def foo(bar):
            f"""doc"""
        '''))
        self.assertFalse([msgid for msgid in msgids if 'doc' in msgid])

    def test_classdocstring(self):
        for doc in ('"""doc"""', "r'''doc'''", "R'doc'", 'u"doc"'):
            with self.subTest(doc):
                msgids = self.extract_docstrings_from_str(dedent('''\
                class C:
                    %s
                ''' % doc))
                self.assertIn('doc', msgids)

    def test_classdocstring_bytes(self):
        msgids = self.extract_docstrings_from_str(dedent('''\
        class C:
            b"""doc"""
        '''))
        self.assertFalse([msgid for msgid in msgids if 'doc' in msgid])

    def test_classdocstring_fstring(self):
        msgids = self.extract_docstrings_from_str(dedent('''\
        class C:
            f"""doc"""
        '''))
        self.assertFalse([msgid for msgid in msgids if 'doc' in msgid])

    def test_moduledocstring(self):
        for doc in ('"""doc"""', "r'''doc'''", "R'doc'", 'u"doc"'):
            with self.subTest(doc):
                msgids = self.extract_docstrings_from_str(dedent('''\
                %s
                ''' % doc))
                self.assertIn('doc', msgids)

    def test_moduledocstring_bytes(self):
        msgids = self.extract_docstrings_from_str(dedent('''\
        b"""doc"""
        '''))
        self.assertFalse([msgid for msgid in msgids if 'doc' in msgid])

    def test_moduledocstring_fstring(self):
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"""doc"""
        '''))
        self.assertFalse([msgid for msgid in msgids if 'doc' in msgid])

    def test_msgid(self):
        # Implicitly concatenated string literals form a single msgid.
        msgids = self.extract_docstrings_from_str(
                '''_("""doc""" r'str' u"ing")''')
        self.assertIn('docstring', msgids)

    def test_msgid_bytes(self):
        msgids = self.extract_docstrings_from_str('_(b"""doc""")')
        self.assertFalse([msgid for msgid in msgids if 'doc' in msgid])

    def test_msgid_fstring(self):
        msgids = self.extract_docstrings_from_str('_(f"""doc""")')
        self.assertFalse([msgid for msgid in msgids if 'doc' in msgid])

    def test_funcdocstring_annotated_args(self):
        """ Test docstrings for functions with annotated args """
        msgids = self.extract_docstrings_from_str(dedent('''\
        def foo(bar: str):
            """doc"""
        '''))
        self.assertIn('doc', msgids)

    def test_funcdocstring_annotated_return(self):
        """ Test docstrings for functions with annotated return type """
        msgids = self.extract_docstrings_from_str(dedent('''\
        def foo(bar) -> str:
            """doc"""
        '''))
        self.assertIn('doc', msgids)

    def test_funcdocstring_defvalue_args(self):
        """ Test docstring for functions with default arg values """
        msgids = self.extract_docstrings_from_str(dedent('''\
        def foo(bar=()):
            """doc"""
        '''))
        self.assertIn('doc', msgids)

    def test_funcdocstring_multiple_funcs(self):
        """ Test docstring extraction for multiple functions combining
            annotated args, annotated return types and default arg values
        """
        msgids = self.extract_docstrings_from_str(dedent('''\
        def foo1(bar: tuple=()) -> str:
            """doc1"""

        def foo2(bar: List[1:2]) -> (lambda x: x):
            """doc2"""

        def foo3(bar: 'func'=lambda x: x) -> {1: 2}:
            """doc3"""
        '''))
        self.assertIn('doc1', msgids)
        self.assertIn('doc2', msgids)
        self.assertIn('doc3', msgids)

    def test_classdocstring_early_colon(self):
        """ Test docstring extraction for a class with colons occurring within
            the parentheses.
        """
        msgids = self.extract_docstrings_from_str(dedent('''\
        class D(L[1:2], F({1: 2}), metaclass=M(lambda x: x)):
            """doc"""
        '''))
        self.assertIn('doc', msgids)

    def test_calls_in_fstrings(self):
        # _() calls embedded in f-string replacement fields are extracted.
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"{_('foo bar')}"
        '''))
        self.assertIn('foo bar', msgids)

    def test_calls_in_fstrings_raw(self):
        msgids = self.extract_docstrings_from_str(dedent('''\
        rf"{_('foo bar')}"
        '''))
        self.assertIn('foo bar', msgids)

    def test_calls_in_fstrings_nested(self):
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"""{f'{_("foo bar")}'}"""
        '''))
        self.assertIn('foo bar', msgids)

    def test_calls_in_fstrings_attribute(self):
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"{obj._('foo bar')}"
        '''))
        self.assertIn('foo bar', msgids)

    def test_calls_in_fstrings_with_call_on_call(self):
        # Only direct calls to a name spelled _ count, not arbitrary calls.
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"{type(str)('foo bar')}"
        '''))
        self.assertNotIn('foo bar', msgids)

    def test_calls_in_fstrings_with_format(self):
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"{_('foo {bar}').format(bar='baz')}"
        '''))
        self.assertIn('foo {bar}', msgids)

    def test_calls_in_fstrings_with_wrong_input_1(self):
        # An f-string argument to _() cannot be extracted.
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"{_(f'foo {bar}')}"
        '''))
        self.assertFalse([msgid for msgid in msgids if 'foo {bar}' in msgid])

    def test_calls_in_fstrings_with_wrong_input_2(self):
        # A non-string argument to _() cannot be extracted.
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"{_(1)}"
        '''))
        self.assertNotIn(1, msgids)

    def test_calls_in_fstring_with_multiple_args(self):
        # _() with more than one argument is not a translatable call.
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"{_('foo', 'bar')}"
        '''))
        self.assertNotIn('foo', msgids)
        self.assertNotIn('bar', msgids)

    def test_calls_in_fstring_with_keyword_args(self):
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"{_('foo', bar='baz')}"
        '''))
        self.assertNotIn('foo', msgids)
        self.assertNotIn('bar', msgids)
        self.assertNotIn('baz', msgids)

    def test_calls_in_fstring_with_partially_wrong_expression(self):
        # One bad call must not suppress extraction of a good sibling call.
        msgids = self.extract_docstrings_from_str(dedent('''\
        f"{_(f'foo') + _('bar')}"
        '''))
        self.assertNotIn('foo', msgids)
        self.assertIn('bar', msgids)

    def test_files_list(self):
        """Make sure the directories are inspected for source files
           bpo-31920
        """
        text1 = 'Text to translate1'
        text2 = 'Text to translate2'
        text3 = 'Text to ignore'
        with temp_cwd(None), temp_dir(None) as sdir:
            # Regular package directory: must be scanned.
            os.mkdir(os.path.join(sdir, 'pypkg'))
            with open(os.path.join(sdir, 'pypkg', 'pymod.py'), 'w',
                      encoding='utf-8') as sfile:
                sfile.write(f'_({text1!r})')
            # Directory whose name ends in .py: still scanned as a directory.
            os.mkdir(os.path.join(sdir, 'pkg.py'))
            with open(os.path.join(sdir, 'pkg.py', 'pymod2.py'), 'w',
                      encoding='utf-8') as sfile:
                sfile.write(f'_({text2!r})')
            # CVS directories are explicitly excluded from the scan.
            os.mkdir(os.path.join(sdir, 'CVS'))
            with open(os.path.join(sdir, 'CVS', 'pymod3.py'), 'w',
                      encoding='utf-8') as sfile:
                sfile.write(f'_({text3!r})')
            assert_python_ok(self.script, sdir)
            with open('messages.pot', encoding='utf-8') as fp:
                data = fp.read()
            self.assertIn(f'msgid "{text1}"', data)
            self.assertIn(f'msgid "{text2}"', data)
            self.assertNotIn(text3, data)
78
bin/python/python-3.13/Lib/test/test_tools/test_makefile.py
Normal file
78
bin/python/python-3.13/Lib/test/test_tools/test_makefile.py
Normal file
@@ -0,0 +1,78 @@
|
||||
"""
|
||||
Tests for `Makefile`.
|
||||
"""
|
||||
|
||||
import os
|
||||
import unittest
|
||||
from test import support
|
||||
import sysconfig
|
||||
|
||||
# Path to the Makefile used to build this Python.
MAKEFILE = sysconfig.get_makefile_filename()

# These checks only make sense for a CPython built from a Makefile.
if not support.check_impl_detail(cpython=True):
    raise unittest.SkipTest('cpython only')
if not os.path.exists(MAKEFILE) or not os.path.isfile(MAKEFILE):
    raise unittest.SkipTest('Makefile could not be found')
class TestMakefile(unittest.TestCase):
    def list_test_dirs(self):
        """Return the TESTSUBDIRS entries parsed from the Makefile.

        The variable spans multiple backslash-continued lines; continuation
        lines are recognized by their leading tab.
        """
        result = []
        found_testsubdirs = False
        with open(MAKEFILE, 'r', encoding='utf-8') as f:
            for line in f:
                if line.startswith('TESTSUBDIRS='):
                    found_testsubdirs = True
                    result.append(
                        line.removeprefix('TESTSUBDIRS=').replace(
                            '\\', '',
                        ).strip(),
                    )
                    continue
                if found_testsubdirs:
                    # A line without a leading tab ends the variable.
                    if '\t' not in line:
                        break
                    result.append(line.replace('\\', '').strip())
        return result

    @unittest.skipUnless(support.TEST_MODULES_ENABLED, "requires test modules")
    def test_makefile_test_folders(self):
        # Every on-disk test subdirectory must appear in TESTSUBDIRS, and
        # TESTSUBDIRS must not list directories that do not exist.
        test_dirs = self.list_test_dirs()
        idle_test = 'idlelib/idle_test'
        self.assertIn(idle_test, test_dirs)

        used = set([idle_test])
        for dirpath, dirs, files in os.walk(support.TEST_HOME_DIR):
            dirname = os.path.basename(dirpath)
            # Skip temporary dirs:
            if dirname == '__pycache__' or dirname.startswith('.'):
                dirs.clear()  # do not process subfolders
                continue
            # Skip empty dirs:
            if not dirs and not files:
                continue
            # Skip dirs with hidden-only files:
            if files and all(filename.startswith('.') for filename in files):
                continue

            relpath = os.path.relpath(dirpath, support.STDLIB_DIR)
            with self.subTest(relpath=relpath):
                self.assertIn(
                    relpath,
                    test_dirs,
                    msg=(
                        f"{relpath!r} is not included in the Makefile's list "
                        "of test directories to install"
                    )
                )
                used.add(relpath)

        # Don't check the wheel dir when Python is built --with-wheel-pkg-dir
        if sysconfig.get_config_var('WHEEL_PKG_DIR'):
            test_dirs.remove('test/wheeldata')
            used.discard('test/wheeldata')

        # Check that there are no extra entries:
        unique_test_dirs = set(test_dirs)
        self.assertSetEqual(unique_test_dirs, used)
        self.assertEqual(len(test_dirs), len(unique_test_dirs))
@@ -0,0 +1,122 @@
|
||||
import unittest
|
||||
from test.test_tools import skip_if_missing, imports_under_tool
|
||||
from test import support
|
||||
from test.support.hypothesis_helper import hypothesis
|
||||
|
||||
# Short aliases for the hypothesis helpers used below.
st = hypothesis.strategies
given = hypothesis.given
example = hypothesis.example


# The dawg module lives in Tools/unicode; skip when that directory is
# absent, and import with it temporarily on sys.path.
skip_if_missing("unicode")
with imports_under_tool("unicode"):
    from dawg import Dawg, build_compression_dawg, lookup, inverse_lookup
@st.composite
def char_name_db(draw, min_length=1, max_length=30):
    """Hypothesis strategy: a random (name, character) database.

    Draws m unique names over the alphabet 'abcd' and m unique characters,
    then pairs them up into a list of 2-tuples.
    """
    m = draw(st.integers(min_value=min_length, max_value=max_length))
    names = draw(
        st.sets(st.text("abcd", min_size=1, max_size=10), min_size=m, max_size=m)
    )
    characters = draw(st.sets(st.characters(), min_size=m, max_size=m))
    return list(zip(names, characters))
class TestDawg(unittest.TestCase):
    """Tests for the directed acyclic word graph data structure that is used
    to store the unicode character names in unicodedata. Tests ported from PyPy
    """

    def test_dawg_direct_simple(self):
        # Build a small dawg by hand and check forward and inverse lookups.
        dawg = Dawg()
        dawg.insert("a", -4)
        dawg.insert("c", -2)
        dawg.insert("cat", -1)
        dawg.insert("catarr", 0)
        dawg.insert("catnip", 1)
        dawg.insert("zcatnip", 5)
        packed, data, inverse = dawg.finish()

        self.assertEqual(lookup(packed, data, b"a"), -4)
        self.assertEqual(lookup(packed, data, b"c"), -2)
        self.assertEqual(lookup(packed, data, b"cat"), -1)
        self.assertEqual(lookup(packed, data, b"catarr"), 0)
        self.assertEqual(lookup(packed, data, b"catnip"), 1)
        self.assertEqual(lookup(packed, data, b"zcatnip"), 5)
        # Missing keys, prefixes and extensions must all raise KeyError.
        self.assertRaises(KeyError, lookup, packed, data, b"b")
        self.assertRaises(KeyError, lookup, packed, data, b"catni")
        self.assertRaises(KeyError, lookup, packed, data, b"catnipp")

        self.assertEqual(inverse_lookup(packed, inverse, -4), b"a")
        self.assertEqual(inverse_lookup(packed, inverse, -2), b"c")
        self.assertEqual(inverse_lookup(packed, inverse, -1), b"cat")
        self.assertEqual(inverse_lookup(packed, inverse, 0), b"catarr")
        self.assertEqual(inverse_lookup(packed, inverse, 1), b"catnip")
        self.assertEqual(inverse_lookup(packed, inverse, 5), b"zcatnip")
        self.assertRaises(KeyError, inverse_lookup, packed, inverse, 12)

    def test_forbid_empty_dawg(self):
        # finish() on a dawg with no entries must be rejected.
        dawg = Dawg()
        self.assertRaises(ValueError, dawg.finish)

    # The explicit @example databases below are past regression cases.
    @given(char_name_db())
    @example([("abc", "a"), ("abd", "b")])
    @example(
        [
            ("bab", "1"),
            ("a", ":"),
            ("ad", "@"),
            ("b", "<"),
            ("aacc", "?"),
            ("dab", "D"),
            ("aa", "0"),
            ("ab", "F"),
            ("aaa", "7"),
            ("cbd", "="),
            ("abad", ";"),
            ("ac", "B"),
            ("abb", "4"),
            ("bb", "2"),
            ("aab", "9"),
            ("caaaaba", "E"),
            ("ca", ">"),
            ("bbaaa", "5"),
            ("d", "3"),
            ("baac", "8"),
            ("c", "6"),
            ("ba", "A"),
        ]
    )
    @example(
        [
            ("bcdac", "9"),
            ("acc", "g"),
            ("d", "d"),
            ("daabdda", "0"),
            ("aba", ";"),
            ("c", "6"),
            ("aa", "7"),
            ("abbd", "c"),
            ("badbd", "?"),
            ("bbd", "f"),
            ("cc", "@"),
            ("bb", "8"),
            ("daca", ">"),
            ("ba", ":"),
            ("baac", "3"),
            ("dbdddac", "a"),
            ("a", "2"),
            ("cabd", "b"),
            ("b", "="),
            ("abd", "4"),
            ("adcbd", "5"),
            ("abc", "e"),
            ("ab", "1"),
        ]
    )
    def test_dawg(self, data):
        # suppress debug prints
        with support.captured_stdout() as output:
            # it's enough to build it, building will also check the result
            build_compression_dawg(data)
35
bin/python/python-3.13/Lib/test/test_tools/test_reindent.py
Normal file
35
bin/python/python-3.13/Lib/test/test_tools/test_reindent.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""Tests for scripts in the Tools directory.
|
||||
|
||||
This file contains regression tests for some of the scripts found in the
|
||||
Tools directory of a Python checkout or tarball, such as reindent.py.
|
||||
"""
|
||||
|
||||
import os
|
||||
import unittest
|
||||
from test.support.script_helper import assert_python_ok
|
||||
from test.support import findfile
|
||||
|
||||
from test.test_tools import toolsdir, skip_if_missing
|
||||
|
||||
# These tests run Tools/patchcheck/reindent.py; skip without a Tools dir.
skip_if_missing()
class ReindentTests(unittest.TestCase):
    """Regression tests for Tools/patchcheck/reindent.py."""

    script = os.path.join(toolsdir, 'patchcheck', 'reindent.py')

    def test_noargs(self):
        # Running the script without arguments must not fail.
        assert_python_ok(self.script)

    def test_help(self):
        # The usage text is written to stderr, leaving stdout empty.
        status, stdout, stderr = assert_python_ok(self.script, '-h')
        self.assertEqual(stdout, b'')
        self.assertGreater(stderr, b'')

    def test_reindent_file_with_bad_encoding(self):
        # A file with a bogus coding cookie is reported on stderr,
        # not crashed on.
        bad_path = findfile('bad_coding.py', subdir='tokenizedata')
        status, stdout, stderr = assert_python_ok(self.script, '-r', bad_path)
        self.assertEqual(stdout, b'')
        self.assertNotEqual(stderr, b'')
# Allow running this file directly as a script.
if __name__ == '__main__':
    unittest.main()
30
bin/python/python-3.13/Lib/test/test_tools/test_sundry.py
Normal file
30
bin/python/python-3.13/Lib/test/test_tools/test_sundry.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""Tests for scripts in the Tools/scripts directory.
|
||||
|
||||
This file contains extremely basic regression tests for the scripts found in
|
||||
the Tools directory of a Python checkout or tarball which don't have separate
|
||||
tests of their own.
|
||||
"""
|
||||
|
||||
import os
|
||||
import unittest
|
||||
from test.support import import_helper
|
||||
|
||||
from test.test_tools import scriptsdir, import_tool, skip_if_missing
|
||||
|
||||
# These tests import modules from Tools/scripts; skip without a Tools dir.
skip_if_missing()
class TestSundryScripts(unittest.TestCase):
    # import logging registers "atfork" functions which keep indirectly the
    # logging module dictionary alive. Mock the function to be able to unload
    # cleanly the logging module.
    @import_helper.mock_register_at_fork
    def test_sundry(self, mock_os):
        """Smoke test: every Tools/scripts/*.py module must import cleanly."""
        for filename in os.listdir(scriptsdir):
            if filename.endswith('.py'):
                import_tool(filename[:-3])
# Allow running this file directly as a script.
if __name__ == '__main__':
    unittest.main()
Reference in New Issue
Block a user