File test_compileall2.py of Package python-compileall2
import sys
import compileall2 as compileall
import contextlib
import importlib.util
import test.test_importlib.util
import marshal
import os
import pathlib
import py_compile
import shutil
import struct
import tempfile
import time
import unittest
import io
import functools
import filecmp

from unittest import mock, skipUnless
from concurrent.futures import ProcessPoolExecutor

try:
    # compileall relies on ProcessPoolExecutor if ProcessPoolExecutor exists
    # and it can function.
    from concurrent.futures.process import _check_system_limits
    _check_system_limits()
    _have_multiprocessing = True
except NotImplementedError:
    _have_multiprocessing = False

from test import support

try:
    # Python >= 3.5
    from test.support import script_helper
except ImportError:
    # Python 3.4
    from test import script_helper

try:
    # Python < 3.10
    from test.support import skip_unless_symlink
    from test.support import unlink
    from test.support import rmtree
    from test.support import EnvironmentVarGuard
except ImportError:
    # Python >= 3.10
    from test.support.os_helper import skip_unless_symlink
    from test.support.os_helper import unlink
    from test.support.os_helper import rmtree
    from test.support.os_helper import EnvironmentVarGuard


# Backported from subprocess/test.support module for Python <= 3.5
def _optim_args_from_interpreter_flags():
    """Return a list of command-line arguments reproducing the current
    optimization settings in sys.flags."""
    args = []
    value = sys.flags.optimize
    if value > 0:
        args.append('-' + 'O' * value)
    return args


# Backported from Lib/test/test_importlib/util.py
# Added in: https://github.com/python/cpython/pull/18676/files
def get_code_from_pyc(pyc_path):
    """Reads a pyc file and returns the unmarshalled code object within.

    No header validation is performed.
    """
    with open(pyc_path, 'rb') as pyc_f:
        if compileall.PY37:
            pyc_f.seek(16)
        else:
            pyc_f.seek(12)
        return marshal.load(pyc_f)


# Use the original one for Python > 3.5 and backported function otherwise
if compileall.PY36:
    optim_args_from_interpreter_flags = support.optim_args_from_interpreter_flags
else:
    optim_args_from_interpreter_flags = _optim_args_from_interpreter_flags


# Backported from Lib/test/test_py_compile.py
def without_source_date_epoch(fxn):
    """Runs function with SOURCE_DATE_EPOCH unset."""
    @functools.wraps(fxn)
    def wrapper(*args, **kwargs):
        with EnvironmentVarGuard() as env:
            env.unset('SOURCE_DATE_EPOCH')
            return fxn(*args, **kwargs)
    return wrapper


# Backported from Lib/test/test_py_compile.py
def with_source_date_epoch(fxn):
    """Runs function with SOURCE_DATE_EPOCH set."""
    @functools.wraps(fxn)
    def wrapper(*args, **kwargs):
        with EnvironmentVarGuard() as env:
            env['SOURCE_DATE_EPOCH'] = '123456789'
            return fxn(*args, **kwargs)
    return wrapper


# Run tests with SOURCE_DATE_EPOCH set or unset explicitly.
# Backported from Lib/test/test_py_compile.py
class SourceDateEpochTestMeta(type(unittest.TestCase)):
    def __new__(mcls, name, bases, dct, *, source_date_epoch):
        cls = super().__new__(mcls, name, bases, dct)

        for attr in dir(cls):
            if attr.startswith('test_'):
                meth = getattr(cls, attr)
                if source_date_epoch:
                    wrapper = with_source_date_epoch(meth)
                else:
                    wrapper = without_source_date_epoch(meth)
                setattr(cls, attr, wrapper)

        return cls

    # This is actually noop but has to be there because Python <= 3.5
    # doesn't support passing keyword arguments to type.__init__() method
    def __init__(mcls, name, bases, dct, source_date_epoch, **kwargs):
        super().__init__(name, bases, dct)


class CompileallTestsBase:

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.source_path = os.path.join(self.directory, '_test.py')
        self.bc_path = importlib.util.cache_from_source(self.source_path)
        with open(self.source_path, 'w', encoding="utf-8") as file:
            file.write('x = 123\n')
        self.source_path2 = os.path.join(self.directory, '_test2.py')
        self.bc_path2 = importlib.util.cache_from_source(self.source_path2)
        shutil.copyfile(self.source_path, self.source_path2)
        self.subdirectory = os.path.join(self.directory, '_subdir')
        os.mkdir(self.subdirectory)
        self.source_path3 = os.path.join(self.subdirectory, '_test3.py')
        shutil.copyfile(self.source_path, self.source_path3)

    def tearDown(self):
        shutil.rmtree(self.directory)

    def add_bad_source_file(self):
        self.bad_source_path = os.path.join(self.directory, '_test_bad.py')
        with open(self.bad_source_path, 'w', encoding="utf-8") as file:
            file.write('x (\n')

    def timestamp_metadata(self):
        with open(self.bc_path, 'rb') as file:
            data = file.read(compileall.pyc_header_lenght)
        mtime = int(os.stat(self.source_path).st_mtime)
        compare = struct.pack(*(compileall.pyc_header_format +
                                (mtime & 0xFFFF_FFFF,)))
        return data, compare

    def test_year_2038_mtime_compilation(self):
        # Test to make sure we can handle mtimes larger than what a 32-bit
        # signed number can hold as part of bpo-34990
        try:
            os.utime(self.source_path, (2**32 - 1, 2**32 - 1))
        except (OverflowError, OSError):
            self.skipTest("filesystem doesn't support timestamps near 2**32")
        self.assertTrue(compileall.compile_file(self.source_path))

    def test_larger_than_32_bit_times(self):
        # This is similar to the test above but we skip it if the OS doesn't
        # support modification times larger than 32-bits.
        try:
            os.utime(self.source_path, (2**35, 2**35))
        except (OverflowError, OSError):
            self.skipTest("filesystem doesn't support large timestamps")
        self.assertTrue(compileall.compile_file(self.source_path))

    def recreation_check(self, metadata):
        """Check that compileall recreates bytecode when the new metadata is
        used."""
        if os.environ.get('SOURCE_DATE_EPOCH'):
            raise unittest.SkipTest('SOURCE_DATE_EPOCH is set')
        py_compile.compile(self.source_path)
        self.assertEqual(*self.timestamp_metadata())
        with open(self.bc_path, 'rb') as file:
            bc = file.read()[len(metadata):]
        with open(self.bc_path, 'wb') as file:
            file.write(metadata)
            file.write(bc)
        self.assertNotEqual(*self.timestamp_metadata())
        compileall.compile_dir(self.directory, force=False, quiet=True)
        self.assertTrue(*self.timestamp_metadata())

    def test_mtime(self):
        # Test a change in mtime leads to a new .pyc.
        self.recreation_check(struct.pack(*(compileall.pyc_header_format +
                                            (1,))))

    def test_magic_number(self):
        # Test a change in magic number leads to a new .pyc.
        self.recreation_check(b'\0\0\0\0')

    def test_compile_files(self):
        # Test compiling a single file, and complete directory
        for fn in (self.bc_path, self.bc_path2):
            try:
                os.unlink(fn)
            except:
                pass
        self.assertTrue(compileall.compile_file(self.source_path,
                                                force=False, quiet=True))
        self.assertTrue(os.path.isfile(self.bc_path) and
                        not os.path.isfile(self.bc_path2))
        os.unlink(self.bc_path)
        self.assertTrue(compileall.compile_dir(self.directory, force=False,
                                               quiet=True))
        self.assertTrue(os.path.isfile(self.bc_path) and
                        os.path.isfile(self.bc_path2))
        os.unlink(self.bc_path)
        os.unlink(self.bc_path2)
        # Test against bad files
        self.add_bad_source_file()
        self.assertFalse(compileall.compile_file(self.bad_source_path,
                                                 force=False, quiet=2))
        self.assertFalse(compileall.compile_dir(self.directory,
                                                force=False, quiet=2))

    def test_compile_file_pathlike(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        # we should also test the output
        with support.captured_stdout() as stdout:
            self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path)))
        self.assertRegex(stdout.getvalue(), r'Compiling ([^WindowsPath|PosixPath].*)')
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_file_pathlike_ddir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
                                                ddir=pathlib.Path('ddir_path'),
                                                quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_file_pathlike_stripdir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
                                                stripdir=pathlib.Path('stripdir_path'),
                                                quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_file_pathlike_prependdir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
                                                prependdir=pathlib.Path('prependdir_path'),
                                                quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_path(self):
        with test.test_importlib.util.import_state(path=[self.directory]):
            self.assertTrue(compileall.compile_path(quiet=2))

        with test.test_importlib.util.import_state(path=[self.directory]):
            self.add_bad_source_file()
            self.assertFalse(compileall.compile_path(skip_curdir=False,
                                                     force=True, quiet=2))

    def test_no_pycache_in_non_package(self):
        # Bug 8563 reported that __pycache__ directories got created by
        # compile_file() for non-.py files.
        data_dir = os.path.join(self.directory, 'data')
        data_file = os.path.join(data_dir, 'file')
        os.mkdir(data_dir)
        # touch data/file
        with open(data_file, 'wb'):
            pass
        compileall.compile_file(data_file)
        self.assertFalse(os.path.exists(os.path.join(data_dir, '__pycache__')))

    def test_compile_file_encoding_fallback(self):
        # Bug 44666 reported that compile_file failed when sys.stdout.encoding is None
        self.add_bad_source_file()
        with contextlib.redirect_stdout(io.StringIO()):
            self.assertFalse(compileall.compile_file(self.bad_source_path))

    def test_optimize(self):
        # make sure compiling with different optimization settings than the
        # interpreter's creates the correct file names
        optimize, opt = (1, 1) if __debug__ else (0, '')
        opt_kwarg = compileall.optimization_kwarg(opt)
        compileall.compile_dir(self.directory, quiet=True, optimize=optimize)
        cached = importlib.util.cache_from_source(self.source_path,
                                                  **opt_kwarg)
        self.assertTrue(os.path.isfile(cached))
        cached2 = importlib.util.cache_from_source(self.source_path2,
                                                   **opt_kwarg)
        self.assertTrue(os.path.isfile(cached2))
        cached3 = importlib.util.cache_from_source(self.source_path3,
                                                   **opt_kwarg)
        self.assertTrue(os.path.isfile(cached3))

    def test_compile_dir_pathlike(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        with support.captured_stdout() as stdout:
            compileall.compile_dir(pathlib.Path(self.directory))
        line = stdout.getvalue().splitlines()[0]
        self.assertRegex(line, r'Listing ([^WindowsPath|PosixPath].*)')
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_dir_pathlike_stripdir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_dir(pathlib.Path(self.directory),
                                               stripdir=pathlib.Path('stripdir_path'),
                                               quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_dir_pathlike_prependdir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_dir(pathlib.Path(self.directory),
                                               prependdir=pathlib.Path('prependdir_path'),
                                               quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    def test_compile_pool_called(self, pool_mock):
        compileall.compile_dir(self.directory, quiet=True, workers=5)
        self.assertTrue(pool_mock.called)

    def test_compile_workers_non_positive(self):
        with self.assertRaisesRegex(ValueError,
                                    "workers must be greater or equal to 0"):
            compileall.compile_dir(self.directory, workers=-1)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    def test_compile_workers_cpu_count(self, pool_mock):
        compileall.compile_dir(self.directory, quiet=True, workers=0)
        self.assertEqual(pool_mock.call_args[1]['max_workers'], None)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    @mock.patch('compileall2.compile_file')
    def test_compile_one_worker(self, compile_file_mock, pool_mock):
        compileall.compile_dir(self.directory, quiet=True)
        self.assertFalse(pool_mock.called)
        self.assertTrue(compile_file_mock.called)

    @mock.patch('concurrent.futures.ProcessPoolExecutor', new=None)
    @mock.patch('compileall2.compile_file')
    def test_compile_missing_multiprocessing(self, compile_file_mock):
        compileall.compile_dir(self.directory, quiet=True, workers=5)
        self.assertTrue(compile_file_mock.called)

    def test_compile_dir_maxlevels(self):
        # Test the actual impact of maxlevels parameter
        depth = 3
        path = self.directory
        for i in range(1, depth + 1):
            path = os.path.join(path, "dir_{}".format(i))
            source = os.path.join(path, 'script.py')
            os.mkdir(path)
            shutil.copyfile(self.source_path, source)
        pyc_filename = importlib.util.cache_from_source(source)
        compileall.compile_dir(self.directory, quiet=True, maxlevels=depth - 1)
        self.assertFalse(os.path.isfile(pyc_filename))
        compileall.compile_dir(self.directory, quiet=True, maxlevels=depth)
        self.assertTrue(os.path.isfile(pyc_filename))

    def _test_ddir_only(self, *, ddir, parallel=True):
        """Recursive compile_dir ddir must contain package paths; bpo39769."""
        fullpath = ["test", "foo"]
        path = self.directory
        mods = []
        for subdir in fullpath:
            path = os.path.join(path, subdir)
            os.mkdir(path)
            script_helper.make_script(path, "__init__", "")
            mods.append(script_helper.make_script(path, "mod",
                                                  "def fn(): 1/0\nfn()\n"))
        compileall.compile_dir(
            self.directory, quiet=True, ddir=ddir,
            workers=2 if parallel else 1)
        self.assertTrue(mods)
        for mod in mods:
            self.assertTrue(mod.startswith(self.directory), mod)
            modcode = importlib.util.cache_from_source(mod)
            modpath = mod[len(self.directory+os.sep):]
            _, _, err = script_helper.assert_python_failure(modcode)
            expected_in = os.path.join(ddir, modpath)
            mod_code_obj = get_code_from_pyc(modcode)
            self.assertEqual(mod_code_obj.co_filename, expected_in)
            self.assertIn('"{}"'.format(expected_in), os.fsdecode(err))

    def test_ddir_only_one_worker(self):
        """Recursive compile_dir ddir= contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="<a prefix>", parallel=False)

    def test_ddir_multiple_workers(self):
        """Recursive compile_dir ddir= contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="<a prefix>", parallel=True)

    def test_ddir_empty_only_one_worker(self):
        """Recursive compile_dir ddir='' contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="", parallel=False)

    def test_ddir_empty_multiple_workers(self):
        """Recursive compile_dir ddir='' contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="", parallel=True)

    def test_strip_only(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *fullpath[:2])
        compileall.compile_dir(path, quiet=True, stripdir=stripdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(*fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_strip_only_invalid(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *(fullpath[:2] + ['fake']))
        compileall.compile_dir(path, quiet=True, stripdir=stripdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_not_in = os.path.join(self.directory, *fullpath[2:])
        self.assertIn(
            path,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            expected_not_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_prepend_only(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        prependdir = "/foo"
        compileall.compile_dir(path, quiet=True, prependdir=prependdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(prependdir, self.directory, *fullpath)
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_strip_and_prepend(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *fullpath[:2])
        prependdir = "/foo"
        compileall.compile_dir(path, quiet=True,
                               stripdir=stripdir, prependdir=prependdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(prependdir, *fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_strip_prepend_and_ddir(self):
        fullpath = ["test", "build", "real", "path", "ddir"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script_helper.make_script(path, "test", "1 / 0")
        with self.assertRaises(ValueError):
            compileall.compile_dir(path, quiet=True, ddir="/bar",
                                   stripdir="/foo", prependdir="/bar")

    def test_multiple_optimization_levels(self):
        script = script_helper.make_script(self.directory,
                                           "test_optimization",
                                           "a = 0")
        bc = []
        for opt_level in "", 1, 2, 3:
            opt_kwarg = compileall.optimization_kwarg(opt_level)
            bc.append(importlib.util.cache_from_source(script, **opt_kwarg))
        test_combinations = [[0, 1], [1, 2], [0, 2], [0, 1, 2]]
        for opt_combination in test_combinations:
            compileall.compile_file(script, quiet=True,
                                    optimize=opt_combination)
            for opt_level in opt_combination:
                self.assertTrue(os.path.isfile(bc[opt_level]))
            for bc_file in bc:
                try:
                    os.unlink(bc_file)
                except Exception:
                    pass

    @skip_unless_symlink
    def test_ignore_symlink_destination(self):
        # Create folders for allowed files, symlinks and prohibited area
        allowed_path = os.path.join(self.directory, "test", "dir", "allowed")
        symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks")
        prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited")
        os.makedirs(allowed_path)
        os.makedirs(symlinks_path)
        os.makedirs(prohibited_path)

        # Create scripts and symlinks and remember their byte-compiled versions
        allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0")
        prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0")
        allowed_symlink = os.path.join(symlinks_path, "test_allowed.py")
        prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py")
        os.symlink(allowed_script, allowed_symlink)
        os.symlink(prohibited_script, prohibited_symlink)
        allowed_bc = importlib.util.cache_from_source(allowed_symlink)
        prohibited_bc = importlib.util.cache_from_source(prohibited_symlink)

        compileall.compile_dir(symlinks_path, quiet=True, limit_sl_dest=allowed_path)

        self.assertTrue(os.path.isfile(allowed_bc))
        self.assertFalse(os.path.isfile(prohibited_bc))

    def test_hardlink_deduplication_bad_args(self):
        # Bad arguments combination, hardlink deduplication make sense
        # only for more than one optimization level
        with self.assertRaises(ValueError):
            compileall.compile_dir(self.directory, quiet=True, optimize=0,
                                   hardlink_dupes=True)

    def test_hardlink_deduplication_same_bytecode_all_opt(self):
        # 'a = 0' produces the same bytecode for all optimization levels
        path = os.path.join(self.directory, "test", "same_all")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "test_same_bytecode", "a = 0")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        compileall.compile_dir(path, quiet=True,
                               optimize=[0, 1, 2], hardlink_dupes=True)
        # import pdb; pdb.set_trace()

        # All three files should have the same inode (hardlinks)
        self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        self.assertEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

        for pyc_file in {pyc_opt0, pyc_opt1, pyc_opt2}:
            os.unlink(pyc_file)

        compileall.compile_dir(path, quiet=True,
                               optimize=[0, 1, 2], hardlink_dupes=False)

        # Deduplication disabled, all pyc files should have different inodes
        self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

    def test_hardlink_deduplication_same_bytecode_some_opt(self):
        # 'a = 0' produces the same bytecode for all optimization levels
        # only two levels of optimization [0, 1] tested
        path = os.path.join(self.directory, "test", "same_some")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "test_same_bytecode", "a = 0")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        compileall.compile_dir(path, quiet=True,
                               optimize=[0, 2], hardlink_dupes=True)

        # Both files should have the same inode (hardlink)
        self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt2).st_ino)

        for pyc_file in {pyc_opt0, pyc_opt2}:
            os.unlink(pyc_file)

        compileall.compile_dir(path, quiet=True, force=True,
                               optimize=[0, 2], hardlink_dupes=False)

        # Deduplication disabled, both pyc files should have different inodes
        self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt2).st_ino)

    def test_hardlink_deduplication_same_bytecode_some_opt_2(self):
        # 'a = 0' produces the same bytecode for all optimization levels
        path = os.path.join(self.directory, "test", "same_some_2")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "test_same_bytecode", "a = 0")
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        compileall.compile_dir(path, quiet=True,
                               optimize=[1, 2], hardlink_dupes=True)

        # Both files should have the same inode (hardlinks)
        self.assertEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

        for pyc_file in {pyc_opt1, pyc_opt2}:
            os.unlink(pyc_file)

        compileall.compile_dir(path, quiet=True, optimize=[1, 2])

        # Deduplication disabled, all pyc files should have different inodes
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)
        else:
            self.assertEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

    def test_hardlink_deduplication_different_bytecode_all_opt(self):
        # "'''string'''\nassert 1" produces a different bytecode for all optimization levels
        path = os.path.join(self.directory, "test", "different_all")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "test_different_bytecode",
                                                  "'''string'''\nassert 1")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        compileall.compile_dir(path, quiet=True,
                               optimize=[0, 1, 2], hardlink_dupes=True)

        # No hardlinks, bytecodes are different
        self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

        for pyc_file in {pyc_opt0, pyc_opt1, pyc_opt2}:
            os.unlink(pyc_file)

        compileall.compile_dir(path, quiet=True,
                               optimize=[0, 1, 2], hardlink_dupes=False)

        # Disabling hardlink deduplication makes no difference
        self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

    def test_hardlink_deduplication_different_bytecode_one_hardlink(self):
        # "'''string'''\na = 1" produces the same bytecode only for level 0 and 1
        path = os.path.join(self.directory, "test", "different_one")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "test_different_bytecode",
                                                  "'''string'''\na = 1")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        compileall.compile_dir(path, quiet=True,
                               optimize=[0, 1, 2], hardlink_dupes=True)

        # Only level 0 and 1 has the same inode, level 2 produces a different bytecode
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)
        else:
            self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)

        for pyc_file in {pyc_opt0, pyc_opt1, pyc_opt2}:
            os.unlink(pyc_file)

        compileall.compile_dir(path, quiet=True,
                               optimize=[0, 1, 2], hardlink_dupes=False)

        # Deduplication disabled, no hardlinks
        self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

    def test_hardlink_deduplication_recompilation(self):
        path = os.path.join(self.directory, "test", "module_change")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "module_change", "a = 0")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        compileall.compile_dir(path, quiet=True,
                               optimize=[0, 1, 2], hardlink_dupes=True)

        # All three levels have the same inode
        self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        self.assertEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)
        previous_inode = os.stat(pyc_opt0).st_ino

        # Change of the module content
        simple_script = script_helper.make_script(path, "module_change", "print(0)")

        # Recompilation without -o 1
        compileall.compile_dir(path, force=True, quiet=True,
                               optimize=[0, 2], hardlink_dupes=True)

        # opt-1.pyc should have the same inode as before and others should not
        if compileall.PY35:
            self.assertEqual(previous_inode, os.stat(pyc_opt1).st_ino)
        self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt2).st_ino)
        self.assertNotEqual(previous_inode, os.stat(pyc_opt2).st_ino)

        # opt-1.pyc and opt-2.pyc have different content
        if compileall.PY35:
            self.assertFalse(filecmp.cmp(pyc_opt1, pyc_opt2, shallow=True))

    def test_hardlink_deduplication_import(self):
        path = os.path.join(self.directory, "test", "module_import")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "module", "a = 0")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        compileall.compile_dir(path, quiet=True,
                               optimize=[0, 1, 2], hardlink_dupes=True)

        # All three levels have the same inode
        self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        self.assertEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)
        previous_inode = os.stat(pyc_opt0).st_ino

        # Change of the module content
        simple_script = script_helper.make_script(path, "module", "print(0)")

        # Import the module in Python
        script_helper.assert_python_ok(
            "-O", "-c", "import module", __isolated=False, PYTHONPATH=path
        )

        # Only opt-1.pyc is changed
        self.assertEqual(previous_inode, os.stat(pyc_opt0).st_ino)
        if compileall.PY35:
            self.assertEqual(previous_inode, os.stat(pyc_opt2).st_ino)
            self.assertNotEqual(os.stat(pyc_opt1).st_ino,
                                os.stat(pyc_opt2).st_ino)

        # opt-1.pyc and opt-2.pyc have different content
        if compileall.PY35:
            self.assertFalse(filecmp.cmp(pyc_opt1, pyc_opt2, shallow=True))


class CompileallTestsWithSourceEpoch(CompileallTestsBase,
                                     unittest.TestCase,
                                     metaclass=SourceDateEpochTestMeta,
                                     source_date_epoch=True):
    pass


class CompileallTestsWithoutSourceEpoch(CompileallTestsBase,
                                        unittest.TestCase,
                                        metaclass=SourceDateEpochTestMeta,
                                        source_date_epoch=False):
    pass


class EncodingTest(unittest.TestCase):
    """Issue 6716: compileall should escape source code when printing errors
    to stdout."""

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.source_path = os.path.join(self.directory, '_test.py')
        with open(self.source_path, 'w', encoding='utf-8') as file:
            file.write('# -*- coding: utf-8 -*-\n')
            file.write('print u"\u20ac"\n')

    def tearDown(self):
        shutil.rmtree(self.directory)

    def test_error(self):
        try:
            orig_stdout = sys.stdout
            sys.stdout = io.TextIOWrapper(io.BytesIO(), encoding='ascii')
            compileall.compile_dir(self.directory)
        finally:
            sys.stdout = orig_stdout


class CommandLineTestsBase:
    """Test compileall's CLI."""

    @classmethod
    def setUpClass(cls):
        for path in filter(os.path.isdir, sys.path):
            directory_created = False
            directory = pathlib.Path(path) / '__pycache__'
            path = directory / 'test.try'
            try:
                if not directory.is_dir():
                    directory.mkdir()
                    directory_created = True
                path.write_text('# for test_compileall', encoding="utf-8")
            except OSError:
                sys_path_writable = False
                break
            finally:
                unlink(str(path))
                if directory_created:
                    directory.rmdir()
        else:
            sys_path_writable = True
        cls._sys_path_writable = sys_path_writable

    def _skip_if_sys_path_not_writable(self):
        if not self._sys_path_writable:
            raise unittest.SkipTest('not all entries on sys.path are writable')

    def _get_run_args(self, args):
        return [o for o in optim_args_from_interpreter_flags()] + \
               ['-m', 'compileall2'] + \
               [a for a in args]

    def assertRunOK(self, *args, **env_vars):
        rc, out, err = script_helper.assert_python_ok(
            *self._get_run_args(args), __isolated=False, **env_vars)
        self.assertEqual(b'', err)
        return out

    def assertRunNotOK(self, *args, **env_vars):
        rc, out, err = script_helper.assert_python_failure(
            *self._get_run_args(args), __isolated=False, **env_vars)
        return rc, out, err

    def assertCompiled(self, fn):
        path = importlib.util.cache_from_source(fn)
        self.assertTrue(os.path.exists(path))

    def assertNotCompiled(self, fn):
        path = importlib.util.cache_from_source(fn)
        self.assertFalse(os.path.exists(path))

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.addCleanup(rmtree, self.directory)
        self.pkgdir = os.path.join(self.directory, 'foo')
        os.mkdir(self.pkgdir)
        self.pkgdir_cachedir = os.path.join(self.pkgdir, '__pycache__')
        # Create the __init__.py and a package module.
        self.initfn = script_helper.make_script(self.pkgdir, '__init__', '')
        self.barfn = script_helper.make_script(self.pkgdir, 'bar', '')

    def test_no_args_compiles_path(self):
        # Note that -l is implied for the no args case.
        self._skip_if_sys_path_not_writable()
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        self.assertRunOK(PYTHONPATH=self.directory)
        self.assertCompiled(bazfn)
        self.assertNotCompiled(self.initfn)
        self.assertNotCompiled(self.barfn)

    @without_source_date_epoch  # timestamp invalidation test
    def test_no_args_respects_force_flag(self):
        self._skip_if_sys_path_not_writable()
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        self.assertRunOK(PYTHONPATH=self.directory)
        pycpath = importlib.util.cache_from_source(bazfn)
        # Set atime/mtime backward to avoid file timestamp resolution issues
        os.utime(pycpath, (time.time()-60,)*2)
        mtime = os.stat(pycpath).st_mtime
        # Without force, no recompilation
        self.assertRunOK(PYTHONPATH=self.directory)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertEqual(mtime, mtime2)
        # Now force it.
        self.assertRunOK('-f', PYTHONPATH=self.directory)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertNotEqual(mtime, mtime2)

    def test_no_args_respects_quiet_flag(self):
        self._skip_if_sys_path_not_writable()
        script_helper.make_script(self.directory, 'baz', '')
        noisy = self.assertRunOK(PYTHONPATH=self.directory)
        self.assertIn(b'Listing ', noisy)
        quiet = self.assertRunOK('-q', PYTHONPATH=self.directory)
        self.assertNotIn(b'Listing ', quiet)

    # Ensure that the default behavior of compileall's CLI is to create
    # PEP 3147/PEP 488 pyc files.
    for name, ext, switch in [
        ('normal', 'pyc', []),
        ('optimize', 'opt-1.pyc', ['-O']),
        ('doubleoptimize', 'opt-2.pyc', ['-OO']),
    ]:
        def f(self, ext=ext, switch=switch):
            if not compileall.PY35:
                raise unittest.SkipTest("Python 3.4 generates .pyo files")
            script_helper.assert_python_ok(*(switch +
                ['-m', 'compileall2', '-q', self.pkgdir]), __isolated=False)
            # Verify the __pycache__ directory contents.
            self.assertTrue(os.path.exists(self.pkgdir_cachedir))
            expected = sorted(base.format(sys.implementation.cache_tag, ext)
                              for base in ('__init__.{}.{}', 'bar.{}.{}'))
            self.assertEqual(sorted(os.listdir(self.pkgdir_cachedir)), expected)
            # Make sure there are no .pyc files in the source directory.
            self.assertFalse([fn for fn in os.listdir(self.pkgdir)
                              if fn.endswith(ext)])
        locals()['test_pep3147_paths_' + name] = f

    for name, ext, switch in [
        ('normal', 'pyc', []),
        ('optimize', 'pyo', ['-O']),
    ]:
        def f(self, ext=ext, switch=switch):
            if compileall.PY35:
                raise unittest.SkipTest("Python 3.4 only test")
            script_helper.assert_python_ok(*(switch +
                ['-m', 'compileall2', '-q', self.pkgdir]), __isolated=False)
            # Verify the __pycache__ directory contents.
            self.assertTrue(os.path.exists(self.pkgdir_cachedir))
            expected = sorted(base.format(sys.implementation.cache_tag, ext)
                              for base in ('__init__.{}.{}', 'bar.{}.{}'))
            self.assertEqual(sorted(os.listdir(self.pkgdir_cachedir)), expected)
            # Make sure there are no .pyc files in the source directory.
            self.assertFalse([fn for fn in os.listdir(self.pkgdir)
                              if fn.endswith(ext)])
        locals()['test_python34_pyo_files_' + name] = f

    def test_legacy_paths(self):
        # Ensure that with the proper switch, compileall leaves legacy
        # pyc files, and no __pycache__ directory.
        self.assertRunOK('-b', '-q', self.pkgdir)
        # Verify the __pycache__ directory contents.
        self.assertFalse(os.path.exists(self.pkgdir_cachedir))
        expected = sorted(['__init__.py', '__init__.pyc', 'bar.py',
                           'bar.pyc'])
        self.assertEqual(sorted(os.listdir(self.pkgdir)), expected)

    def test_multiple_runs(self):
        # Bug 8527 reported that multiple calls produced empty
        # __pycache__/__pycache__ directories.
        self.assertRunOK('-q', self.pkgdir)
        # Verify the __pycache__ directory contents.
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        cachecachedir = os.path.join(self.pkgdir_cachedir, '__pycache__')
        self.assertFalse(os.path.exists(cachecachedir))
        # Call compileall again.
        self.assertRunOK('-q', self.pkgdir)
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        self.assertFalse(os.path.exists(cachecachedir))

    @without_source_date_epoch  # timestamp invalidation test
    def test_force(self):
        self.assertRunOK('-q', self.pkgdir)
        pycpath = importlib.util.cache_from_source(self.barfn)
        # set atime/mtime backward to avoid file timestamp resolution issues
        os.utime(pycpath, (time.time()-60,)*2)
        mtime = os.stat(pycpath).st_mtime
        # without force, no recompilation
        self.assertRunOK('-q', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertEqual(mtime, mtime2)
        # now force it.
        self.assertRunOK('-q', '-f', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertNotEqual(mtime, mtime2)

    def test_recursion_control(self):
        subpackage = os.path.join(self.pkgdir, 'spam')
        os.mkdir(subpackage)
        subinitfn = script_helper.make_script(subpackage, '__init__', '')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        self.assertRunOK('-q', '-l', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(os.path.exists(os.path.join(subpackage, '__pycache__')))
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)

    def test_recursion_limit(self):
        subpackage = os.path.join(self.pkgdir, 'spam')
        subpackage2 = os.path.join(subpackage, 'ham')
        subpackage3 = os.path.join(subpackage2, 'eggs')
        for pkg in (subpackage, subpackage2, subpackage3):
            script_helper.make_pkg(pkg)
        subinitfn = os.path.join(subpackage, '__init__.py')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        spamfn = script_helper.make_script(subpackage2, 'spam', '')
        eggfn = script_helper.make_script(subpackage3, 'egg', '')
        self.assertRunOK('-q', '-r 0', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(
            os.path.exists(os.path.join(subpackage, '__pycache__')))
        self.assertRunOK('-q', '-r 1', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertNotCompiled(spamfn)
        self.assertRunOK('-q', '-r 2', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertNotCompiled(eggfn)
        self.assertRunOK('-q', '-r 5', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertCompiled(eggfn)

    @skip_unless_symlink
    def test_symlink_loop(self):
        # Currently, compileall ignores symlinks to directories.
        # If that limitation is ever lifted, it should protect against
        # recursion in symlink loops.
        pkg = os.path.join(self.pkgdir, 'spam')
        script_helper.make_pkg(pkg)
        os.symlink('.', os.path.join(pkg, 'evil'))
        os.symlink('.', os.path.join(pkg, 'evil2'))
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(os.path.join(
            self.pkgdir, 'spam', 'evil', 'evil2', '__init__.py'
        ))

    def test_quiet(self):
        noisy = self.assertRunOK(self.pkgdir)
        quiet = self.assertRunOK('-q', self.pkgdir)
        self.assertNotEqual(b'', noisy)
        self.assertEqual(b'', quiet)

    def test_silent(self):
        script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
        _, quiet, _ = self.assertRunNotOK('-q', self.pkgdir)
        _, silent, _ = self.assertRunNotOK('-qq', self.pkgdir)
        self.assertNotEqual(b'', quiet)
        self.assertEqual(b'', silent)

    def test_regexp(self):
        self.assertRunOK('-q', '-x', r'ba[^\\/]*$', self.pkgdir)
        self.assertNotCompiled(self.barfn)
        self.assertCompiled(self.initfn)

    def test_multiple_dirs(self):
        pkgdir2 = os.path.join(self.directory, 'foo2')
        os.mkdir(pkgdir2)
        init2fn = script_helper.make_script(pkgdir2, '__init__', '')
        bar2fn = script_helper.make_script(pkgdir2, 'bar2', '')
        self.assertRunOK('-q', self.pkgdir, pkgdir2)
        self.assertCompiled(self.initfn)
        self.assertCompiled(self.barfn)
        self.assertCompiled(init2fn)
        self.assertCompiled(bar2fn)

    def test_d_compile_error(self):
        script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
        rc, out, err = self.assertRunNotOK('-q', '-d', 'dinsdale', self.pkgdir)
        self.assertRegex(out, b'File "dinsdale')

    def test_d_runtime_error(self):
        bazfn = script_helper.make_script(self.pkgdir, 'baz', 'raise Exception')
        self.assertRunOK('-q', '-d', 'dinsdale', self.pkgdir)
        fn = script_helper.make_script(self.pkgdir, 'bing', 'import baz')
        pyc = importlib.util.cache_from_source(bazfn)
        os.rename(pyc, os.path.join(self.pkgdir, 'baz.pyc'))
        os.remove(bazfn)
        rc, out, err = script_helper.assert_python_failure(fn, __isolated=False)
        self.assertRegex(err, b'File "dinsdale')

    def test_include_bad_file(self):
        rc, out, err = self.assertRunNotOK(
            '-i', os.path.join(self.directory, 'nosuchfile'), self.pkgdir)
        self.assertRegex(out, b'rror.*nosuchfile')
        self.assertNotRegex(err, b'Traceback')
        self.assertFalse(os.path.exists(importlib.util.cache_from_source(
                                            self.pkgdir_cachedir)))

    def test_include_file_with_arg(self):
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        with open(os.path.join(self.directory, 'l1'), 'w',
                  encoding="utf-8") as l1:
            l1.write(os.path.join(self.pkgdir, 'f1.py')+os.linesep)
            l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
        self.assertRunOK('-i', os.path.join(self.directory, 'l1'), f4)
        self.assertCompiled(f1)
        self.assertCompiled(f2)
        self.assertNotCompiled(f3)
        self.assertCompiled(f4)

    def test_include_file_no_arg(self):
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        with open(os.path.join(self.directory, 'l1'), 'w',
                  encoding="utf-8") as l1:
            l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
        self.assertRunOK('-i', os.path.join(self.directory, 'l1'))
        self.assertNotCompiled(f1)
        self.assertCompiled(f2)
        self.assertNotCompiled(f3)
        self.assertNotCompiled(f4)

    def test_include_on_stdin(self):
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        p = script_helper.spawn_python(*(self._get_run_args(()) + ['-i', '-']))
        p.stdin.write((f3+os.linesep).encode('ascii'))
        script_helper.kill_python(p)
        self.assertNotCompiled(f1)
        self.assertNotCompiled(f2)
        self.assertCompiled(f3)
        self.assertNotCompiled(f4)

    def test_compiles_as_much_as_possible(self):
        bingfn = script_helper.make_script(self.pkgdir, 'bing', 'syntax(error')
        rc, out, err = self.assertRunNotOK('nosuchfile', self.initfn,
                                           bingfn, self.barfn)
        self.assertRegex(out, b'rror')
        self.assertNotCompiled(bingfn)
        self.assertCompiled(self.initfn)
        self.assertCompiled(self.barfn)

    def test_invalid_arg_produces_message(self):
        out = self.assertRunOK('badfilename')
        self.assertRegex(out, b"Can't list 'badfilename'")

    @unittest.skipIf(not compileall.PY37,
                     "Python <= 3.6 doesn't contain invalidation modes")
    def test_pyc_invalidation_mode(self):
        script_helper.make_script(self.pkgdir, 'f1', '')
        pyc = importlib.util.cache_from_source(
            os.path.join(self.pkgdir, 'f1.py'))
        self.assertRunOK('--invalidation-mode=checked-hash', self.pkgdir)
        with open(pyc, 'rb') as fp:
            data = fp.read()
        self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b11)
        self.assertRunOK('--invalidation-mode=unchecked-hash', self.pkgdir)
        with open(pyc, 'rb') as fp:
            data = fp.read()
        self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b01)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    def test_workers(self):
        bar2fn = script_helper.make_script(self.directory, 'bar2', '')
        files = []
        for suffix in range(5):
            pkgdir = os.path.join(self.directory, 'foo{}'.format(suffix))
            os.mkdir(pkgdir)
            fn = script_helper.make_script(pkgdir, '__init__', '')
            files.append(script_helper.make_script(pkgdir, 'bar2', ''))

        self.assertRunOK(self.directory, '-j', '0')
        self.assertCompiled(bar2fn)
        for file in files:
            self.assertCompiled(file)

    @mock.patch('compileall2.compile_dir')
    def test_workers_available_cores(self, compile_dir):
        with mock.patch("sys.argv",
                        new=[sys.executable, self.directory, "-j0"]):
            compileall.main()
            self.assertTrue(compile_dir.called)
            self.assertEqual(compile_dir.call_args[-1]['workers'], 0)

    def test_strip_and_prepend(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *fullpath[:2])
        prependdir = "/foo"
        self.assertRunOK("-s", stripdir, "-p", prependdir, path)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(prependdir, *fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_multiple_optimization_levels(self):
        path = os.path.join(self.directory, "optimizations")
        os.makedirs(path)
        script = script_helper.make_script(path,
                                           "test_optimization",
                                           "a = 0")
        bc = []
        for opt_level in "", 1, 2, 3:
            opt_kwarg = compileall.optimization_kwarg(opt_level)
            bc.append(importlib.util.cache_from_source(script, **opt_kwarg))
        test_combinations = [["0", "1"], ["1", "2"], ["0", "2"], ["0", "1", "2"]]
        for opt_combination in test_combinations:
            self.assertRunOK(path, *("-o" + str(n) for n in opt_combination))
            for opt_level in opt_combination:
                self.assertTrue(os.path.isfile(bc[int(opt_level)]))
            for bc_file in bc:
                try:
                    os.unlink(bc_file)
                except Exception:
                    pass

    @skip_unless_symlink
    def test_ignore_symlink_destination(self):
        # Create folders for allowed files, symlinks and prohibited area
        allowed_path = os.path.join(self.directory, "test", "dir", "allowed")
        symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks")
        prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited")
        os.makedirs(allowed_path)
        os.makedirs(symlinks_path)
        os.makedirs(prohibited_path)

        # Create scripts and symlinks and remember their byte-compiled versions
        allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0")
        prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0")
        allowed_symlink = os.path.join(symlinks_path, "test_allowed.py")
        prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py")
        os.symlink(allowed_script, allowed_symlink)
        os.symlink(prohibited_script, prohibited_symlink)
        allowed_bc = importlib.util.cache_from_source(allowed_symlink)
        prohibited_bc = importlib.util.cache_from_source(prohibited_symlink)

        self.assertRunOK(symlinks_path, "-e", allowed_path)

        self.assertTrue(os.path.isfile(allowed_bc))
        self.assertFalse(os.path.isfile(prohibited_bc))

    def test_hardlink_deduplication_bad_args(self):
        # Bad arguments combination, hardlink deduplication make sense
        # only for more than one optimization level
        self.assertRunNotOK(self.directory, "-o 1", "--hardlink_dupes")

    def test_hardlink_deduplication_same_bytecode_all_opt(self):
        # 'a = 0' produces the same bytecode for all optimization levels
        path = os.path.join(self.directory, "test", "same_all")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "test_same_bytecode", "a = 0")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        self.assertRunOK(path, "-q", "-o 0", "-o 1", "-o 2", "--hardlink-dupes")

        # All three files should have the same inode (hardlinks)
        self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        self.assertEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

        for pyc_file in {pyc_opt0, pyc_opt1, pyc_opt2}:
            os.unlink(pyc_file)

        self.assertRunOK(path, "-q", "-o 0", "-o 1", "-o 2")

        # Deduplication disabled, all pyc files should have different inodes
        self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

    def test_hardlink_deduplication_same_bytecode_some_opt(self):
        # 'a = 0' produces the same bytecode for all optimization levels
        # only two levels of optimization [0, 1] tested
        path = os.path.join(self.directory, "test", "same_some")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "test_same_bytecode", "a = 0")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        self.assertRunOK(path, "-q", "-o 0", "-o 2", "--hardlink-dupes")

        # Both files should have the same inode (hardlink)
        self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt2).st_ino)

        for pyc_file in {pyc_opt0, pyc_opt2}:
            os.unlink(pyc_file)

        self.assertRunOK(path, "-q", "-o 0", "-o 2")

        # Deduplication disabled, both pyc files should have different inodes
        self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt2).st_ino)

    def test_hardlink_deduplication_same_bytecode_some_opt_2(self):
        # 'a = 0' produces the same bytecode for all optimization levels
        path = os.path.join(self.directory, "test", "same_some_2")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "test_same_bytecode", "a = 0")
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        self.assertRunOK(path, "-q", "-o 1", "-o 2", "--hardlink-dupes")

        # Both files should have the same inode (hardlinks)
        self.assertEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

        for pyc_file in {pyc_opt1, pyc_opt2}:
            os.unlink(pyc_file)

        self.assertRunOK(path, "-q", "-o 1", "-o 2")

        # Deduplication disabled, all pyc files should have different inodes
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)
        else:
            self.assertEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

    def test_hardlink_deduplication_different_bytecode_all_opt(self):
        # "'''string'''\nassert 1" produces a different bytecode for all optimization levels
        path = os.path.join(self.directory, "test", "different_all")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "test_different_bytecode",
                                                  "'''string'''\nassert 1")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        self.assertRunOK(path, "-q", "-o 0", "-o 1", "-o 2", "--hardlink-dupes")

        # No hardlinks, bytecodes are different
        self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

        for pyc_file in {pyc_opt0, pyc_opt1, pyc_opt2}:
            os.unlink(pyc_file)

        self.assertRunOK(path, "-q", "-o 0", "-o 1", "-o 2")

        # Disabling hardlink deduplication makes no difference
        self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

    def test_hardlink_deduplication_different_bytecode_one_hardlink(self):
        # "'''string'''\na = 1" produces the same bytecode only for level 0 and 1
        path = os.path.join(self.directory, "test", "different_one")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "test_different_bytecode",
                                                  "'''string'''\na = 1")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        self.assertRunOK(path, "-q", "-o 0", "-o 1", "-o 2", "--hardlink-dupes")

        # Only level 0 and 1 has the same inode, level 2 produces a different bytecode
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)
        else:
            self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)

        for pyc_file in {pyc_opt0, pyc_opt1, pyc_opt2}:
            os.unlink(pyc_file)

        self.assertRunOK(path, "-q", "-o 0", "-o 1", "-o 2")

        # Deduplication disabled, no hardlinks
        self.assertNotEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        if compileall.PY35:
            # Python 3.4 produces the same file for opt1 and opt2
            self.assertNotEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)

    def test_hardlink_deduplication_recompilation(self):
        path = os.path.join(self.directory, "test", "module_change")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "module_change", "a = 0")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        self.assertRunOK(path, "-f", "-q", "-o 0", "-o 1", "-o 2", "--hardlink-dupes")

        # All three levels have the same inode
        self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        self.assertEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)
        previous_inode = os.stat(pyc_opt0).st_ino

        # Change of the module content
        simple_script = script_helper.make_script(path, "module_change", "print(0)")

        # Recompilation without -o 1
        self.assertRunOK(path, "-f", "-q", "-o 0", "-o 2", "--hardlink-dupes")

        # opt-1.pyc should have the same inode as before and others should not
        if compileall.PY35:
            self.assertEqual(previous_inode, os.stat(pyc_opt1).st_ino)
        self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt2).st_ino)
        self.assertNotEqual(previous_inode, os.stat(pyc_opt2).st_ino)

        # opt-1.pyc and opt-2.pyc have different content
        if compileall.PY35:
            self.assertFalse(filecmp.cmp(pyc_opt1, pyc_opt2, shallow=True))

    def test_hardlink_deduplication_import(self):
        path = os.path.join(self.directory, "test", "module_import")
        os.makedirs(path)
        simple_script = script_helper.make_script(path, "module", "a = 0")
        pyc_opt0 = importlib.util.cache_from_source(simple_script)
        pyc_opt1 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(1)
        )
        pyc_opt2 = importlib.util.cache_from_source(
            simple_script,
            **compileall.optimization_kwarg(2)
        )

        self.assertRunOK(path, "-f", "-q", "-o 0", "-o 1", "-o 2", "--hardlink-dupes")

        # All three levels have the same inode
        self.assertEqual(os.stat(pyc_opt0).st_ino, os.stat(pyc_opt1).st_ino)
        self.assertEqual(os.stat(pyc_opt1).st_ino, os.stat(pyc_opt2).st_ino)
        previous_inode = os.stat(pyc_opt0).st_ino

        # Change of the module content
        simple_script = script_helper.make_script(path, "module", "print(0)")

        # Import the module in Python
        script_helper.assert_python_ok(
            "-O", "-c", "import module", __isolated=False, PYTHONPATH=path
        )

        # Only opt-1.pyc is changed
        self.assertEqual(previous_inode, os.stat(pyc_opt0).st_ino)
        if compileall.PY35:
            self.assertEqual(previous_inode, os.stat(pyc_opt2).st_ino)
            self.assertNotEqual(os.stat(pyc_opt1).st_ino,
                                os.stat(pyc_opt2).st_ino)

        # opt-1.pyc and opt-2.pyc have different content
        if compileall.PY35:
            self.assertFalse(filecmp.cmp(pyc_opt1, pyc_opt2, shallow=True))


class CommmandLineTestsWithSourceEpoch(CommandLineTestsBase,
                                       unittest.TestCase,
                                       metaclass=SourceDateEpochTestMeta,
                                       source_date_epoch=True):
    pass


class CommmandLineTestsNoSourceEpoch(CommandLineTestsBase,
                                     unittest.TestCase,
                                     metaclass=SourceDateEpochTestMeta,
                                     source_date_epoch=False):
    pass


if __name__ == "__main__":
    unittest.main()