Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Remove duplicated optimization levels
  • Loading branch information
vstinner committed May 14, 2020
commit 7e92096fe6a6c675dbacff88c3e8e44596d8ce66
12 changes: 8 additions & 4 deletions Lib/compileall.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,9 +181,13 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
if isinstance(optimize, int):
optimize = [optimize]

if hardlink_dupes:
raise ValueError("Hardlinking of duplicated bytecode makes sense "
"only for more than one optimization level")
# Use set() to remove duplicates.
# Use sorted() to create pyc files in a deterministic order.
optimize = sorted(set(optimize))

if hardlink_dupes and len(optimize) < 2:
raise ValueError("Hardlinking of duplicated bytecode makes sense "
"only for more than one optimization level")

if rx is not None:
mo = rx.search(fullname)
Expand Down Expand Up @@ -229,7 +233,7 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
if not quiet:
print('Compiling {!r}...'.format(fullname))
try:
for index, opt_level in enumerate(sorted(optimize)):
for index, opt_level in enumerate(optimize):
cfile = opt_cfiles[opt_level]
ok = py_compile.compile(fullname, cfile, dfile, True,
optimize=opt_level,
Expand Down
23 changes: 20 additions & 3 deletions Lib/test/test_compileall.py
Original file line number Diff line number Diff line change
Expand Up @@ -911,11 +911,16 @@ def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False):
def test_bad_args(self):
# Bad arguments combination, hardlink deduplication makes sense
# only for more than one optimization level
with self.assertRaises(ValueError):
with self.temporary_directory():
self.make_script("pass")
with self.temporary_directory():
self.make_script("pass")
with self.assertRaises(ValueError):
compileall.compile_dir(self.path, quiet=True, optimize=0,
hardlink_dupes=True)
with self.assertRaises(ValueError):
# same optimization level specified twice:
# compile_dir() removes duplicates
compileall.compile_dir(self.path, quiet=True, optimize=[0, 0],
hardlink_dupes=True)

def create_code(self, docstring=False, assertion=False):
lines = []
Expand Down Expand Up @@ -975,6 +980,18 @@ def test_only_two_levels(self):
pyc2 = get_pyc(script, opts[1])
self.assertTrue(is_hardlink(pyc1, pyc2))

def test_duplicated_levels(self):
# compile_dir() must not fail if optimize contains duplicated
# optimization levels and/or if optimization levels are not sorted.
with self.temporary_directory():
# code with no docstring and no assertion:
# same bytecode for all optimization levels
script = self.make_script(self.create_code())
self.compile_dir(optimize=[1, 0, 1, 0])
pyc1 = get_pyc(script, 0)
pyc2 = get_pyc(script, 1)
self.assertTrue(is_hardlink(pyc1, pyc2))

def test_recompilation(self):
# Test compile_dir() when pyc files already exists and the script
# content changed
Expand Down