diff --git a/.authors.yml b/.authors.yml index 6b71ac9dc7..dc7b1a4258 100644 --- a/.authors.yml +++ b/.authors.yml @@ -612,7 +612,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 80 + num_commits: 82 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -1202,7 +1202,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 167 + num_commits: 168 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard diff --git a/CHANGELOG.md b/CHANGELOG.md index dfe35b2345..840bc6636a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ [//]: # (current developments) +## 24.1.2 (2024-02-15) + +### Bug fixes + +* Fix rpaths patcher being run on symbolic links. (#5179 via #5181) +* Fix corrupted package cache for outputs in subpackage tests. (#5184) + +### Contributors + +* @mbargull + + + ## 24.1.1 (2024-02-07) ### Bug fixes diff --git a/conda_build/build.py b/conda_build/build.py index 28ffc04a70..526d665c8a 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -38,6 +38,7 @@ env_path_backup_var_exists, get_conda_channel, get_rc_urls, + pkgs_dirs, prefix_placeholder, reset_context, root_dir, @@ -3394,6 +3395,25 @@ def test( # folder destination _extract_test_files_from_package(metadata) + # Remove any previously cached build from the package cache to ensure we + # really test the requested build and not some clashing or corrupted build. + # (Corruption of the extracted package can happen, e.g., in multi-output + # builds if one of the subpackages overwrites files from the other.) + # Special case: + # If test is requested for .tar.bz2/.conda file from the pkgs dir itself, + # clean_pkg_cache() will remove it; don't call that function in this case. 
+ in_pkg_cache = ( + not hasattr(recipedir_or_package_or_metadata, "config") + and os.path.isfile(recipedir_or_package_or_metadata) + and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS) + and any( + os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dir + for pkgs_dir in pkgs_dirs + ) + ) + if not in_pkg_cache: + environ.clean_pkg_cache(metadata.dist(), metadata.config) + copy_test_source_files(metadata, metadata.config.test_dir) # this is also copying tests/source_files from work_dir to testing workdir diff --git a/conda_build/environ.py b/conda_build/environ.py index 3026f1bf60..762b9c7479 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -16,10 +16,15 @@ from logging import getLogger from os.path import join, normpath -from conda.base.constants import DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL +from conda.base.constants import ( + CONDA_PACKAGE_EXTENSIONS, + DEFAULTS_CHANNEL_NAME, + UNKNOWN_CHANNEL, +) from conda.common.io import env_vars from conda.core.index import LAST_CHANNEL_URLS from conda.core.link import PrefixSetup, UnlinkLinkTransaction +from conda.core.package_cache_data import PackageCacheData from conda.core.prefix_data import PrefixData from conda.models.channel import prioritize_channels @@ -43,6 +48,7 @@ reset_context, root_dir, ) +from .config import Config from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list @@ -1264,6 +1270,29 @@ def get_pkg_dirs_locks(dirs, config): return [utils.get_lock(folder, timeout=config.timeout) for folder in dirs] +def clean_pkg_cache(dist: str, config: Config) -> None: + with utils.LoggingContext(logging.DEBUG if config.debug else logging.WARN): + locks = get_pkg_dirs_locks([config.bldpkgs_dir] + pkgs_dirs, config) + with utils.try_acquire_locks(locks, timeout=config.timeout): + for pkgs_dir in pkgs_dirs: + if any( + os.path.exists(os.path.join(pkgs_dir, f"{dist}{ext}")) + for ext in ("", 
*CONDA_PACKAGE_EXTENSIONS) + ): + log.debug( + "Conda caching error: %s package remains in cache after removal", + dist, + ) + log.debug("manually removing to compensate") + package_cache = PackageCacheData.first_writable([pkgs_dir]) + for cache_pkg_id in package_cache.query(dist): + package_cache.remove(cache_pkg_id) + + # Note that this call acquires the relevant locks, so this must be called + # outside the lock context above. + remove_existing_packages(pkgs_dirs, [dist], config) + + def remove_existing_packages(dirs, fns, config): locks = get_pkg_dirs_locks(dirs, config) if config.locking else [] diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 3f3fba7545..c5a1d92ec8 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -330,7 +330,7 @@ def inspect_objects( info = [] for f in obj_files: path = join(prefix, f) - codefile = codefile_class(path) + codefile = codefile_class(path, skip_symlinks=True) if codefile == machofile: info.append( { diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index 3ab78bc7fd..c45def903e 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -111,11 +111,17 @@ def _get_linkages( def get_package_obj_files( prec: PrefixRecord, prefix: str | os.PathLike | Path ) -> list[str]: - return [file for file in prec["files"] if codefile_class(Path(prefix, file))] + return [ + file + for file in prec["files"] + if codefile_class(Path(prefix, file), skip_symlinks=True) + ] @lru_cache(maxsize=None) def get_untracked_obj_files(prefix: str | os.PathLike | Path) -> list[str]: return [ - file for file in untracked(str(prefix)) if codefile_class(Path(prefix, file)) + file + for file in untracked(str(prefix)) + if codefile_class(Path(prefix, file), skip_symlinks=True) ] diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index c44a03f864..cc365decdf 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ 
-504,7 +504,8 @@ def inspect_linkages_lief( while tmp_filename: if ( not parent_exe_dirname - and codefile_class(tmp_filename) == EXEfile + and codefile_class(tmp_filename, skip_symlinks=True) + == EXEfile ): parent_exe_dirname = os.path.dirname(tmp_filename) tmp_filename = parents_by_filename[tmp_filename] @@ -600,7 +601,8 @@ def get_linkages( result_pyldd = [] debug = False if not have_lief or debug: - if codefile_class(filename) not in (DLLfile, EXEfile): + codefile = codefile_class(filename, skip_symlinks=True) + if codefile not in (DLLfile, EXEfile): result_pyldd = inspect_linkages_pyldd( filename, resolve_filenames=resolve_filenames, @@ -612,7 +614,7 @@ return result_pyldd else: print( - f"WARNING: failed to get_linkages, codefile_class('{filename}')={codefile_class(filename)}" + f"WARNING: failed to get_linkages, codefile_class('{filename}', True)={codefile}" ) return {} result_lief = inspect_linkages_lief( diff --git a/conda_build/post.py b/conda_build/post.py index 5a05eda077..3b9cb6c832 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -74,7 +74,7 @@ def fix_shebang(f, prefix, build_python, osx_is_app=False): path = join(prefix, f) - if codefile_class(path): + if codefile_class(path, skip_symlinks=True): return elif islink(path): return @@ -413,7 +413,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): ".. seems to be linking to a compiler runtime, replacing build prefix with " "host prefix and" ) - if not codefile_class(link): + if not codefile_class(link, skip_symlinks=True): sys.exit( "Error: Compiler runtime library in build prefix not found in host prefix %s" % link @@ -653,7 +653,7 @@ def get_dsos(prec: PrefixRecord, prefix: str | os.PathLike | Path) -> set[str]: return { file for file in prec["files"] - if codefile_class(Path(prefix, file)) + if codefile_class(Path(prefix, file), skip_symlinks=True) # codefile_class already filters by extension/binary type, do we need this second filter? 
for ext in (".dylib", ".so", ".dll", ".pyd") if ext in file @@ -836,7 +836,7 @@ def _collect_needed_dsos( sysroots = list(sysroots_files.keys())[0] for f in files: path = join(run_prefix, f) - if not codefile_class(path): + if not codefile_class(path, skip_symlinks=True): continue build_prefix = build_prefix.replace(os.sep, "/") run_prefix = run_prefix.replace(os.sep, "/") @@ -1174,7 +1174,7 @@ def _show_linking_messages( ) for f in files: path = join(run_prefix, f) - codefile = codefile_class(path) + codefile = codefile_class(path, skip_symlinks=True) if codefile not in filetypes_for_platform[subdir.split("-")[0]]: continue warn_prelude = "WARNING ({},{})".format(pkg_name, f.replace(os.sep, "/")) @@ -1273,7 +1273,7 @@ def check_overlinking_impl( filesu = [] for file in files: path = join(run_prefix, file) - codefile = codefile_class(path) + codefile = codefile_class(path, skip_symlinks=True) if codefile in filetypes_for_platform[subdir.split("-")[0]]: files_to_inspect.append(file) filesu.append(file.replace("\\", "/")) @@ -1578,7 +1578,7 @@ def post_process_shared_lib(m, f, files, host_prefix=None): if not host_prefix: host_prefix = m.config.host_prefix path = join(host_prefix, f) - codefile = codefile_class(path) + codefile = codefile_class(path, skip_symlinks=True) if not codefile or path.endswith(".debug"): return rpaths = m.get_value("build/rpaths", ["lib"]) @@ -1737,7 +1737,9 @@ def check_symlinks(files, prefix, croot): # symlinks to binaries outside of the same dir don't work. RPATH stuff gets confused # because ld.so follows symlinks in RPATHS # If condition exists, then copy the file rather than symlink it. 
- if not dirname(link_path) == dirname(real_link_path) and codefile_class(f): + if not dirname(link_path) == dirname(real_link_path) and codefile_class( + f, skip_symlinks=True + ): os.remove(path) utils.copy_into(real_link_path, path) elif real_link_path.startswith(real_build_prefix): diff --git a/pyproject.toml b/pyproject.toml index 72f657031e..4ae5105a55 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ dependencies = [ "beautifulsoup4", "chardet", "conda >=22.11", - "conda-index", + "conda-index >=0.4.0", "conda-package-handling >=1.3", "filelock", "jinja2", diff --git a/recipe/meta.yaml b/recipe/meta.yaml index fb5088e15e..9b1ec2f3bc 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -31,7 +31,7 @@ requirements: - beautifulsoup4 - chardet - conda >=22.11.0 - - conda-index + - conda-index >=0.4.0 - conda-package-handling >=1.3 - filelock - jinja2 diff --git a/tests/test-recipes/metadata/_rpath_symlink/meta.yaml b/tests/test-recipes/metadata/_rpath_symlink/meta.yaml new file mode 100644 index 0000000000..0ef58cdab2 --- /dev/null +++ b/tests/test-recipes/metadata/_rpath_symlink/meta.yaml @@ -0,0 +1,39 @@ +{% set lib_file = "libthing.so.1.0.0" %} # [linux] +{% set lib_file = "libthing.1.0.0.dylib" %} # [osx] + +package: + name: rpath_symlink + version: 1.0.0 + +build: + skip: true # [not (linux or osx)] + rpaths_patcher: {{ rpaths_patcher }} + script: + - mkdir -p "${PREFIX}/lib" + - > + < /dev/null ${CC} ${CPPFLAGS} ${CFLAGS} ${LDFLAGS} + -x c - -nostdlib -s -o "${PREFIX}/lib/{{ lib_file }}" "-Wl,-rpath,${PREFIX}/lib" + -shared -Wl,-soname,libthing.so.1 # [linux] + -dynamiclib -install_name libthing.1.dylib # [osx] + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/libthing.so.1" # [linux] + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/libthing.1.dylib" # [osx] + - mkdir -p "${PREFIX}/lib/subfolder" + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/subfolder/libthing-link.so" # [linux] + - ln -s "${PREFIX}/lib/{{ 
lib_file }}" "${PREFIX}/lib/subfolder/libthing-link.dylib" # [osx] + +requirements: + build: + - {{ compiler("c") }} + +test: + requires: + - py-lief + commands: + # Test that we get only a single entry that is the library's own directory. + - | + python -c ' + import os, lief + lib = lief.parse(os.environ["PREFIX"] + "/lib/{{ lib_file }}") + assert {"$ORIGIN/."} == {e.rpath for e in lib.dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.RPATH} # [linux] + assert {"@loader_path/"} == {command.path for command in lib.commands if command.command == lief.MachO.LOAD_COMMAND_TYPES.RPATH} # [osx] + ' diff --git a/tests/test-recipes/metadata/outputs_overwrite_base_file/install.bat b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.bat new file mode 100644 index 0000000000..b6584f3971 --- /dev/null +++ b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.bat @@ -0,0 +1,2 @@ +:: Always output 4 characters to properly test even if "SafetyError: ... incorrect size." is not triggered. +< nul set /p="%PKG_NAME:~0,4%" > "%PREFIX%\file" & call; diff --git a/tests/test-recipes/metadata/outputs_overwrite_base_file/install.sh b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.sh new file mode 100644 index 0000000000..cb0be8cb2b --- /dev/null +++ b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.sh @@ -0,0 +1,2 @@ +## Always output 4 characters to properly test even if "SafetyError: ... incorrect size." is not triggered. 
+printf '%.4s' "${PKG_NAME}" > "${PREFIX}/file" diff --git a/tests/test-recipes/metadata/outputs_overwrite_base_file/meta.yaml b/tests/test-recipes/metadata/outputs_overwrite_base_file/meta.yaml new file mode 100644 index 0000000000..1c27afc126 --- /dev/null +++ b/tests/test-recipes/metadata/outputs_overwrite_base_file/meta.yaml @@ -0,0 +1,40 @@ +{% set name = "outputs_overwrite_base_file" %} + +package: + name: {{ name }} + version: 1.0 + +outputs: + - name: base-{{ name }} + script: install.sh # [unix] + script: install.bat # [win] + + - name: first-{{ name }} + script: install.sh # [unix] + script: install.bat # [win] + requirements: + host: + - {{ pin_subpackage("base-" + name) }} + run: + - {{ pin_subpackage("base-" + name) }} + test: + commands: + - content="$(cat "${PREFIX}/file")" # [unix] + - test "${content}" = base # [unix] + - < "%PREFIX%\file%" set /p content= # [win] + - if not "%content%" == "base" exit 1 # [win] + + - name: second-{{ name }} + script: install.sh # [unix] + script: install.bat # [win] + requirements: + host: + - {{ pin_subpackage("base-" + name) }} + run: + - {{ pin_subpackage("base-" + name) }} + test: + commands: + - content="$(cat "${PREFIX}/file")" # [unix] + - test "${content}" = "base" # [unix] + - < "%PREFIX%\file%" set /p content= # [win] + - if not "%content%" == "base" exit 1 # [win] diff --git a/tests/test_post.py b/tests/test_post.py index c15fffaf2a..97ef1448fc 100644 --- a/tests/test_post.py +++ b/tests/test_post.py @@ -10,7 +10,13 @@ import pytest from conda_build import api, post -from conda_build.utils import get_site_packages, on_win, package_has_file +from conda_build.utils import ( + get_site_packages, + on_linux, + on_mac, + on_win, + package_has_file, +) from .utils import add_mangling, metadata_dir @@ -148,3 +154,19 @@ def test_menuinst_validation_fails_bad_json(testing_config, caplog, tmp_path): assert "Found 'Menu/*.json' files but couldn't validate:" not in captured_text assert "not a valid menuinst JSON 
document" in captured_text assert "JSONDecodeError" in captured_text + + +@pytest.mark.skipif(on_win, reason="rpath fixup not done on Windows.") +def test_rpath_symlink(mocker, testing_config): + if on_linux: + mk_relative = mocker.spy(post, "mk_relative_linux") + elif on_mac: + mk_relative = mocker.spy(post, "mk_relative_osx") + api.build( + os.path.join(metadata_dir, "_rpath_symlink"), + config=testing_config, + variants={"rpaths_patcher": ["patchelf", "LIEF"]}, + activate=True, + ) + # Should only be called on the actual binary, not its symlinks. (once per variant) + assert mk_relative.call_count == 2