diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index fb922265c4..31aaab749b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -53,12 +53,12 @@ repos:
files: \.py$
args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol]
- repo: https://github.com/asottile/blacken-docs
- rev: 1.16.0
+ rev: 1.18.0
hooks:
# auto format Python codes within docstrings
- id: blacken-docs
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.4.2
+ rev: v0.6.2
hooks:
# lint & attempt to correct failures (e.g. pyupgrade)
- id: ruff
@@ -66,7 +66,7 @@ repos:
# compatible replacement for black
- id: ruff-format
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
- rev: v2.13.0
+ rev: v2.14.0
hooks:
- id: pretty-format-toml
args: [--autofix, --trailing-commas]
@@ -87,7 +87,7 @@ repos:
tests/
)
- repo: https://github.com/python-jsonschema/check-jsonschema
- rev: 0.28.2
+ rev: 0.29.2
hooks:
# verify github syntaxes
- id: check-github-workflows
diff --git a/AUTHORS.md b/AUTHORS.md
index 969994f016..8953b0b356 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -51,6 +51,7 @@ Authors are sorted alphabetically.
* Dan Blanchard
* Dan Lovell
* Daniel Bast
+* Daniel Ching
* Daniel Damiani
* Daniel Holth
* Daniel Petry
@@ -120,12 +121,14 @@ Authors are sorted alphabetically.
* Juan Lasheras
* Julian Rüth
* Julien Schueller
+* Justin Wood (Callek)
* Jürgen Gmach
* Jędrzej Nowak
* Kai Tietz
* Kale Franz
* Katherine Kinnaman
* Ken Odegard
+* Klaus Zimmermann
* Korijn van Golen
* Kurt Schelfthout
* Kyle Leaders
@@ -216,7 +219,9 @@ Authors are sorted alphabetically.
* Thomas A Caswell
* Thomas Holder
* Thomas Kluyver
+* Tim Paine
* Tim Snyder
+* Tobias Fischer
* Todd Tomashek
* Tom Davidson
* Tom Pollard
@@ -235,6 +240,7 @@ Authors are sorted alphabetically.
* Wolf Vollprecht
* Wolfgang Ulmer
* Yann
+* Yannik Tausch
* Yoav Ram
* Yu Feng
* Zane Dufour
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 42d745f874..a088d2f953 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,296 @@
[//]: # (current developments)
+## 24.7.1 (2024-07-30)
+
+### Bug fixes
+
+* Check for WSL existence before calling `os.stat`. (#5433 via #5434)
+
+### Contributors
+
+* @kenodegard
+
+
+
+## 24.7.0 (2024-07-18)
+
+### Enhancements
+
+* Skip generating `repodata.json.bz2` for local index; generate `repodata.json`
+ only; require `conda-package-handling >=2.2.0` matching conda. (#5231)
+* Add new include/exclude sections for glob expressions in multi-output `outputs/files`. (#4196 via #5216)
+* Increase performance by using `pickle` instead of `copy.deepcopy`. (#5281)
+* Report fully rendered recipe to stdout before the build process starts. (#3798 via #5344)
+* Validate `run_constrained` dependencies to prevent faulty specs reaching final repodata. (#5047 via #5359)
+* `PIP_*` env variables are set when building outputs in multi-output recipes. (#3993 via #5368)
+* Reduce performance overhead of logging. (#5384)
+
+### Bug fixes
+
+* Include file path in addition to the content when generating the file hash to avoid unwanted caching during linkage analysis. (#4821)
+* Error handling when `LIEF` fails is now consistent with `patchelf`. (#5176)
+* Ensure cross-building recipes select the correct noarch package variants. (#5341 via #5350)
+* On Linux platforms, prefer the sysroot matching the target_platform when cross-compiling. (#5403)
+
+### Deprecations
+
+* Mark `conda_build.build._construct_metadata_for_test_from_recipe` as deprecated. Test built packages instead, not recipes (e.g., `conda build --test package` instead of `conda build --test recipe/`). (#3192 via #5352)
+* Mark `conda_build.build.check_external` for deprecation. `patchelf` is an explicit conda-build dependency on Linux so it will always be installed. (#5355)
+* Remove the following deprecations:
+ * `conda_build.config.Config.override_channels` (use `conda.base.context.context.channels` instead). (#5333)
+ * `conda_build.config.noarch_python_build_age_default`. (#5333)
+ * `conda_build.conda_interface.add_parser_channels` (use `conda.cli.helpers.add_parser_channels` instead). (#5333)
+ * `conda_build.conda_interface.add_parser_prefix` (use `conda.cli.helpers.add_parser_prefix` instead). (#5333)
+ * `conda_build.conda_interface.ArgumentParser` (use `conda.cli.conda_argparse.ArgumentParser` instead). (#5333)
+ * `conda_build.conda_interface.binstar_upload` (use `conda.base.context.context.binstar_upload` instead). (#5333)
+ * `conda_build.conda_interface.cc_conda_build` (use `conda.base.context.context.conda_build` instead). (#5333)
+ * `conda_build.conda_interface.cc_platform` (use `conda.base.context.context.platform` instead). (#5333)
+ * `conda_build.conda_interface.Channel` (use `conda.models.channel.Channel` instead). (#5333)
+ * `conda_build.conda_interface.Completer`. (#5333)
+ * `conda_build.conda_interface.configparser` (use `configparser` instead). (#5333)
+ * `conda_build.conda_interface.CondaError` (use `conda.exceptions.CondaError` instead). (#5333)
+ * `conda_build.conda_interface.CondaHTTPError` (use `conda.exceptions.CondaHTTPError` instead). (#5333)
+ * `conda_build.conda_interface.CondaSession` (use `conda.gateways.connection.session.CondaSession` instead). (#5333)
+ * `conda_build.conda_interface.CONDA_VERSION` (use `conda.__version__` instead). (#5333)
+ * `conda_build.conda_interface.context` (use `conda.base.context.context` instead). (#5333)
+ * `conda_build.conda_interface.create_default_packages` (use `conda.base.context.context.create_default_packages` instead). (#5333)
+ * `conda_build.conda_interface.default_python` (use `conda.base.context.context.default_python` instead). (#5333)
+ * `conda_build.conda_interface.determine_target_prefix` (use `conda.base.context.determine_target_prefix` instead). (#5333)
+ * `conda_build.conda_interface.download` (use `conda.gateways.connection.download.download` instead). (#5333)
+ * `conda_build.conda_interface.env_path_backup_var_exists`. (#5333)
+ * `conda_build.conda_interface.envs_dirs` (use `conda.base.context.context.envs_dirs` instead). (#5333)
+ * `conda_build.conda_interface.EntityEncoder` (use `conda.auxlib.entity.EntityEncoder` instead). (#5333)
+ * `conda_build.conda_interface.FileMode` (use `conda.models.enums.FileMode` instead). (#5333)
+ * `conda_build.conda_interface.get_conda_build_local_url` (use `conda.models.channel.get_conda_build_local_url` instead). (#5333)
+ * `conda_build.conda_interface.get_conda_channel` (use `conda.models.channel.Channel.from_value` instead). (#5333)
+ * `conda_build.conda_interface.get_prefix` (use `conda.base.context.context.target_prefix` instead). (#5333)
+ * `conda_build.conda_interface.get_rc_urls` (use `conda.base.context.context.channels` instead). (#5333)
+ * `conda_build.conda_interface.human_bytes` (use `conda.utils.human_bytes` instead). (#5333)
+ * `conda_build.conda_interface.import_module` (use `importlib.import_module` instead). (#5333)
+ * `conda_build.conda_interface.input` (use `input` instead). (#5333)
+ * `conda_build.conda_interface.InstalledPackages`. (#5333)
+ * `conda_build.conda_interface.lchmod` (use `conda.gateways.disk.link.lchmod` instead). (#5333)
+ * `conda_build.conda_interface.LinkError` (use `conda.exceptions.LinkError` instead). (#5333)
+ * `conda_build.conda_interface.LockError` (use `conda.exceptions.LockError` instead). (#5333)
+ * `conda_build.conda_interface.MatchSpec` (use `conda.models.match_spec.MatchSpec` instead). (#5333)
+ * `conda_build.conda_interface.non_x86_linux_machines` (use `conda.base.context.non_x86_machines` instead). (#5333)
+ * `conda_build.conda_interface.NoPackagesFound` (use `conda.exceptions.ResolvePackageNotFound` instead). (#5333)
+ * `conda_build.conda_interface.NoPackagesFoundError` (use `conda.exceptions.NoPackagesFoundError` instead). (#5333)
+ * `conda_build.conda_interface.normalized_version` (use `conda.models.version.normalized_version` instead). (#5333)
+ * `conda_build.conda_interface.os` (use `os` instead). (#5333)
+ * `conda_build.conda_interface.PackageRecord` (use `conda.models.records.PackageRecord` instead). (#5333)
+ * `conda_build.conda_interface.PaddingError` (use `conda.exceptions.PaddingError` instead). (#5333)
+ * `conda_build.conda_interface.partial` (use `functools.partial` instead). (#5333)
+ * `conda_build.conda_interface.PathType` (use `conda.models.enums.PathType` instead). (#5333)
+ * `conda_build.conda_interface.pkgs_dirs` (use `conda.base.context.context.pkgs_dirs` instead). (#5333)
+ * `conda_build.conda_interface.prefix_placeholder` (use `conda.base.constants.PREFIX_PLACEHOLDER` instead). (#5333)
+ * `conda_build.conda_interface.ProgressiveFetchExtract` (use `conda.core.package_cache_data.ProgressiveFetchExtract` instead). (#5333)
+ * `conda_build.conda_interface.reset_context` (use `conda.base.context.reset_context` instead). (#5333)
+ * `conda_build.conda_interface.Resolve` (use `conda.resolve.Resolve` instead). (#5333)
+ * `conda_build.conda_interface.rm_rf` (use `conda_build.utils.rm_rf` instead). (#5333)
+ * `conda_build.conda_interface.root_dir` (use `conda.base.context.context.root_prefix` instead). (#5333)
+ * `conda_build.conda_interface.root_writable` (use `conda.base.context.context.root_writable` instead). (#5333)
+ * `conda_build.conda_interface.spec_from_line` (use `conda.cli.common.spec_from_line` instead). (#5333)
+ * `conda_build.conda_interface.specs_from_args` (use `conda.cli.common.specs_from_args` instead). (#5333)
+ * `conda_build.conda_interface.specs_from_url` (use `conda.cli.common.specs_from_url` instead). (#5333)
+ * `conda_build.conda_interface.StringIO` (use `io.StringIO` instead). (#5333)
+ * `conda_build.conda_interface.subdir` (use `conda.base.context.context.subdir` instead). (#5333)
+ * `conda_build.conda_interface.symlink_conda`. (#5333)
+ * `conda_build.conda_interface.TemporaryDirectory` (use `conda.gateways.disk.create.TemporaryDirectory` instead). (#5333)
+ * `conda_build.conda_interface.TmpDownload` (use `conda.gateways.connection.download.TmpDownload` instead). (#5333)
+ * `conda_build.conda_interface._toposort` (use `conda.common.toposort._toposort` instead). (#5333)
+ * `conda_build.conda_interface.unix_path_to_win` (use `conda.utils.unix_path_to_win` instead). (#5333)
+ * `conda_build.conda_interface.untracked` (use `conda.misc.untracked` instead). (#5333)
+ * `conda_build.conda_interface.Unsatisfiable` (use `conda.exceptions.UnsatisfiableError` instead). (#5333)
+ * `conda_build.conda_interface.UnsatisfiableError` (use `conda.exceptions.UnsatisfiableError` instead). (#5333)
+ * `conda_build.conda_interface.url_path` (use `conda.utils.url_path` instead). (#5333)
+ * `conda_build.conda_interface.VersionOrder` (use `conda.models.version.VersionOrder` instead). (#5333)
+ * `conda_build.conda_interface.walk_prefix` (use `conda.misc.walk_prefix` instead). (#5333)
+ * `conda_build.conda_interface.win_path_to_unix` (use `conda.common.path.win_path_to_unix` instead). (#5333)
+ * `conda_build.index.channel_data`; `conda_build.index.get_build_index` return value for `channel_data` is now always `None`. (#5333)
+ * `conda_build.metadata.check_circular_dependencies` (use `conda_build.metadata._check_circular_dependencies` instead). (#5406)
+ * `conda_build.metadata.toposort` (use `conda_build.metadata.toposort_outputs` instead). (#5406)
+ * `conda_build.utils._convert_lists_to_sets` (use `frozendict.deepfreeze` instead). (#5333)
+ * `conda_build.utils.HashableDict` (use `frozendict.deepfreeze` instead). (#5333)
+ * `conda_build.utils.represent_hashabledict` (use `frozendict.deepfreeze` instead). (#5333)
+ * `conda_build.utils.rm_rf(config)`. (#5333)
+ * `conda_build.variants.get_vars(loop_only)`. (#5333)
+
+### Other
+
+* Do not generate conda error reports for common exceptions. (#5264)
+* Add `CondaBuildUserError` exception to replace `sys.exit` calls. (#5353)
+* Updated the CI to download the MacOSX 10.15 SDK. (#5387)
+
+### Contributors
+
+* @beeankha
+* @conda-bot
+* @carterbox made their first contribution in https://github.com/conda/conda-build/pull/5216
+* @dholth
+* @isuruf
+* @jaimergp
+* @kenodegard
+* @zklaus made their first contribution in https://github.com/conda/conda-build/pull/5364
+* @beckermr
+* @minrk
+* @timkpaine made their first contribution in https://github.com/conda/conda-build/pull/4821
+* @dependabot[bot]
+* @pre-commit-ci[bot]
+
+
+
+## 24.5.1 (2024-05-23)
+
+### Bug fixes
+
+* Fix issue with modifying a `frozendict` when specifying `outputs/files` in `meta.yaml`. (#5342 via #5345)
+* Fix excessive memory use in `inspect_linkages_lief`. (#5267 via #5348)
+
+### Deprecations
+
+* Mark `conda_build.metadata.toposort` as deprecated. Use `conda_build.metadata.toposort_outputs` instead. (#5342 via #5345)
+* Mark `conda_build.metadata.check_circular_dependencies` as deprecated. Use `conda_build.metadata._check_circular_dependencies` instead. (#5342 via #5345)
+
+### Contributors
+
+* @beeankha
+* @kenodegard
+* @mbargull
+
+
+
+## 24.5.0 (2024-05-06)
+
+### Enhancements
+
+* Only fetch `lfs` files for specific `git_ref`. (#5202)
+* Add `conda_build.metadata._split_line_selector` to cache line-selector parsed text. (#5237)
+* Add `conda_build.render.open_recipe` context manager to detect the recipe type (file/`meta.yaml`, directory/recipe, or tarball/package) and properly handling any exit/close behavior. (#5238)
+* For Windows users, the stub executables used for Python entrypoints in packages are now codesigned. (#5252)
+* Require `conda >=23.7.0`. (#5271)
+
+### Bug fixes
+
+* Fix all CLI arguments to properly initialize `conda.base.context.context` with parsed arguments. Fixes issue with arguments not being processed (e.g., `--override-channels` was previously ignored). (#3693 via #5271)
+
+### Deprecations
+
+* Deprecate `conda_build.conda_interface.CONDA_VERSION` constant. Use `conda.__version__` instead. (#5222)
+* Deprecate `conda_build.conda_interface.binstar_upload` constant. Use `conda.base.context.context.binstar_upload` instead. (#5222)
+* Deprecate `conda_build.conda_interface.default_python` constant. Use `conda.base.context.context.default_python` instead. (#5222)
+* Deprecate `conda_build.conda_interface.envs_dirs` constant. Use `conda.base.context.context.envs_dirs` instead. (#5222)
+* Deprecate `conda_build.conda_interface.pkgs_dirs` constant. Use `conda.base.context.context.pkgs_dirs` instead. (#5222)
+* Deprecate `conda_build.conda_interface.cc_platform` constant. Use `conda.base.context.context.platform` instead. (#5222)
+* Deprecate `conda_build.conda_interface.root_dir` constant. Use `conda.base.context.context.root_prefix` instead. (#5222)
+* Deprecate `conda_build.conda_interface.root_writable` constant. Use `conda.base.context.context.root_writable` instead. (#5222)
+* Deprecate `conda_build.conda_interface.subdir` constant. Use `conda.base.context.context.subdir` instead. (#5222)
+* Deprecate `conda_build.conda_interface.create_default_packages` constant. Use `conda.base.context.context.create_default_packages` instead. (#5222)
+* Deprecate `conda_build.conda_interface.get_rc_urls` function. Use `conda.base.context.context.channels` instead. (#5222)
+* Deprecate `conda_build.conda_interface.get_prefix` function. Use `conda.base.context.context.target_prefix` instead. (#5222)
+* Deprecate `conda_build.conda_interface.get_conda_channel` function. Use `conda.models.channel.Channel.from_value` instead. (#5222)
+* Deprecate `conda_build.conda_interface.reset_context` function. Use `conda.base.context.reset_context` instead. (#5222)
+* Deprecate `conda_build.conda_interface.context` singleton. Use `conda.base.context.context` instead. (#5251)
+* Deprecate `conda_build.conda_interface.configparser` module. Use `configparser` instead. (#5251)
+* Deprecate `conda_build.conda_interface.os` module. Use `os` instead. (#5251)
+* Deprecate `conda_build.conda_interface.partial` function. Use `functools.partial` instead. (#5251)
+* Deprecate `conda_build.conda_interface.import_module` function. Use `importlib.import_module` instead. (#5251)
+* Deprecate `conda_build.conda_interface.determine_target_prefix` function. Use `conda.base.context.determine_target_prefix` instead. (#5251)
+* Deprecate `conda_build.conda_interface.non_x86_linux_machines` constant. Use `conda.base.context.non_x86_machines` instead. (#5251)
+* Deprecate `conda_build.conda_interface.ProgressiveFetchExtract` class. Use `conda.core.package_cache_data.ProgressiveFetchExtract` instead. (#5251)
+* Deprecate `conda_build.conda_interface.CondaError` class. Use `conda.exceptions.CondaError` instead. (#5251)
+* Deprecate `conda_build.conda_interface.CondaHTTPError` class. Use `conda.exceptions.CondaHTTPError` instead. (#5251)
+* Deprecate `conda_build.conda_interface.LinkError` class. Use `conda.exceptions.LinkError` instead. (#5251)
+* Deprecate `conda_build.conda_interface.LockError` class. Use `conda.exceptions.LockError` instead. (#5251)
+* Deprecate `conda_build.conda_interface.NoPackagesFoundError` class. Use `conda.exceptions.NoPackagesFoundError` instead. (#5251)
+* Deprecate `conda_build.conda_interface.PaddingError` class. Use `conda.exceptions.PaddingError` instead. (#5251)
+* Deprecate `conda_build.conda_interface.UnsatisfiableError` class. Use `conda.exceptions.UnsatisfiableError` instead. (#5251)
+* Deprecate `conda_build.conda_interface.get_conda_build_local_url` class. Use `conda.models.channel.get_conda_build_local_url` instead. (#5251)
+* Deprecate `conda_build.config.Config.override_channels`. Defer to `conda.base.context.context.channels` instead. (#5271, #5324)
+* Deprecate `conda_build.conda_interface._toposort`. Use `conda.common.toposort._toposort` instead. (#5276)
+* Deprecate `conda_build.conda_interface.add_parser_channels`. Use `conda.cli.helpers.add_parser_channels` instead. (#5276)
+* Deprecate `conda_build.conda_interface.add_parser_prefix`. Use `conda.cli.helpers.add_parser_prefix` instead. (#5276)
+* Deprecate `conda_build.conda_interface.ArgumentParser`. Use `conda.cli.conda_argparse.ArgumentParser` instead. (#5276)
+* Deprecate `conda_build.conda_interface.cc_conda_build`. Use `conda.base.context.context.conda_build` instead. (#5276)
+* Deprecate `conda_build.conda_interface.Channel`. Use `conda.models.channel.Channel` instead. (#5276)
+* Deprecate `conda_build.conda_interface.Completer`. Unused. (#5276)
+* Deprecate `conda_build.conda_interface.CondaSession`. Use `conda.gateways.connection.session.CondaSession` instead. (#5276)
+* Deprecate `conda_build.conda_interface.download`. Use `conda.gateways.connection.download.download` instead. (#5276)
+* Deprecate `conda_build.conda_interface.EntityEncoder`. Use `conda.auxlib.entity.EntityEncoder` instead. (#5276)
+* Deprecate `conda_build.conda_interface.env_path_backup_var_exists`. Unused. (#5276)
+* Deprecate `conda_build.conda_interface.FileMode`. Use `conda.models.enums.FileMode` instead. (#5276)
+* Deprecate `conda_build.conda_interface.human_bytes`. Use `conda.utils.human_bytes` instead. (#5276)
+* Deprecate `conda_build.conda_interface.input`. Use `input` instead. (#5276)
+* Deprecate `conda_build.conda_interface.InstalledPackages`. Unused. (#5276)
+* Deprecate `conda_build.conda_interface.lchmod`. Use `conda.gateways.disk.link.lchmod` instead. (#5276)
+* Deprecate `conda_build.conda_interface.MatchSpec`. Use `conda.models.match_spec.MatchSpec` instead. (#5276)
+* Deprecate `conda_build.conda_interface.NoPackagesFound`. Use `conda.exceptions.ResolvePackageNotFound` instead. (#5276)
+* Deprecate `conda_build.conda_interface.normalized_version`. Use `conda.models.version.normalized_version` instead. (#5276)
+* Deprecate `conda_build.conda_interface.PackageRecord`. Use `conda.models.records.PackageRecord` instead. (#5276)
+* Deprecate `conda_build.conda_interface.PathType`. Use `conda.models.enums.PathType` instead. (#5276)
+* Deprecate `conda_build.conda_interface.prefix_placeholder`. Use `conda.base.constants.PREFIX_PLACEHOLDER` instead. (#5276)
+* Deprecate `conda_build.conda_interface.Resolve`. Use `conda.resolve.Resolve` instead. (#5276)
+* Deprecate `conda_build.conda_interface.rm_rf`. Use `conda_build.utils.rm_rf` instead. (#5276)
+* Deprecate `conda_build.conda_interface.spec_from_line`. Use `conda.cli.common.spec_from_line` instead. (#5276)
+* Deprecate `conda_build.conda_interface.specs_from_args`. Use `conda.cli.common.specs_from_args` instead. (#5276)
+* Deprecate `conda_build.conda_interface.specs_from_url`. Use `conda.cli.common.specs_from_url` instead. (#5276)
+* Deprecate `conda_build.conda_interface.StringIO`. Use `io.StringIO` instead. (#5276)
+* Deprecate `conda_build.conda_interface.symlink_conda`. Unused. (#5276)
+* Deprecate `conda_build.conda_interface.TemporaryDirectory`. Use `conda.gateways.disk.create.TemporaryDirectory` instead. (#5276)
+* Deprecate `conda_build.conda_interface.TmpDownload`. Use `conda.gateways.connection.download.TmpDownload` instead. (#5276)
+* Deprecate `conda_build.conda_interface.unix_path_to_win`. Use `conda.utils.unix_path_to_win` instead. (#5276)
+* Deprecate `conda_build.conda_interface.Unsatisfiable`. Use `conda.exceptions.UnsatisfiableError` instead. (#5276)
+* Deprecate `conda_build.conda_interface.untracked`. Use `conda.misc.untracked` instead. (#5276)
+* Deprecate `conda_build.conda_interface.url_path`. Use `conda.utils.url_path` instead. (#5276)
+* Deprecate `conda_build.conda_interface.VersionOrder`. Use `conda.models.version.VersionOrder` instead. (#5276)
+* Deprecate `conda_build.conda_interface.walk_prefix`. Use `conda.misc.walk_prefix` instead. (#5276)
+* Deprecate `conda_build.conda_interface.win_path_to_unix`. Use `conda.common.path.win_path_to_unix` instead. (#5276)
+* Deprecate `conda_build.variants.get_vars(loop_only)`. (#5280)
+* Deprecate `conda_build.utils.HashableDict`. Use `frozendict.deepfreeze` instead. (#5284)
+* Deprecate `conda_build.utils._convert_lists_to_sets`. Use `frozendict.deepfreeze` instead. (#5284)
+* Deprecate `conda_build.utils.represent_hashabledict`. Use `frozendict.deepfreeze` instead. (#5284)
+* Deprecate `conda_build.config.noarch_python_build_age_default`. (#5298)
+* Postpone `conda_build.index.channel_data` deprecation. (#5299)
+* Remove `conda_build.api.get_output_file_path`. Use `conda_build.api.get_output_file_paths` instead. (#5299)
+* Remove `conda_build.bdist_conda`. (#5299)
+* Remove `conda_build.build.have_prefix_files`. (#5299)
+* Remove `conda_build.conda_interface.get_index`. Use `conda.core.index.get_index` instead. (#5299)
+* Remove `conda_build.conda_interface.get_version_from_git_tag`. Use `conda_build.environ.get_version_from_git_tag` instead. (#5299)
+* Remove `conda_build.conda_interface.handle_proxy_407`. Handled by `conda.gateways.connection.session.CondaSession`. (#5299)
+* Remove `conda_build.conda_interface.hashsum_file`. Use `conda.gateways.disk.read.compute_sum` instead. (#5299)
+* Remove `conda_build.conda_interface.md5_file`. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5299)
+* Remove `conda_build.environ._load_all_json`. (#5299)
+* Remove `conda_build.environ._load_json`. (#5299)
+* Remove `conda_build.environ.cached_actions`. (#5299)
+* Remove `conda_build.environ.Environment`. Use `conda.core.prefix_data.PrefixData` instead. (#5299)
+* Remove `conda_build.environ.InvalidEnvironment`. (#5299)
+* Remove `conda_build.environ.LINK_ACTION`. (#5299)
+* Remove `conda_build.environ.PREFIX_ACTION`. (#5299)
+* Remove `conda_build.index._apply_instructions`. Use `conda_index._apply_instructions` instead. (#5299)
+* Remove `conda_build.index.DummyExecutor`. (#5299)
+* Remove `conda_build.index.LOCK_TIMEOUT_SECS`. (#5299)
+* Remove `conda_build.index.LOCKFILE_NAME`. (#5299)
+* Remove `conda_build.index.MAX_THREADS_DEFAULT`. (#5299)
+
+### Other
+
+* Enable CodSpeed benchmarks for select tests. (#5233)
+
+### Contributors
+
+* @beeankha
+* @conda-bot
+* @jaimergp
+* @Callek made their first contribution in https://github.com/conda/conda-build/pull/5252
+* @kenodegard
+* @mbargull
+* @Tobias-Fischer made their first contribution in https://github.com/conda/conda-build/pull/5202
+* @ytausch made their first contribution in https://github.com/conda/conda-build/pull/5214
+* @dependabot[bot]
+* @pre-commit-ci[bot]
+
+
+
## 24.3.0 (2024-03-15)
### Enhancements
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 663464fe82..dfcca8ea82 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,8 +1,5 @@
# Conda Organization Code of Conduct
-> [!NOTE]
-> Below is the short version of our CoC, see the long version [here](https://github.com/conda-incubator/governance/blob/main/CODE_OF_CONDUCT.md).
-
# The Short Version
Be kind to others. Do not insult or put down others. Behave professionally. Remember that harassment and sexist, racist, or exclusionary jokes are not appropriate for the conda Organization.
@@ -18,3 +15,289 @@ Thank you for helping make this a welcoming, friendly community for all.
* Report a code of conduct incident [using a form](https://form.jotform.com/221527028480048).
* Report a code of conduct incident via email: [conduct@conda.org](mailto:conduct@conda.org).
* Contact [an individual committee member](#committee-membership) or [CoC event representative](#coc-representatives) to report an incident in confidence.
+
+
+
+And now the longer version...
+
+# Conda Organization Diversity Statement
+
+The conda Organization welcomes and encourages participation in our community by people of all backgrounds and identities. We are committed to promoting and sustaining a culture that values mutual respect, tolerance, and learning, and we work together as a community to help each other live out these values.
+
+We have created this diversity statement because we believe that a diverse community is stronger, more vibrant, and produces better software and better science. A diverse community where people treat each other with respect has more potential contributors, more sources for ideas, and fewer shared assumptions that might hinder development or research.
+
+Although we have phrased the formal diversity statement generically to make it all-inclusive, we recognize that there are specific identities that are impacted by systemic discrimination and marginalization. We welcome all people to participate in the conda Organization community regardless of their identity or background.
+
+# Conda Organization Code of Conduct: Introduction & Scope
+
+This code of conduct should be honored by everyone who participates in the conda Organization community. It should be honored in any conda Organization-related activities, by anyone claiming affiliation with the conda Organization, and especially when someone is representing the conda Organization in any role (including as an event volunteer or speaker).
+
+This code of conduct applies to all spaces managed by the conda Organization, including all public and private mailing lists, issue trackers, wikis, forums, and any other communication channel used by our community. The code of conduct equally applies at conda Organization events and governs standards of behavior for attendees, speakers, volunteers, booth staff, and event sponsors.
+
+This code is not exhaustive or complete. It serves to distill our understanding of a collaborative, inclusive community culture. Please try to follow this code in spirit as much as in letter, to create a friendly and productive environment that enriches the conda Organization community.
+
+The conda Organization Code of Conduct follows below.
+
+# Standards for Behavior
+
+The conda Organization is a worldwide community. All communication should be appropriate for a professional audience including people of many different backgrounds.
+
+**Please always be kind and courteous. There's never a need to be mean or rude or disrespectful.** Thank you for helping make this a welcoming, friendly community for all.
+
+We strive to:
+
+**Be empathetic, welcoming, friendly, and patient.** We remember that the conda Organization is crafted by human beings who deserve to be treated with kindness and empathy. We work together to resolve conflict and assume good intentions. We may all experience some frustration from time to time, but we do not allow frustration to turn into a personal attack. A community where people feel uncomfortable or threatened is not a productive one.
+
+**Be collaborative.** Our work depends on the participation of many people, and in turn others depend on our work. Open source communities depend on effective and friendly collaboration to achieve their goals.
+
+**Be inquisitive.** Nobody knows everything! Asking questions early avoids many problems later, so we encourage questions, although we may direct them to the appropriate forum. We will try hard to be responsive and helpful.
+
+**Be careful in the words that we choose.** We are careful and respectful in our communication and we take responsibility for our own speech. Be kind to others. Do not insult or put down other members of the community.
+
+## Unacceptable Behavior
+
+We are committed to making participation in this community a harassment-free experience.
+
+We will not accept harassment or other exclusionary behaviors, such as:
+
+- The use of sexualized language or imagery
+- Excessive profanity (please avoid curse words; people differ greatly in their sensitivity to swearing)
+- Posting sexually explicit or violent material
+- Violent or intimidating threats or language directed against another person
+- Inappropriate physical contact and/or unwelcome sexual attention or sexual comments
+- Sexist, racist, or otherwise discriminatory jokes and language
+- Trolling or insulting and derogatory comments
+- Written or verbal comments which have the effect of excluding people on the basis of membership in a specific group, including level of experience, gender, gender identity and expression, sexual orientation, disability, neurotype, personal appearance, body size, race, ethnicity, age, religion, or nationality
+- Public or private harassment
+- Sharing private content, such as emails sent privately or non-publicly, or direct message history, without the sender's consent
+- Continuing to initiate interaction (such as photography, recording, messaging, or conversation) with someone after being asked to stop
+- Sustained disruption of talks, events, or communications, such as heckling of a speaker
+- Publishing (or threatening to post) other people's personally identifying information ("doxing"), such as physical or electronic addresses, without explicit permission
+- Other unethical or unprofessional conduct
+- Advocating for, or encouraging, any of the above behaviors
+
+The conda Organization prioritizes marginalized people’s safety over privileged people’s comfort. The conda CoC Committee reserves the right not to act on complaints including, but not limited to:
+
+* ‘Reverse’ -isms, including ‘reverse racism,’ ‘reverse sexism,’ and ‘cisphobia’.
+* Reasonable communication of boundaries, such as “leave me alone,” “go away,” or “I’m not discussing this with you.”
+* Communicating in a ‘tone’ you don’t find congenial.
+* Criticizing racist, sexist, cissexist, or otherwise oppressive behavior or assumptions.
+
+## Behavior Outside of conda Organization Spaces
+
+The CoC Committee does not influence behavior and membership in spaces outside the conda Organization. However, if you are being harassed by a member of the conda community outside our spaces, you may still report it to the CoC Committee. We will take all good-faith reports of harassment by conda community members seriously. This includes harassment outside our spaces and harassment that took place at any point in time.
+
+The CoC Committee reserves the right to exclude people from conda Organization spaces based on their past behavior, including behavior outside conda Organization spaces and behavior towards people who are not in the conda community.
+
+# Confidentiality and Public Statements to the Community
+
+The CoC Committee will keep the identity of the reporter confidential.
+
+Whenever possible, CoC cases will be reported to the community. The level of detail in reports will vary from case to case. Reports will describe at least the type of infraction that was reported, and the Committee's decision and any action taken. In most cases, the report will not include personally identifiable information.
+
+# Live Events
+
+> **If you feel your safety is in jeopardy or the situation is an emergency, we urge you to contact local law enforcement before making a report to the event's Code of Conduct committee members, [representatives](#coc-representatives), or other staff.** (In the U.S., call 911.)
+
+Live events present particular challenges:
+
+**Code of conduct reports, and consequences that stem from them, merit a thoughtful and deliberative process. Decisions and consequences matter for the reporter, the reported, and for the community at large. However, many reports, especially at live events, require rapid action to quickly address the behavior being reported.**
+
+To better support situations where immediate action may be required, these guidelines are used *during* live events:
+
+* All conda Organization events will have specific, named Code of Conduct contacts for the events.
+* The names and contact mechanisms for the Code of Conduct representatives will be clearly and frequently communicated to event participants.
+
+## CoC Representatives
+
+Every conda Organization associated event will have named CoC Committee members or *CoC representatives* that are the first point of contact for that event. Who these people are will be clearly and frequently communicated to event participants. CoC approved representatives are used when there are no committee members participating in the event.
+
+## Live Events: Reporting and Actions
+At conda Organization events, Code of Conduct committee members or representatives will attempt to gather and write down [information](#what-to-include-in-a-report) from anyone making a verbal report at a live event. Recording the details in writing is exceedingly important in order for us to effectively respond to reports. If event staff write down a report taken verbally, then the person making the report will be asked to review the written report for accuracy.
+
+For reports made during live events, or in any situation where urgent action is needed:
+
+* Any two (or more) event organizers, event staff, CoC Committee members or CoC representatives can decide if immediate action is to be taken and what that action is. In exceptionally dangerous situations, this decision can be made by a single person.
+* These rapid decisions can be reconsidered during the event as more information becomes available.
+* The scope of any rapid decision is limited to the current event / situation.
+* The report, any related information, and any decisions and consequences will be reported to the full Code of Conduct Committee as soon as possible.
+
+The full Code of Conduct Committee will then consider the report using the full timeline and processes defined below. The Committee may decide to apply consequences in other spaces beyond the space where the behavior was reported.
+
+Potential *immediate* consequences for violating the conda Organization Code of Conduct at a live event include, but are not limited to:
+
+- Warning the person to cease their behavior and that any further reports will result in sanctions
+- Requiring that the person avoid any interaction with, and physical proximity to, the person they are harassing for the remainder of the event
+- Ending a talk that violates the policy early
+- Not publishing the video or slides of a talk that violated the policy
+- Not allowing a speaker who violated the policy to give (further) talks at the event now or in the future
+- Immediately ending any event volunteer responsibilities and privileges the reported person holds
+- Expelling the person from the event without a refund
+- Requiring that the person immediately leave the event and not return
+- Any other response that the CoC members, representatives, or event staff deem necessary and appropriate to the situation
+
+# Reporting Guidelines
+
+If you believe someone is violating the code of conduct, please report this in a timely manner. Code of conduct violations reduce the value of the community for everyone. The conda Code of Conduct (CoC) Committee and the conda Organization take reports of misconduct very seriously and are committed to preserving and maintaining the welcoming nature of our community.
+
+> [!NOTE]
+> You are also encouraged to reach out to the conda Code of Conduct (CoC) Committee if you want clarification on something, if you notice some borderline behavior, or just have a concern. Send us a note at [conduct@conda.org](mailto:conduct@conda.org).
+
+All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The conda CoC Committee commits to maintaining confidentiality with regard to the reporter of an incident.
+
+For possibly unintentional breaches of the code of conduct, you may want to respond to the person and point out this code of conduct (either in public or in private, whatever is most appropriate). If you would prefer not to do that, please report the issue to the conda CoC Committee directly.
+
+Take care of each other. Alert someone if you notice a dangerous situation, someone in distress, or violations of this code of conduct, even if they seem inconsequential.
+
+## How to Submit a Report
+
+The CoC Committee is committed to promptly addressing any reported issues. If you have experienced or witnessed behavior that violates the conda Organization Code of Conduct, please let us know.
+
+You can report an incident:
+
+* via the **[Incident Reporting Form](https://form.jotform.com/221527028480048)**
+* via email: [conduct@conda.org](mailto:conduct@conda.org)
+* contact [an individual committee member](#committee-membership) or [CoC event representative](#coc-representatives) to report an incident in confidence.
+
+Reports submitted via the form or committee email address are sent to the [full conda Code of Conduct Committee](#committee-membership).
+
+## What to Include in a Report
+
+Our ability to address any code of conduct breaches in a timely and effective manner is impacted by the amount of information you can provide, so we ask you to include as much of the following information as you can:
+
+- **Your contact info** (so we can get in touch with you if we need to follow up). This will be kept confidential. You can also file a report [anonymously](#anonymous-reporting).
+- The **approximate time and location of the incident** (please be as specific as possible).
+- **Identifying information** (e.g. name, nickname, screen name, physical description) of the individual whose behavior is being reported.
+- **Description of the behavior** (if reporting harassing language, please be specific about the words used), **your account of what happened**, and any available **supporting records** (e.g. email, GitHub issue, screenshots, etc.).
+- **Description of the circumstances/context** surrounding the incident.
+- Let us know **if the incident is ongoing**, and/or if this is part of an ongoing pattern of behavior.
+- Names and contact info, if possible, of **anyone else who witnessed** or was involved in this incident. (Did anyone else observe the incident?)
+- **Any other relevant information** you believe we should have.
+
+## Anonymous Reporting
+
+The reporting form supports anonymous incident reporting. Anonymous reporting works best when the behavior happened in a public space and was witnessed by many. If an incident is reported anonymously and was not witnessed by others, then the committee may be limited in what actions it can take and what it can report to the larger community. Nevertheless, the CoC Committee is still interested in receiving these reports. They are helpful when determining what we need to address as a community, and when looking for evidence of repeated behavior.
+
+## Conflicts of Interest
+
+Committee members are expected to recuse themselves if they have a conflict of interest, and are required to recuse themselves if they are the accused or the target of the reported behavior. In addition, the CoC Committee can [vote](#voting-and-decision-making) to remove a committee member from a case, if the committee feels that the member has a conflict of interest. This [vote](#voting-and-decision-making) requires a simple majority.
+
+If you are concerned about making a report that will be read by all committee members, you are strongly encouraged to contact [individual committee members](#committee-membership) directly.
+
+# Enforcement: What Happens After a Report is Filed?
+
+## Acknowledgment and Responding to Immediate Needs
+
+CoC Committee members and/or event staff will attempt to ensure your safety and help with any immediate needs. The CoC Committee will make every effort to **acknowledge receipt within 24 hours** (and we'll aim for much more quickly than that).
+
+## Reviewing the Report
+
+The CoC Committee will make all efforts to **review the incident within three days** and determine:
+
+- Whether this is an ongoing situation, or if there is a threat to anyone's physical safety
+- What happened
+- Whether this event constitutes a code of conduct violation
+- Who the bad actor was, if any
+
+## Contacting the Person Reported
+
+After the CoC Committee has had time to review and discuss the report, someone will attempt to contact the person who is the subject of the report to inform them of what has been reported about them. We will then ask that person for their account of what happened.
+
+## Response and Potential Consequences
+
+Once the CoC Committee has completed our investigation of the report, we will make a decision as to how to respond. The person making a report will not normally be consulted as to the proposed resolution of the issue, except insofar as we need to understand how to help them feel safe.
+
+Potential consequences for violating the conda Organization code of conduct include:
+
+- Nothing (if we determine that no violation occurred)
+- Private feedback or reprimand from the CoC Committee to the individual(s) involved
+- Warning the person to cease their behavior and that any further reports will result in sanctions
+- A public announcement that an incident occurred
+- Mediation (only if both reporter and reportee agree)
+- An imposed vacation (e.g. asking someone to "take a week off" from a mailing list)
+- A permanent or temporary ban from some or all the conda Organization spaces (mailing lists, GitHub repos, in-person events, etc.)
+- Assistance to the complainant with a report to other bodies, for example, institutional offices or appropriate law enforcement agencies
+- Removing a person from the conda Organization membership or other formal affiliation
+- Publishing an account of the harassment and calling for the resignation of the alleged harasser from their responsibilities (may be called for if the person is an event leader, or refuses to stand aside from the conflict of interest, or similar)
+- Any other response that the CoC Committee deems necessary and appropriate to the situation
+
+No one espousing views or values contrary to the standards of our code of conduct will be permitted to hold any position representing the conda Organization, including volunteer positions. The CoC Committee has the right and responsibility to remove, edit, or reject comments, commits, code, website edits, issues, and other contributions that are not aligned with this code of conduct.
+
+We aim to **respond within one week** to the original reporter with either a resolution or an explanation of why the situation is not yet resolved.
+
+We will contact the person who is the subject of the report to let them know what actions will be taken as a result of the report, if any.
+
+Our policy is to make sure that everyone aware of the initial incident is also made aware that official action has been taken, while still respecting the privacy of individuals. In addition, we will also usually [notify the community](#confidentiality-and-public-statements-to-the-community) that an incident has been reported, what type of incident it was, and what the response was, again respecting the privacy of individuals.
+
+## Appealing a Decision
+
+To appeal a decision of the CoC Committee, contact the [Committee Co-Chairs](#committee-membership), with your appeal. Please include as much detail as possible about why you are appealing the decision. The Co-Chairs will review the appeal, possibly consulting with the full Committee, and then issue a decision.
+
+# Timeline Summary:
+
+| Time | Event | Details |
+| ---- | ---- | ---- |
+| Within 24 Hours | Acknowledge | The CoC Committee will make every effort to **acknowledge receipt of a report within 24 hours**. |
+| Within 3 Days | Review | The CoC Committee aims to **review the incident within three days**. |
+| Within 1 Week | Resolve | We will **respond within one week** to the original reporter with either a resolution or an explanation of why the situation is not yet resolved. |
+
+# Voting and Decision Making
+
+Committee votes and decisions require both a minimum quorum size for the vote to be counted, and then a minimum percentage of cast affirmative votes to pass.
+
+Except where otherwise noted, votes require a quorum and a simple majority to pass:
+
+* Minimum Quorum:
+ * More than 50% of eligible committee members must vote.
+ * Eligible members do not include those excluded because of [conflicts of interest](#conflicts-of-interest).
+* Affirmative vote threshold:
+ * More than 50% of the votes cast need to be affirmative to take action.
+
+# Committee Membership
+
+You can reach the entire CoC Committee by emailing [conduct@conda.org](mailto:conduct@conda.org).
+
+| Name | Employer / Funding | Steering Council Member | Current Term Ends |
+| ---- | ---- | ---- | --- |
+| [Eric Dill](https://github.com/ericdill) | [Anaconda](https://anaconda.com/) | | 2026-07-01 |
+| [Dasha Gurova](https://github.com/dashagurova) | [Anaconda](https://anaconda.com/) | | 2026-07-01 |
+| [Bianca Henderson](https://github.com/beeankha) | [Anaconda](https://anaconda.com/) | | 2026-07-01 |
+| [Katherine Kinnaman](https://github.com/kathatherine) | [Anaconda](https://anaconda.com/) | | 2026-07-01 |
+| [Mahe Iram Khan](https://github.com/ForgottenProgramme) | [Anaconda](https://anaconda.com/) | | 2025-07-01 |
+| [Ken Odegard](https://github.com/kenodegard) | [Anaconda](https://anaconda.com/) | | 2025-07-01 |
+| [Crystal Soja](https://github.com/csoja), Co-Chair | [Anaconda](https://anaconda.com/) | | 2025-07-01 |
+| [Jaime Rodríguez-Guerra](https://github.com/jaimergp), Co-Chair | [Quansight](https://quansight.com/) | yes | 2025-07-01 |
+
+# Terms and New Members
+
+* Committee members are appointed for two year terms. Committee members can choose to renew their memberships.
+* Committee members can resign before their term ends.
+* Committee members can also be removed by a [simple majority vote](#voting-and-decision-making) from their fellow committee members.
+* New committee members are added by a simple majority vote as well.
+
+# Eligibility
+
+Anyone from the community who is interested and able to do CoC Committee work is eligible to be nominated for the committee. New committee members can be nominated by any community member, including nominating themselves.
+
+## Shared Funding
+
+Unlike the Steering Council, we are not limiting the number of Committee members who share a common source of funding. However, if a report involves someone who shares funding with CoC Committee members, then the remainder of the committee may vote to exclude some or all Committee members with that same funding, even if that excludes a majority of the CoC Committee. This should be done only if the separately funded Committee members feel that the common funding is interfering with decision making.
+
+Note: This requires tracking the funding sources of CoC Committee members.
+
+## Overlap with Steering Council Membership
+
+Committee members can also be on the conda Organization Steering Council. However, Steering Council members have to make up less than 50% of the Code of Conduct Committee.
+
+# Updating this Code of Conduct
+
+The conda Organization's Code of Conduct can be updated by a [simple majority vote](#voting-and-decision-making) of the CoC Committee.
+
+
+# License
+
+This code of conduct is based on the [NumFOCUS code of conduct template](https://github.com/numfocus/numfocus/blob/8759e21481552f213489e3718979ccecf68e9ead/manual/numfocus-coc.md) as it existed on 2022/03/08 (which is the 2019/11/20 version). Several added sections are based on the [Galaxy Community Code of Conduct](https://galaxyproject.org/community/coc/).
+
+The NumFOCUS code of conduct template was itself adapted from numerous sources, including the [*Geek Feminism wiki, created by the Ada Initiative and other volunteers, which is under a Creative Commons Zero license*](http://geekfeminism.wikia.com/wiki/Conference_anti-harassment/Policy), the [*Contributor Covenant version 1.2.0*](http://contributor-covenant.org/version/1/2/0/), the [*Bokeh Code of Conduct*](https://github.com/bokeh/bokeh/blob/master/CODE_OF_CONDUCT.md), the [*SciPy Code of Conduct*](https://github.com/jupyter/governance/blob/master/conduct/enforcement.md), the [*Carpentries Code of Conduct*](https://docs.carpentries.org/topic_folders/policies/code-of-conduct.html#enforcement-manual), and the [*NeurIPS Code of Conduct*](https://neurips.cc/public/CodeOfConduct).
+
+**The conda Organization Code of Conduct is licensed under the [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/).**
diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md
index 46a13ecd98..f642341e1b 100644
--- a/HOW_WE_USE_GITHUB.md
+++ b/HOW_WE_USE_GITHUB.md
@@ -1,3 +1,5 @@
+
+
[conda-org]: https://github.com/conda
[sub-team]: https://github.com/conda-incubator/governance#sub-teams
@@ -15,17 +17,18 @@
[infrastructure]: https://github.com/conda/infrastructure
[workflow-sync]: https://github.com/conda/infrastructure/blob/main/.github/workflows/sync.yml
+[workflow-update]: https://github.com/conda/conda-build/blob/main/.github/workflows/update.yml
[labels-global]: https://github.com/conda/infrastructure/blob/main/.github/global.yml
-[workflow-cla]: /.github/workflows/cla.yml
-[workflow-issues]: /.github/workflows/issues.yml
-[workflow-labels]: /.github/workflows/labels.yml
-[workflow-lock]: /.github/workflows/lock.yml
-[workflow-project]: /.github/workflows/project.yml
-[workflow-stale]: /.github/workflows/stale.yml
-[labels-local]: /.github/labels.yml
-[labels-page]: ../../labels
+[workflow-cla]: https://github.com/conda/conda-build/blob/main/.github/workflows/cla.yml
+[workflow-issues]: https://github.com/conda/conda-build/blob/main/.github/workflows/issues.yml
+[workflow-labels]: https://github.com/conda/conda-build/blob/main/.github/workflows/labels.yml
+[workflow-lock]: https://github.com/conda/conda-build/blob/main/.github/workflows/lock.yml
+[workflow-project]: https://github.com/conda/conda-build/blob/main/.github/workflows/project.yml
+[workflow-stale]: https://github.com/conda/conda-build/blob/main/.github/workflows/stale.yml
+[labels-local]: https://github.com/conda/conda-build/blob/main/.github/labels.yml
+[labels-page]: https://github.com/conda/conda-build/labels
# How We Use GitHub
@@ -105,7 +108,7 @@ Sorting engineers are a conda governance [sub-team][sub-team]; they are a group
New issues that are opened in any of the repositories in the [conda GitHub organization][conda-org] will show up in the "Sorting" tab of the [Planning project][project-planning]. There are two [GitHub Actions][docs-actions] workflows utilized for this purpose; [`.github/workflows/issues.yml`][workflow-issues] and [`.github/workflows/project.yml`][workflow-project].
-The GitHub Actions in the [`conda/infrastructure`][infrastructure] repository are viewed as canonical; the [`.github/workflows/sync.yml` workflow][workflow-sync] sends out any modifications to other `conda` repositories from there.
+The GitHub workflows in the [`conda/infrastructure`][infrastructure] repository are viewed as canonical; the [`.github/workflows/sync.yml` workflow][workflow-sync] pushes any modifications to other repositories from there and individual repositories can pull additional files using the [`.github/workflows/update.yml`][workflow-update] workflow.
### What is done about the issues in the "Sorting" tab?
@@ -126,8 +129,8 @@ For more information on the sorting process, see [Issue Sorting Procedures](#iss
Items move out of the ["Sorting" tab][project-sorting] once the investigatory phase described in [What is done about the issues in the "Sorting" tab?](#what-is-done-about-the-issues-in-the-sorting-tab) has concluded and the sorting engineer has enough information to make a decision about the appropriate resolution schedule for the issue. The additional tabs in the project board that the issues can be moved to include the following:
-- **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. Add the [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) label to move an issue to this tab.
-- **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. Add the [`backlog`](https://github.com/conda/infrastructure/labels/backlog) label to move an issue to this tab.
+- **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. Add the https://github.com/conda/conda-build/labels/type%3A%3Asupport label to move an issue to this tab.
+- **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. Add the https://github.com/conda/conda-build/labels/backlog label to move an issue to this tab.
- **"Closed"** - The issue was closed due to being a duplicate, being redirected to a different project, was a user error, a question that has been resolved, etc.
### Where do work issues go after being sorted?
@@ -143,12 +146,12 @@ Issues are "backlogged" when they have been sorted but not yet earmarked for an
Global automation procedures synced out from the [`conda/infrastructure`][infrastructure] repo include:
- [Marking of issues and pull requests as stale][workflow-stale], resulting in:
- - issues marked as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) being labeled stale after 21 days of inactivity and being closed after 7 further days of inactivity (that is, closed after 30 inactive days total)
- - all other inactive issues (not labeled as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity)
+ - issues marked as https://github.com/conda/conda-build/labels/type%3A%3Asupport being labeled stale after 21 days of inactivity and being closed after 7 further days of inactivity (that is, closed after 30 inactive days total)
+ - all other inactive issues (not labeled as https://github.com/conda/conda-build/labels/type%3A%3Asupport) being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity)
- all inactive pull requests being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity)
- [Locking of closed issues and pull requests with no further activity][workflow-lock] after 365 days
- [Adding new issues and pull requests to the respective project boards][workflow-project]
-- [Indicating an issue is ready for the sorting engineer's attention][workflow-issues] by toggling [`pending::feedback`](https://github.com/conda/infrastructure/labels/pending%3A%3Afeedback) with [`pending::support`](https://github.com/conda/infrastructure/labels/pending%3A%3Asupport) after a contributor leaves a comment
+- [Indicating an issue is ready for the sorting engineer's attention][workflow-issues] by toggling https://github.com/conda/conda-build/labels/pending%3A%3Afeedback with https://github.com/conda/conda-build/labels/pending%3A%3Asupport after a contributor leaves a comment
- [Verifying that contributors have signed the CLA][workflow-cla] before allowing pull requests to be merged; if the contributor hasn't signed the CLA previously, merging is be blocked until a manual review can be done
- [Syncing out templates, labels, workflows, and documentation][workflow-sync] from [`conda/infrastructure`][infrastructure] to the other repositories
@@ -166,9 +169,9 @@ Labeling is a very important means for sorting engineers to keep track of the cu
Each label has an associated description that clarifies how the label should be used. Hover on the label to see its description. Label colors are used to distinguish labels by category.
-Generally speaking, labels with the same category are considered mutually exclusive, but in some cases labels sharing the same category can occur concurrently, as they indicate qualifiers as opposed to types. For example, we may have the following types, [`type::bug`](https://github.com/conda/infrastructure/labels/type%3A%3Abug), [`type::feature`](https://github.com/conda/infrastructure/labels/type%3A%3Afeature), and [`type::documentation`](https://github.com/conda/infrastructure/labels/type%3A%3Adocumentation), where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue should not be a bug _and_ a feature request at the same time). Alternatively, with issues involving specific operating systems (_i.e._, [`os::linux`](https://github.com/conda/infrastructure/labels/os%3A%3Alinux), [`os::macos`](https://github.com/conda/infrastructure/labels/os%3A%3Amacos), and [`os::windows`](https://github.com/conda/infrastructure/labels/os%3A%3Awindows)), an issue could be labeled with one or more, depending on the system(s) the issue occurs on.
+Generally speaking, labels with the same category are considered mutually exclusive, but in some cases labels sharing the same category can occur concurrently, as they indicate qualifiers as opposed to types. For example, we may have the following types, https://github.com/conda/conda-build/labels/type%3A%3Abug, https://github.com/conda/conda-build/labels/type%3A%3Afeature, and https://github.com/conda/conda-build/labels/type%3A%3Adocumentation, where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue should not be a bug _and_ a feature request at the same time). Alternatively, with issues involving specific operating systems (_i.e._, https://github.com/conda/conda-build/labels/os%3A%3Alinux, https://github.com/conda/conda-build/labels/os%3A%3Amacos, and https://github.com/conda/conda-build/labels/os%3A%3Awindows), an issue could be labeled with one or more, depending on the system(s) the issue occurs on.
-Please note that there are also automation policies in place that are affected by labeling. For example, if an issue is labeled as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport), that issue will be marked [`stale`](https://github.com/conda/infrastructure/labels/stale) after 21 days of inactivity and auto-closed after seven more days without activity (30 inactive days total), which is earlier than issues without this label. See [What automation procedures are currently in place?](#what-automation-procedures-are-currently-in-place) for more details.
+Please note that there are also automation policies in place that are affected by labeling. For example, if an issue is labeled as https://github.com/conda/conda-build/labels/type%3A%3Asupport, that issue will be marked https://github.com/conda/conda-build/labels/stale after 21 days of inactivity and auto-closed after seven more days without activity (30 inactive days total), which is earlier than issues without this label. See [What automation procedures are currently in place?](#what-automation-procedures-are-currently-in-place) for more details.
### What labels are required for each issue?
@@ -178,7 +181,7 @@ The `type` labels are exclusive of each other: each sorted issue should have exa
The `source` labels are exclusive of each other: each sorted issue should have exactly one `source` label. These labels give information on the sub-group to which the issue's author belongs (_e.g._, a partner, a frequent contributor, the wider community, etc.). Through these labels, maintainers gain insight into how well we're meeting the needs of various groups.
-The `severity` labels are exclusive of each other and, while required for the [`type::bug`](https://github.com/conda/infrastructure/labels/type%3A%bug) label, they can also be applied to other types to indicate demand or need. These labels help us to prioritize our work. Severity is not the only factor for work prioritization, but it is an important consideration.
+The `severity` labels are exclusive of each other and, while required for the https://github.com/conda/conda-build/labels/type%3A%bug label, they can also be applied to other types to indicate demand or need. These labels help us to prioritize our work. Severity is not the only factor for work prioritization, but it is an important consideration.
Please review the descriptions of the `type`, `source`, and `severity` labels on the [labels page][labels-page] prior to use.
@@ -213,69 +216,49 @@ Below are some boilerplate responses for the most commonly-seen issues to be sor
This is a duplicate of [link to primary issue]; please feel free to continue the discussion there.
-> [!WARNING]
-> Apply the https://github.com/conda/infrastructure/labels/duplicate label to the issue being closed and https://github.com/conda/infrastructure/labels/duplicate%3A%3Aprimary to the original issue.
+
+
+> **Warning**
+> Apply the https://github.com/conda/conda-build/labels/duplicate label to the issue being closed and https://github.com/conda/conda-build/labels/duplicate%3A%3Aprimary to the original issue.
-Requesting an Uninstall/Reinstall of conda
+Anaconda Products
+Thank you for filing this issue! Unfortunately, this is off-topic for this repo because it is related to an Anaconda product.
+If you are encountering issues with Anaconda products or services, you have several options for receiving community
+support:
-Please uninstall your current version of `conda` and reinstall the latest version.
-Feel free to use either the [miniconda](https://docs.anaconda.com/free/miniconda/)
-or [anaconda](https://www.anaconda.com/products/individual) installer,
-whichever is more appropriate for your needs.
+- [Anaconda community forums](https://community.anaconda.cloud)
+- [Anaconda issue tracker on GitHub](https://github.com/ContinuumIO/anaconda-issues/issues)
-
-
-
-Redirect to Anaconda Issue Tracker
-
-
-
-
-Thank you for filing this issue! Unfortunately, this is off-topic for this repo.
-If you are still encountering this issue please reopen in the
-[Anaconda issue tracker](https://github.com/ContinuumIO/anaconda-issues/issues)
-where `conda` installer/package issues are addressed.
-
-
-> [!WARNING]
-> Apply the https://github.com/conda/infrastructure/labels/off-topic label to these issues before closing them out.
+
+
+> **Warning**
+> Apply the https://github.com/conda/conda-build/labels/off-topic label to these issues before closing them out.
-Redirecting to Nucleus Forums
+General Off Topic
-
+Unfortunately, this issue is outside the scope of support we offer via GitHub or is not directly related to this project.
+Community support can be found elsewhere, though, and we encourage you to explore the following options:
-Unfortunately, this issue is outside the scope of support we offer via GitHub;
-if you continue to experience the problems described here,
-please post details to the [Nucleus forums](https://community.anaconda.cloud/).
+- [Conda discourse forum](https://conda.discourse.group/)
+- [Community chat channels](https://conda.org/community#chat)
+- [Stack Overflow posts tagged "conda"](https://stackoverflow.com/questions/tagged/conda)
-> [!WARNING]
-> Apply the https://github.com/conda/infrastructure/labels/off-topic label to these issues before closing them out.
-
-
-
-
-Slow solving of conda environment
-
-
-
-Hi [@username],
-
-Thanks for voicing your concern about the performance of the classic dependency solver. To fix this, our official recommendation is using the new default "conda-libmamba-solver" instead of the classic solver (more information about the "conda-libmamba-solver" can be found here: https://conda.github.io/conda-libmamba-solver/getting-started/).
-
-In most cases "conda-libmamba-solver" should be significantly faster than the "classic" solver. We hope it provides you with a much better experience going forward.
-
+
+
+> **Warning**
+> Apply the https://github.com/conda/conda-build/labels/off-topic label to these issues before closing them out.
diff --git a/README.md b/README.md
index cae61abbfd..06d2889d93 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,19 @@
-[ci-tests-badge]: https://github.com/conda/conda-build/actions/workflows/tests.yml/badge.svg
-[codecov-badge]: https://img.shields.io/codecov/c/github/conda/conda-build/main.svg?label=coverage
-[release-badge]: https://img.shields.io/github/release/conda/conda-build.svg
+[tests-badge]: https://img.shields.io/github/actions/workflow/status/conda/conda-build/tests.yml?branch=main&event=schedule&logo=github&label=tests
+[codecov-badge]: https://img.shields.io/codecov/c/github/conda/conda-build/main?logo=codecov
+[codspeed-badge]: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json
+[release-badge]: https://img.shields.io/github/v/release/conda/conda?logo=github
+[anaconda-badge]: https://img.shields.io/conda/vn/anaconda/conda-build?logo=anaconda
+[conda-forge-badge]: https://img.shields.io/conda/vn/conda-forge/conda-build?logo=conda-forge
# `conda-build`
-[![CI Tests (GitHub Actions)][ci-tests-badge]](https://github.com/conda/conda-build/actions/workflows/tests.yml)
+[![GitHub Scheduled Tests][tests-badge]](https://github.com/conda/conda-build/actions/workflows/tests.yml?query=branch%3Amain+event%3Aschedule)
[![Codecov Status][codecov-badge]](https://codecov.io/gh/conda/conda-build/branch/main)
-[![latest release version][release-badge]](https://github.com/conda/conda/releases)
+[![CodSpeed Performance Benchmarks][codspeed-badge]](https://codspeed.io/conda/conda-build)
+
+[![GitHub Release][release-badge]](https://github.com/conda/conda-build/releases)
+[![Anaconda Package][anaconda-badge]](https://anaconda.org/anaconda/conda-build)
+[![conda-forge Package][conda-forge-badge]](https://anaconda.org/conda-forge/conda-build)
## Installation
diff --git a/RELEASE.md b/RELEASE.md
index d45614facc..ade91a373c 100644
--- a/RELEASE.md
+++ b/RELEASE.md
@@ -1,36 +1,34 @@
-
+
-
-[epic template]: https://github.com/conda/conda/issues/new?assignees=&labels=epic&template=epic.yml
-[compare]: https://github.com/conda/infrastructure/compare
-[new release]: https://github.com/conda/infrastructure/releases/new
-
+[epic template]: https://github.com/conda/conda-build/issues/new?assignees=&labels=epic&template=epic.yml
+[compare]: https://github.com/conda/conda-build/compare
+[new release]: https://github.com/conda/conda-build/releases/new
[infrastructure]: https://github.com/conda/infrastructure
[rever docs]: https://regro.github.io/rever-docs
[release docs]: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes
[merge conflicts]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts
-[Anaconda Recipes]: https://github.com/AnacondaRecipes/conda-feedstock
-[conda-forge]: https://github.com/conda-forge/conda-feedstock
+[Anaconda Recipes]: https://github.com/AnacondaRecipes/conda-build-feedstock
+[conda-forge]: https://github.com/conda-forge/conda-build-feedstock
# Release Process
-> **Note:**
+> [!NOTE]
> Throughout this document are references to the version number as `YY.M.[$patch_number]`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`.
## 1. Open the release issue and cut a release branch. (do this ~1 week prior to release)
-> **Note:**
+> [!NOTE]
> The new release branch should adhere to the naming convention of `YY.M.x` (make sure to put the `.x` at the end!). In the case of patch/hotfix releases, however, do NOT cut a new release branch; instead, use the previously-cut release branch with the appropriate `YY.M.x` version numbers.
Use the issue template below to create the release issue. After creating the release issue, pin it for easy access.
-GitHub Issue Template
+Release Template
```markdown
### Summary
-Placeholder for `{{ repo.name }} YY.M.x` release.
+Placeholder for `conda-build YY.M.x` release.
| Pilot | |
|---|---|
@@ -38,25 +36,30 @@ Placeholder for `{{ repo.name }} YY.M.x` release.
### Tasks
-[milestone]: {{ repo.url }}/milestone/
-[process]: {{ repo.url }}/blob/main/RELEASE.md
-[releases]: {{ repo.url }}/releases
-[main]: https://github.com/AnacondaRecipes/{{ repo.name }}-feedstock
-[conda-forge]: https://github.com/conda-forge/{{ repo.name }}-feedstock
-[ReadTheDocs]: https://readthedocs.com/projects/continuumio-{{ repo.name }}/
+[milestone]: https://github.com/conda/conda-build/milestone/
+[process]: https://github.com/conda/conda-build/blob/main/RELEASE.md
+[releases]: https://github.com/conda/conda-build/releases
+[main]: https://github.com/AnacondaRecipes/conda-build-feedstock
+[conda-forge]: https://github.com/conda-forge/conda-build-feedstock
+[ReadTheDocs]: https://readthedocs.com/projects/continuumio-conda-build/
-#### The week before release week
+
+The week before release week
- [ ] Create release branch (named `YY.M.x`)
-- [ ] Ensure release candidates are being successfully built (see `conda-canary/label/rc-{{ repo.name }}-YY.M.x`)
+- [ ] Ensure release candidates are being successfully built (see `conda-canary/label/rc-conda-build-YY.M.x`)
- [ ] [Complete outstanding PRs][milestone]
- [ ] Test release candidates
-#### Release week
+
+
+
+Release week
- [ ] Create release PR (see [release process][process])
- [ ] [Publish release][releases]
+- [ ] Merge `YY.M.x` back into `main`
- [ ] Activate the `YY.M.x` branch on [ReadTheDocs][ReadTheDocs]
- [ ] Feedstocks
- [ ] Bump version & update dependencies/tests in [Anaconda, Inc.'s feedstock][main]
@@ -72,22 +75,56 @@ Placeholder for `{{ repo.name }} YY.M.x` release.
- [ ] [Matrix (conda/conda)](https://matrix.to/#/#conda_conda:gitter.im) (this auto posts from Discourse)
- Summary
- [ ] [Twitter](https://twitter.com/condaproject)
+
+
```
-> **Note:**
+If a patch release is necessary, reopen the original release issue and append the following template to the release issue summary.
+
+
+Patch Release Template
+
+```markdown
+
+Patch YY.M.[$patch_number]
+
+- [ ]
+- [ ] Create release PR (see [release process][process])
+- [ ] [Publish release][releases]
+- [ ] Merge `YY.M.x` back into `main`
+- [ ] Feedstocks
+ - [ ] Bump version & update dependencies/tests in [Anaconda, Inc.'s feedstock][main]
+ - [ ] Bump version & update dependencies/tests in [conda-forge feedstock][conda-forge]
+- [ ] Hand off to the Anaconda packaging team
+
+
+```
+
+
+
+> [!NOTE]
> The [epic template][epic template] is perfect for this; remember to remove the **`epic`** label.
## 2. Alert various parties of the upcoming release. (do this ~1 week prior to release)
Let various interested parties know about the upcoming release; at minimum, conda-forge maintainers should be informed. For major features, a blog post describing the new features should be prepared and posted once the release is completed (see the announcements section of the release issue).
-## 3. Ensure `rever.xsh` and `news/TEMPLATE` are up to date.
+## 3. Manually test canary build(s).
+
+### Canary Builds for Manual Testing
+
+Once the release PRs are filed, successful canary builds will be available on `https://anaconda.org/conda-canary/conda/files?channel=rc-conda-build-YY.M.x` for manual testing.
+
+> [!NOTE]
+> You do not need to apply the `build::review` label for release PRs; every commit to the release branch builds and uploads canary builds to the respective `rc-` label.
+
+## 4. Ensure `rever.xsh` and `news/TEMPLATE` are up to date.
These are synced from [`conda/infrastructure`][infrastructure].
-4. Run rever. (ideally done on the Monday of release week)
+5. Run rever. (ideally done on the Monday of release week)
Currently, there are only 2 activities we use rever for, (1) aggregating the authors and (2) updating the changelog. Aggregating the authors can be an error-prone process and also suffers from builtin race conditions (_i.e._, to generate an updated `.authors.yml` we need an updated `.mailmap` but to have an updated `.mailmap` we need an updated `.authors.yml`). This is why the following steps are very heavy-handed (and potentially repetitive) in running rever commands, undoing commits, squashing/reordering commits, etc.
@@ -102,7 +139,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
2. Clone and `cd` into the repository if you haven't done so already:
```bash
- (rever) $ git clone git@github.com:{{ repo.user }}/{{ repo.name }}.git
+ (rever) $ git clone git@github.com:/conda-build.git
(rever) $ cd conda
```
@@ -119,9 +156,9 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
(rever) $ git checkout -b changelog-YY.M.[$patch_number]
```
-2. Run `rever --activities authors`:
+2. Run `rever --activities authors `:
- > **Note:**
+ > **Note:**
> Include `--force` when re-running any rever commands for the same ``, otherwise, rever will skip the activity and no changes will be made (i.e., rever remembers if an activity has been run for a given version).
```bash
@@ -166,7 +203,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
(rever) $ git commit -m "Update .authors.yml"
```
- - Rerun `rever --activities authors` and finally check that your `.mailmap` is correct by running:
+ - Rerun `rever --activities authors --force ` and finally check that your `.mailmap` is correct by running:
```bash
git shortlog -se
@@ -194,7 +231,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
- Continue repeating the above processes until the `.authors.yml` and `.mailmap` are corrected to your liking. After completing this, you will have at most two commits on your release branch:
```bash
- (rever) $ git cherry -v main
+ (rever) $ git cherry -v
+ 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml
+ 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap
```
@@ -202,7 +239,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
4. Review news snippets (ensure they are all using the correct Markdown format, **not** reStructuredText) and add additional snippets for undocumented PRs/changes as necessary.
- > **Note:**
+ > **Note:**
> We've found it useful to name news snippets with the following format: `-`.
>
> We've also found that we like to include the PR #s inline with the text itself, e.g.:
@@ -213,7 +250,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
> * Add `win-arm64` as a known platform (subdir). (#11778)
> ```
- - You can utilize [GitHub's compare view][compare] to review what changes are to be included in this release.
+ - You can utilize [GitHub's compare view][compare] to review what changes are to be included in this release. Make sure you compare the current release branch against the previous one (e.g., `24.5.x` would be compared against `24.3.x`)
- Add a new news snippet for any PRs of importance that are missing.
@@ -227,7 +264,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
- After completing this, you will have at most three commits on your release branch:
```bash
- (rever) $ git cherry -v main
+ (rever) $ git cherry -v
+ 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml
+ 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap
+ 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news
@@ -235,7 +272,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
5. Run `rever --activities changelog`:
- > **Note:**
+ > **Note:**
> This has previously been a notoriously fickle step (likely due to incorrect regex patterns in the `rever.xsh` config file and missing `github` keys in `.authors.yml`) so beware of potential hiccups. If this fails, it's highly likely to be an innocent issue.
```bash
@@ -254,7 +291,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
- After completing this, you will have at most three commits on your release branch:
```bash
- (rever) $ git cherry -v main
+ (rever) $ git cherry -v
+ 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml
+ 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap
+ 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news
@@ -269,7 +306,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
- After completing this, you will have at most five commits on your release branch:
```bash
- (rever) $ git cherry -v main
+ (rever) $ git cherry -v
+ 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml
+ 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap
+ 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news
@@ -291,7 +328,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
- After completing this, you will have at most six commits on your release branch:
```bash
- (rever) $ git cherry -v main
+ (rever) $ git cherry -v
+ 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml
+ 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap
+ 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news
@@ -325,7 +362,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
11. [Create][new release] the release and **SAVE AS A DRAFT** with the following values:
- > **Note:**
+ > **Note:**
> Only publish the release after the release PR is merged, until then always **save as draft**.
| Field | Value |
@@ -336,22 +373,13 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
-## 5. Wait for review and approval of release PR.
-
-## 6. Manually test canary build(s).
-
-### Canary Builds for Manual Testing
-
-Once the release PRs are filed, successful canary builds will be available on `https://anaconda.org/conda-canary/conda/files?channel=rc-{{ repo.name }}-YY.M.x` for manual testing.
-
-> **Note:**
-> You do not need to apply the `build::review` label for release PRs; every commit to the release branch builds and uploads canary builds to the respective `rc-` label.
+## 6. Wait for review and approval of release PR.
## 7. Merge release PR and publish release.
To publish the release, go to the project's release page (e.g., https://github.com/conda/conda/releases) and add the release notes from `CHANGELOG.md` to the draft release you created earlier. Then publish the release.
-> **Note:**
+> [!NOTE]
> Release notes can be drafted and saved ahead of time.
## 8. Merge/cherry pick the release branch over to the `main` branch.
@@ -367,19 +395,19 @@ To publish the release, go to the project's release page (e.g., https://github.c
4. Ensure that all of the commits being pulled in look accurate, then select "Create pull request".
-> **Note:**
+> [!NOTE]
> Make sure NOT to push the "Update Branch" button. If there are [merge conflicts][merge conflicts], create a temporary "connector branch" dedicated to fixing merge conflicts separately from the `YY.M.x` and `main` branches.
5. Review and merge the pull request the same as any code change pull request.
-> **Note:**
+> [!NOTE]
> The commits from the release branch need to be retained in order to be able to compare individual commits; in other words, a "merge commit" is required when merging the resulting pull request vs. a "squash merge". Protected branches will require permissions to be temporarily relaxed in order to enable this action.
## 9. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.[$patch_number]`.
-> **Note:**
+> [!NOTE]
> Conda-forge's PRs will be auto-created via the `regro-cf-autotick-bot`. Follow the instructions below if any changes need to be made to the recipe that were not automatically added (these instructions are only necessary for anyone who is _not_ a conda-forge feedstock maintainer, since maintainers can push changes directly to the autotick branch):
> - Create a new branch based off of autotick's branch (autotick's branches usually use the `regro-cf-autotick-bot:XX.YY.[$patch_number]_[short hash]` syntax)
> - Add any changes via commits to that new branch
@@ -392,7 +420,7 @@ To publish the release, go to the project's release page (e.g., https://github.c
## 10. Hand off to Anaconda's packaging team.
-> **Note:**
+> [!NOTE]
> This step should NOT be done past Thursday morning EST; please start the process on a Monday, Tuesday, or Wednesday instead in order to avoid any potential debugging sessions over evenings or weekends.
diff --git a/conda_build/_load_setup_py_data.py b/conda_build/_load_setup_py_data.py
index 9180c404fc..b2d8d0731b 100644
--- a/conda_build/_load_setup_py_data.py
+++ b/conda_build/_load_setup_py_data.py
@@ -111,7 +111,7 @@ def setup(**kw):
exec(code, ns, ns)
else:
if not permit_undefined_jinja:
- raise TypeError(f"{setup_file} is not a file that can be read")
+ raise TypeError("%s is not a file that can be read" % setup_file) # noqa: UP031
sys.modules["versioneer"] = versioneer
diff --git a/conda_build/api.py b/conda_build/api.py
index cc866a865d..eaea8f50b8 100644
--- a/conda_build/api.py
+++ b/conda_build/api.py
@@ -52,11 +52,7 @@ def render(
templates evaluated.
Returns a list of (metadata, need_download, need_reparse in env) tuples"""
-
- from conda.exceptions import NoPackagesFoundError
-
- from .exceptions import DependencyNeedsBuildingError
- from .render import finalize_metadata, render_recipe
+ from .render import render_metadata_tuples, render_recipe
config = get_or_merge_config(config, **kwargs)
@@ -68,50 +64,13 @@ def render(
variants=variants,
permit_unsatisfiable_variants=permit_unsatisfiable_variants,
)
- output_metas: dict[tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple] = {}
- for meta, download, render_in_env in metadata_tuples:
- if not meta.skip() or not config.trim_skip:
- for od, om in meta.get_output_metadata_set(
- permit_unsatisfiable_variants=permit_unsatisfiable_variants,
- permit_undefined_jinja=not finalize,
- bypass_env_check=bypass_env_check,
- ):
- if not om.skip() or not config.trim_skip:
- if "type" not in od or od["type"] == "conda":
- if finalize and not om.final:
- try:
- om = finalize_metadata(
- om,
- permit_unsatisfiable_variants=permit_unsatisfiable_variants,
- )
- except (DependencyNeedsBuildingError, NoPackagesFoundError):
- if not permit_unsatisfiable_variants:
- raise
-
- # remove outputs section from output objects for simplicity
- if not om.path and (outputs := om.get_section("outputs")):
- om.parent_outputs = outputs
- del om.meta["outputs"]
-
- output_metas[
- om.dist(),
- om.config.variant.get("target_platform"),
- tuple(
- (var, om.config.variant[var])
- for var in om.get_used_vars()
- ),
- ] = MetaDataTuple(om, download, render_in_env)
- else:
- output_metas[
- f"{om.type}: {om.name()}",
- om.config.variant.get("target_platform"),
- tuple(
- (var, om.config.variant[var])
- for var in om.get_used_vars()
- ),
- ] = MetaDataTuple(om, download, render_in_env)
-
- return list(output_metas.values())
+ return render_metadata_tuples(
+ metadata_tuples,
+ config=config,
+ permit_unsatisfiable_variants=permit_unsatisfiable_variants,
+ finalize=finalize,
+ bypass_env_check=bypass_env_check,
+ )
def output_yaml(
diff --git a/conda_build/build.py b/conda_build/build.py
index 6dd2b49256..5d062f7720 100644
--- a/conda_build/build.py
+++ b/conda_build/build.py
@@ -42,7 +42,13 @@
from . import environ, noarch_python, source, tarcheck, utils
from .config import Config
from .create_test import create_all_test_files
-from .exceptions import CondaBuildException, DependencyNeedsBuildingError
+from .deprecations import deprecated
+from .exceptions import (
+ BuildScriptException,
+ CondaBuildException,
+ CondaBuildUserError,
+ DependencyNeedsBuildingError,
+)
from .index import _delegated_update_index, get_build_index
from .metadata import FIELDS, MetaData
from .os_utils import external
@@ -59,6 +65,7 @@
execute_download_actions,
expand_outputs,
output_yaml,
+ render_metadata_tuples,
render_recipe,
reparse,
try_download,
@@ -69,7 +76,6 @@
CONDA_PACKAGE_EXTENSIONS,
env_var,
glob,
- on_linux,
on_mac,
on_win,
shutil_move_more_retrying,
@@ -769,12 +775,12 @@ def copy_recipe(m):
yaml.dump(m.config.variant, f)
-def copy_readme(m):
+def copy_readme(m: MetaData):
readme = m.get_value("about/readme")
if readme:
src = join(m.config.work_dir, readme)
if not isfile(src):
- sys.exit(f"Error: no readme file: {readme}")
+ raise CondaBuildUserError(f"`about/readme` file ({readme}) doesn't exist")
dst = join(m.config.info_dir, readme)
utils.copy_into(src, dst, m.config.timeout, locking=m.config.locking)
if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}:
@@ -919,7 +925,7 @@ def copy_test_source_files(m, destination):
)
except OSError as e:
log = utils.get_logger(__name__)
- log.warn(
+ log.warning(
f"Failed to copy {f} into test files. Error was: {str(e)}"
)
for ext in ".pyc", ".pyo":
@@ -1640,27 +1646,9 @@ def post_process_files(m: MetaData, initial_prefix_files):
# The post processing may have deleted some files (like easy-install.pth)
current_prefix_files = utils.prefix_files(prefix=host_prefix)
new_files = sorted(current_prefix_files - initial_prefix_files)
- """
- if m.noarch == 'python' and m.config.subdir == 'win-32':
- # Delete any PIP-created .exe launchers and fix entry_points.txt
- # .. but we need to provide scripts instead here.
- from .post import caseless_sepless_fnmatch
- exes = caseless_sepless_fnmatch(new_files, 'Scripts/*.exe')
- for ff in exes:
- os.unlink(os.path.join(m.config.host_prefix, ff))
- new_files.remove(ff)
- """
+
+ # filter_files will remove .git, trash directories, and conda-meta directories
new_files = utils.filter_files(new_files, prefix=host_prefix)
- meta_dir = m.config.meta_dir
- if any(meta_dir in join(host_prefix, f) for f in new_files):
- meta_files = (
- tuple(f for f in new_files if m.config.meta_dir in join(host_prefix, f)),
- )
- sys.exit(
- f"Error: Untracked file(s) {meta_files} found in conda-meta directory. This error usually comes "
- "from using conda in the build script. Avoid doing this, as it can lead to packages "
- "that include their dependencies."
- )
post_build(m, new_files, build_python=python)
entry_point_script_names = get_entry_point_script_names(
@@ -1687,7 +1675,14 @@ def post_process_files(m: MetaData, initial_prefix_files):
return new_files
-def bundle_conda(output, metadata: MetaData, env, stats, **kw):
+def bundle_conda(
+ output,
+ metadata: MetaData,
+ env,
+ stats,
+ new_prefix_files: set[str] = set(),
+ **kw,
+):
log = utils.get_logger(__name__)
log.info("Packaging %s", metadata.dist())
get_all_replacements(metadata.config)
@@ -1739,13 +1734,19 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw):
output["script"],
args[0],
)
- if "system32" in args[0] and "bash" in args[0]:
- print(
- "ERROR :: WSL bash.exe detected, this will not work (PRs welcome!). Please\n"
- " use MSYS2 packages. Add `m2-base` and more (depending on what your"
- " script needs) to `requirements/build` instead."
+ if (
+ # WSL bash is always the same path, it is an alias to the default
+ # distribution as configured by the user
+ on_win
+ # check if WSL is installed before calling Path.samefile/os.stat
+ and (wsl_bash := Path("C:\\Windows\\System32\\bash.exe")).exists()
+ and wsl_bash.samefile(args[0])
+ ):
+ raise CondaBuildUserError(
+ "WSL bash.exe is not supported. Please use MSYS2 packages. Add "
+ "`m2-base` and more (depending on what your script needs) to "
+ "`requirements/build` instead."
)
- sys.exit(1)
else:
args = interpreter.split(" ")
@@ -1758,19 +1759,19 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw):
env_output["RECIPE_DIR"] = metadata.path
env_output["MSYS2_PATH_TYPE"] = "inherit"
env_output["CHERE_INVOKING"] = "1"
+ _set_env_variables_for_build(metadata, env_output)
for var in utils.ensure_list(metadata.get_value("build/script_env")):
if "=" in var:
val = var.split("=", 1)[1]
var = var.split("=", 1)[0]
+ env_output[var] = val
elif var not in os.environ:
warnings.warn(
f"The environment variable '{var}' specified in script_env is undefined.",
UserWarning,
)
- val = ""
else:
- val = os.environ[var]
- env_output[var] = val
+ env_output[var] = os.environ[var]
dest_file = os.path.join(metadata.config.work_dir, output["script"])
utils.copy_into(os.path.join(metadata.path, output["script"]), dest_file)
from os import stat
@@ -1781,12 +1782,15 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw):
_write_activation_text(dest_file, metadata)
bundle_stats = {}
- utils.check_call_env(
- [*args, dest_file],
- cwd=metadata.config.work_dir,
- env=env_output,
- stats=bundle_stats,
- )
+ try:
+ utils.check_call_env(
+ [*args, dest_file],
+ cwd=metadata.config.work_dir,
+ env=env_output,
+ stats=bundle_stats,
+ )
+ except subprocess.CalledProcessError as exc:
+ raise BuildScriptException(str(exc), caused_by=exc) from exc
log_stats(bundle_stats, f"bundling {metadata.name()}")
if stats is not None:
stats[stats_key(metadata, f"bundle_{metadata.name()}")] = bundle_stats
@@ -1794,10 +1798,26 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw):
if files:
# Files is specified by the output
# we exclude the list of files that we want to keep, so post-process picks them up as "new"
- keep_files = {
- os.path.normpath(pth)
- for pth in utils.expand_globs(files, metadata.config.host_prefix)
- }
+ if isinstance(files, dict):
+ # When matching files with include/exclude lists, only
+ # new_prefix_files are considered. Files in the PREFIX from other
+ # recipes (dependencies) are ignored.
+ include = files.get("include") or []
+ exclude = files.get("exclude") or []
+ exclude_files = {
+ os.path.normpath(pth)
+ for pth in utils.expand_globs(exclude, metadata.config.host_prefix)
+ }
+ keep_files = {
+ os.path.normpath(pth)
+ for pth in utils.expand_globs(include, metadata.config.host_prefix)
+ }
+ keep_files = new_prefix_files.intersection(keep_files) - exclude_files
+ else:
+ keep_files = {
+ os.path.normpath(pth)
+ for pth in utils.expand_globs(files, metadata.config.host_prefix)
+ }
pfx_files = set(utils.prefix_files(metadata.config.host_prefix))
initial_files = {
item
@@ -1808,7 +1828,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw):
}
elif not output.get("script"):
if not metadata.always_include_files():
- log.warn(
+ log.warning(
"No files or script found for output {}".format(output.get("name"))
)
build_deps = metadata.get_value("requirements/build")
@@ -1846,7 +1866,9 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw):
initial_files.remove(f)
has_matches = True
if not has_matches:
- log.warn("Glob %s from always_include_files does not match any files", pat)
+ log.warning(
+ "Glob %s from always_include_files does not match any files", pat
+ )
files = post_process_files(metadata, initial_files)
if output.get("name") and output.get("name") != "conda":
@@ -1905,7 +1927,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw):
from conda_verify.verify import Verify
except ImportError:
Verify = None
- log.warn(
+ log.warning(
"Importing conda-verify failed. Please be sure to test your packages. "
"conda install conda-verify to make this message go away."
)
@@ -1922,7 +1944,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw):
exit_on_error=metadata.config.exit_on_verify_error,
)
except KeyError as e:
- log.warn(
+ log.warning(
"Package doesn't have necessary files. It might be too old to inspect."
f"Legacy noarch packages are known to fail. Full message was {e}"
)
@@ -1972,7 +1994,13 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw):
return final_outputs
-def bundle_wheel(output, metadata: MetaData, env, stats):
+def bundle_wheel(
+ output,
+ metadata: MetaData,
+ env,
+ stats,
+ new_prefix_files: set[str] = set(),
+):
ext = ".bat" if utils.on_win else ".sh"
with TemporaryDirectory() as tmpdir, utils.tmp_chdir(metadata.config.work_dir):
dest_file = os.path.join(metadata.config.work_dir, "wheel_output" + ext)
@@ -2149,7 +2177,7 @@ def _write_activation_text(script_path, m):
_write_sh_activation_text(fh, m)
else:
log = utils.get_logger(__name__)
- log.warn(
+ log.warning(
f"not adding activation to {script_path} - I don't know how to do so for "
"this file type"
)
@@ -2367,7 +2395,7 @@ def build(
):
specs.append(vcs_source)
- log.warn(
+ log.warning(
"Your recipe depends on %s at build time (for templates), "
"but you have not listed it as a build dependency. Doing "
"so for this build.",
@@ -2425,6 +2453,24 @@ def build(
# Write out metadata for `conda debug`, making it obvious that this is what it is, must be done
# after try_download()
output_yaml(m, os.path.join(m.config.work_dir, "metadata_conda_debug.yaml"))
+ if m.config.verbose:
+ m_copy = m.copy()
+ for om, _, _ in render_metadata_tuples(
+ [(m_copy, False, False)], m_copy.config
+ ):
+ print(
+ "",
+ "Rendered as:",
+ "```yaml",
+ output_yaml(om).rstrip(),
+ "```",
+ "",
+ sep="\n",
+ )
+ # Each iteration returns the whole meta yaml, and then we are supposed to remove
+ # the outputs we don't want. Instead we just take the first and print it fully
+ break
+ del m_copy
# get_dir here might be just work, or it might be one level deeper,
# dependening on the source.
@@ -2459,9 +2505,12 @@ def build(
with codecs.getwriter("utf-8")(open(build_file, "wb")) as bf:
bf.write(script)
- windows.build(
- m, build_file, stats=build_stats, provision_only=provision_only
- )
+ try:
+ windows.build(
+ m, build_file, stats=build_stats, provision_only=provision_only
+ )
+ except subprocess.CalledProcessError as exc:
+ raise BuildScriptException(str(exc), caused_by=exc) from exc
else:
build_file = join(m.path, "build.sh")
if isfile(build_file) and script:
@@ -2503,13 +2552,16 @@ def build(
del env["CONDA_BUILD"]
# this should raise if any problems occur while building
- utils.check_call_env(
- cmd,
- env=env,
- rewrite_stdout_env=rewrite_env,
- cwd=src_dir,
- stats=build_stats,
- )
+ try:
+ utils.check_call_env(
+ cmd,
+ env=env,
+ rewrite_stdout_env=rewrite_env,
+ cwd=src_dir,
+ stats=build_stats,
+ )
+ except subprocess.CalledProcessError as exc:
+ raise BuildScriptException(str(exc), caused_by=exc) from exc
utils.remove_pycache_from_scripts(m.config.host_prefix)
if build_stats and not provision_only:
log_stats(build_stats, f"building {m.name()}")
@@ -2692,8 +2744,8 @@ def build(
# This is wrong, files has not been expanded at this time and could contain
# wildcards. Also well, I just do not understand this, because when this
# does contain wildcards, the files in to_remove will slip back in.
- if "files" in output_d:
- output_d["files"] = set(output_d["files"]) - to_remove
+ if (files := output_d.get("files")) and not isinstance(files, dict):
+ output_d["files"] = set(files) - to_remove
# copies the backed-up new prefix files into the newly created host env
for f in new_prefix_files:
@@ -2708,7 +2760,9 @@ def build(
with utils.path_prepended(m.config.build_prefix):
env = environ.get_dict(m=m)
pkg_type = "conda" if not hasattr(m, "type") else m.type
- newly_built_packages = bundlers[pkg_type](output_d, m, env, stats)
+ newly_built_packages = bundlers[pkg_type](
+ output_d, m, env, stats, new_prefix_files
+ )
# warn about overlapping files.
if "checksums" in output_d:
for file, csum in output_d["checksums"].items():
@@ -2815,6 +2869,15 @@ def warn_on_use_of_SRC_DIR(metadata):
)
+@deprecated(
+ "3.16.0",
+ "24.9.0",
+ addendum=(
+ "Test built packages instead, not recipes "
+ "(e.g., `conda build --test package` instead of `conda build --test recipe/`)."
+ ),
+ deprecation_type=FutureWarning, # we need to warn users, not developers
+)
def _construct_metadata_for_test_from_recipe(recipe_dir, config):
config.need_cleanup = False
config.recipe_dir = None
@@ -2822,11 +2885,6 @@ def _construct_metadata_for_test_from_recipe(recipe_dir, config):
metadata = expand_outputs(
render_recipe(recipe_dir, config=config, reset_build_id=False)
)[0][1]
- log = utils.get_logger(__name__)
- log.warn(
- "Testing based on recipes is deprecated as of conda-build 3.16.0. Please adjust "
- "your code to pass your desired conda package to test instead."
- )
utils.rm_rf(metadata.config.test_dir)
@@ -2873,7 +2931,7 @@ def _construct_metadata_for_test_from_package(package, config):
is_channel = True
if not is_channel:
- log.warn(
+ log.warning(
"Copying package to conda-build croot. No packages otherwise alongside yours will"
" be available unless you specify -c local. To avoid this warning, your package "
"must reside in a channel structure with platform-subfolders. See more info on "
@@ -2990,13 +3048,7 @@ def construct_metadata_for_test(recipedir_or_package, config):
return m, hash_input
-def write_build_scripts(m, script, build_file):
- # TODO: Prepending the prefixes here should probably be guarded by
- # if not m.activate_build_script:
- # Leaving it as is, for now, since we need a quick, non-disruptive patch release.
- with utils.path_prepended(m.config.host_prefix, False):
- with utils.path_prepended(m.config.build_prefix, False):
- env = environ.get_dict(m=m)
+def _set_env_variables_for_build(m, env):
env["CONDA_BUILD_STATE"] = "BUILD"
# hard-code this because we never want pip's build isolation
@@ -3028,6 +3080,17 @@ def write_build_scripts(m, script, build_file):
if "replacements" in env:
del env["replacements"]
+
+def write_build_scripts(m, script, build_file):
+ # TODO: Prepending the prefixes here should probably be guarded by
+ # if not m.activate_build_script:
+ # Leaving it as is, for now, since we need a quick, non-disruptive patch release.
+ with utils.path_prepended(m.config.host_prefix, False):
+ with utils.path_prepended(m.config.build_prefix, False):
+ env = environ.get_dict(m=m)
+
+ _set_env_variables_for_build(m, env)
+
work_file = join(m.config.work_dir, "conda_build.sh")
env_file = join(m.config.work_dir, "build_env_setup.sh")
with open(env_file, "w") as bf:
@@ -3127,7 +3190,7 @@ def _write_test_run_script(
tf.write(f'call "{shell_file}"\n')
tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n")
else:
- log.warn(
+ log.warning(
"Found sh test file on windows. Ignoring this for now (PRs welcome)"
)
elif os.path.splitext(shell_file)[1] == ".sh":
@@ -3312,7 +3375,7 @@ def test(
# Needs to come after create_files in case there's test/source_files
shutil_move_more_retrying(config.work_dir, dest, "work")
else:
- log.warn(
+ log.warning(
"Not moving work directory after build. Your package may depend on files "
"in the work directory that are not included with your package"
)
@@ -3376,7 +3439,7 @@ def test(
CondaError,
AssertionError,
) as exc:
- log.warn(
+ log.warning(
"failed to get package records, retrying. exception was: %s", str(exc)
)
tests_failed(
@@ -3483,7 +3546,12 @@ def test(
return True
-def tests_failed(package_or_metadata, move_broken, broken_dir, config):
+def tests_failed(
+ package_or_metadata: str | os.PathLike | Path | MetaData,
+ move_broken: bool,
+ broken_dir: str | os.PathLike | Path,
+ config: Config,
+) -> None:
"""
Causes conda to exit if any of the given package's tests failed.
@@ -3503,7 +3571,7 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config):
log = utils.get_logger(__name__)
try:
shutil.move(pkg, dest)
- log.warn(
+ log.warning(
f"Tests failed for {os.path.basename(pkg)} - moving package to {broken_dir}"
)
except OSError:
@@ -3511,20 +3579,7 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config):
_delegated_update_index(
os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1
)
- sys.exit("TESTS FAILED: " + os.path.basename(pkg))
-
-
-def check_external():
- if on_linux:
- patchelf = external.find_executable("patchelf")
- if patchelf is None:
- sys.exit(
- "Error:\n"
- f" Did not find 'patchelf' in: {os.pathsep.join(external.dir_paths)}\n"
- " 'patchelf' is necessary for building conda packages on Linux with\n"
- " relocatable ELF libraries. You can install patchelf using conda install\n"
- " patchelf.\n"
- )
+ raise CondaBuildUserError("TESTS FAILED: " + os.path.basename(pkg))
def build_tree(
@@ -3668,7 +3723,7 @@ def build_tree(
# downstreams can be a dict, for adding capability for worker labels
if hasattr(downstreams, "keys"):
downstreams = list(downstreams.keys())
- log.warn(
+ log.warning(
"Dictionary keys for downstreams are being "
"ignored right now. Coming soon..."
)
@@ -3707,7 +3762,7 @@ def build_tree(
UnsatisfiableError,
DependencyNeedsBuildingError,
) as e:
- log.warn(
+ log.warning(
f"Skipping downstream test for spec {dep}; was "
f"unsatisfiable. Error was {e}"
)
@@ -3902,7 +3957,10 @@ def build_tree(
return list(built_packages.keys())
-def handle_anaconda_upload(paths, config):
+def handle_anaconda_upload(
+ paths: Iterable[str | os.PathLike | Path],
+ config: Config,
+) -> None:
from .os_utils.external import find_executable
paths = utils.ensure_list(paths)
@@ -3936,7 +3994,7 @@ def handle_anaconda_upload(paths, config):
"# To have conda build upload to anaconda.org automatically, use\n"
f"# {prompter}conda config --set anaconda_upload yes\n"
)
- no_upload_message += f"anaconda upload{joiner}" + joiner.join(paths)
+ no_upload_message += f"anaconda upload{joiner}" + joiner.join(map(str, paths))
if not upload:
print(no_upload_message)
@@ -3944,7 +4002,7 @@ def handle_anaconda_upload(paths, config):
if not anaconda:
print(no_upload_message)
- sys.exit(
+ raise CondaBuildUserError(
"Error: cannot locate anaconda command (required for upload)\n"
"# Try:\n"
f"# {prompter}conda install anaconda-client"
@@ -4001,11 +4059,11 @@ def handle_pypi_upload(wheels, config):
try:
utils.check_call_env(args + [f])
except:
- utils.get_logger(__name__).warn(
+ utils.get_logger(__name__).warning(
"wheel upload failed - is twine installed?"
" Is this package registered?"
)
- utils.get_logger(__name__).warn(f"Wheel file left in {f}")
+ utils.get_logger(__name__).warning(f"Wheel file left in {f}")
else:
print(f"anaconda_upload is not set. Not uploading wheels: {wheels}")
diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py
index a966677471..13e129910d 100644
--- a/conda_build/cli/main_build.py
+++ b/conda_build/cli/main_build.py
@@ -535,7 +535,6 @@ def execute(args: Sequence[str] | None = None) -> int:
context.__init__(argparse_args=parsed)
config = get_or_merge_config(None, **parsed.__dict__)
- build.check_external()
# change globals in build module, see comment there as well
config.channel_urls = get_channel_urls(parsed.__dict__)
diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py
deleted file mode 100644
index 18056cc368..0000000000
--- a/conda_build/conda_interface.py
+++ /dev/null
@@ -1,550 +0,0 @@
-# Copyright (C) 2014 Anaconda, Inc
-# SPDX-License-Identifier: BSD-3-Clause
-from __future__ import annotations
-
-import configparser as _configparser
-import os as _os
-from builtins import input as _input
-from functools import partial as _partial
-from importlib import import_module as _import_module
-from io import StringIO as _StringIO
-
-from conda import __version__
-from conda.auxlib.entity import EntityEncoder as _EntityEncoder
-from conda.base.constants import PREFIX_PLACEHOLDER as _PREFIX_PLACEHOLDER
-from conda.base.context import context as _context
-from conda.base.context import determine_target_prefix as _determine_target_prefix
-from conda.base.context import non_x86_machines as _non_x86_linux_machines
-from conda.base.context import reset_context as _reset_context
-from conda.cli.common import spec_from_line as _spec_from_line
-from conda.cli.common import specs_from_args as _specs_from_args
-from conda.cli.common import specs_from_url as _specs_from_url
-from conda.cli.conda_argparse import ArgumentParser as _ArgumentParser
-from conda.common.path import win_path_to_unix as _win_path_to_unix
-from conda.common.toposort import _toposort as __toposort
-from conda.core.package_cache_data import (
- ProgressiveFetchExtract as _ProgressiveFetchExtract,
-)
-from conda.exceptions import CondaError as _CondaError
-from conda.exceptions import CondaHTTPError as _CondaHTTPError
-from conda.exceptions import LinkError as _LinkError
-from conda.exceptions import LockError as _LockError
-from conda.exceptions import NoPackagesFoundError as _NoPackagesFoundError
-from conda.exceptions import PaddingError as _PaddingError
-from conda.exceptions import ResolvePackageNotFound as _ResolvePackageNotFound
-from conda.exceptions import UnsatisfiableError as _UnsatisfiableError
-from conda.exports import Completer as _Completer
-from conda.exports import InstalledPackages as _InstalledPackages
-from conda.exports import symlink_conda as _symlink_conda
-from conda.gateways.connection.download import TmpDownload as _TmpDownload
-from conda.gateways.connection.download import download as _download
-from conda.gateways.connection.session import CondaSession as _CondaSession
-from conda.gateways.disk.create import TemporaryDirectory as _TemporaryDirectory
-from conda.gateways.disk.link import lchmod as _lchmod
-from conda.misc import untracked as _untracked
-from conda.misc import walk_prefix as _walk_prefix
-from conda.models.channel import Channel as _Channel
-from conda.models.channel import get_conda_build_local_url as _get_conda_build_local_url
-from conda.models.enums import FileMode as _FileMode
-from conda.models.enums import PathType as _PathType
-from conda.models.match_spec import MatchSpec as _MatchSpec
-from conda.models.records import PackageRecord as _PackageRecord
-from conda.models.version import VersionOrder as _VersionOrder
-from conda.models.version import normalized_version as _normalized_version
-from conda.resolve import Resolve as _Resolve
-from conda.utils import human_bytes as _human_bytes
-from conda.utils import unix_path_to_win as _unix_path_to_win
-from conda.utils import url_path as _url_path
-
-from .deprecations import deprecated
-from .utils import rm_rf as _rm_rf
-
-try:
- from conda.cli.helpers import add_parser_channels as _add_parser_channels
- from conda.cli.helpers import add_parser_prefix as _add_parser_prefix
-except ImportError:
- # conda<23.11
- from conda.cli.conda_argparse import add_parser_channels as _add_parser_channels
- from conda.cli.conda_argparse import add_parser_prefix as _add_parser_prefix
-
-deprecated.constant(
- "24.5",
- "24.7",
- "Completer",
- _Completer,
- addendum="Unused.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "CondaSession",
- _CondaSession,
- addendum="Use `conda.gateways.connection.session.CondaSession` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "InstalledPackages",
- _InstalledPackages,
- addendum="Unused.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "NoPackagesFound",
- _ResolvePackageNotFound,
- addendum="Use `conda.exceptions.ResolvePackageNotFound` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "Unsatisfiable",
- _UnsatisfiableError,
- addendum="Use `conda.exceptions.UnsatisfiableError` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "symlink_conda",
- _symlink_conda,
- addendum="Unused.",
-)
-
-
-deprecated.constant(
- "24.5",
- "24.7",
- "ArgumentParser",
- _ArgumentParser,
- addendum="Use `conda.cli.conda_argparse.ArgumentParser` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "add_parser_channels",
- _add_parser_channels,
- addendum="Use `conda.cli.helpers.add_parser_channels` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "add_parser_prefix",
- _add_parser_prefix,
- addendum="Use `conda.cli.helpers.add_parser_prefix` instead.",
-)
-
-deprecated.constant(
- "24.5",
- "24.7",
- "Channel",
- _Channel,
- addendum="Use `conda.models.channel.Channel` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "FileMode",
- _FileMode,
- addendum="Use `conda.models.enums.FileMode` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "PathType",
- _PathType,
- addendum="Use `conda.models.enums.PathType` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "MatchSpec",
- _MatchSpec,
- addendum="Use `conda.models.match_spec.MatchSpec` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "PackageRecord",
- _PackageRecord,
- addendum="Use `conda.models.records.PackageRecord` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "VersionOrder",
- _VersionOrder,
- addendum="Use `conda.models.version.VersionOrder` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "normalized_version",
- _normalized_version,
- addendum="Use `conda.models.version.normalized_version` instead.",
-)
-
-deprecated.constant(
- "24.5",
- "24.7",
- "EntityEncoder",
- _EntityEncoder,
- addendum="Use `conda.auxlib.entity.EntityEncoder` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "Resolve",
- _Resolve,
- addendum="Use `conda.resolve.Resolve` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "TemporaryDirectory",
- _TemporaryDirectory,
- addendum="Use `conda.gateways.disk.create.TemporaryDirectory` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "TmpDownload",
- _TmpDownload,
- addendum="Use `conda.gateways.connection.download.TmpDownload` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "download",
- _download,
- addendum="Use `conda.gateways.connection.download.download` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "_toposort",
- __toposort,
- addendum="Use `conda.common.toposort._toposort` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "human_bytes",
- _human_bytes,
- addendum="Use `conda.utils.human_bytes` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "lchmod",
- _lchmod,
- addendum="Use `conda.gateways.disk.link.lchmod` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "prefix_placeholder",
- _PREFIX_PLACEHOLDER,
- addendum="Use `conda.base.constants.PREFIX_PLACEHOLDER` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "rm_rf",
- _rm_rf,
- addendum="Use `conda_build.utils.rm_rf` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "spec_from_line",
- _spec_from_line,
- addendum="Use `conda.cli.common.spec_from_line` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "specs_from_args",
- _specs_from_args,
- addendum="Use `conda.cli.common.specs_from_args` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "specs_from_url",
- _specs_from_url,
- addendum="Use `conda.cli.common.specs_from_url` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "unix_path_to_win",
- _unix_path_to_win,
- addendum="Use `conda.utils.unix_path_to_win` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "untracked",
- _untracked,
- addendum="Use `conda.misc.untracked` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "url_path",
- _url_path,
- addendum="Use `conda.utils.url_path` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "walk_prefix",
- _walk_prefix,
- addendum="Use `conda.misc.walk_prefix` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "win_path_to_unix",
- _win_path_to_unix,
- addendum="Use `conda.common.path.win_path_to_unix` instead.",
-)
-
-deprecated.constant(
- "24.5",
- "24.7",
- "configparser",
- _configparser,
- addendum="Use `configparser` instead.",
-)
-deprecated.constant("24.5", "24.7", "os", _os, addendum="Use `os` instead.")
-deprecated.constant(
- "24.5",
- "24.7",
- "partial",
- _partial,
- addendum="Use `functools.partial` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "import_module",
- _import_module,
- addendum="Use `importlib.import_module` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "StringIO",
- _StringIO,
- addendum="Use `io.StringIO` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "input",
- _input,
- addendum="Use `input` instead.",
-)
-
-deprecated.constant(
- "24.5",
- "24.7",
- "context",
- _context,
- addendum="Use `conda.base.context.context` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "determine_target_prefix",
- _determine_target_prefix,
- addendum="Use `conda.base.context.determine_target_prefix` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "non_x86_linux_machines",
- _non_x86_linux_machines,
- addendum="Use `conda.base.context.non_x86_machines` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "ProgressiveFetchExtract",
- _ProgressiveFetchExtract,
- addendum="Use `conda.core.package_cache_data.ProgressiveFetchExtract` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "CondaError",
- _CondaError,
- addendum="Use `conda.exceptions.CondaError` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "CondaHTTPError",
- _CondaHTTPError,
- addendum="Use `conda.exceptions.CondaHTTPError` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "LinkError",
- _LinkError,
- addendum="Use `conda.exceptions.LinkError` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "LockError",
- _LockError,
- addendum="Use `conda.exceptions.LockError` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "NoPackagesFoundError",
- _NoPackagesFoundError,
- addendum="Use `conda.exceptions.NoPackagesFoundError` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "PaddingError",
- _PaddingError,
- addendum="Use `conda.exceptions.PaddingError` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "UnsatisfiableError",
- _UnsatisfiableError,
- addendum="Use `conda.exceptions.UnsatisfiableError` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "get_conda_build_local_url",
- _get_conda_build_local_url,
- addendum="Use `conda.models.channel.get_conda_build_local_url` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "reset_context",
- _reset_context,
- addendum="Use `conda.base.context.reset_context` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "binstar_upload",
- _context.binstar_upload,
- addendum="Use `conda.base.context.context.binstar_upload` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "default_python",
- _context.default_python,
- addendum="Use `conda.base.context.context.default_python` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "envs_dirs",
- _context.envs_dirs,
- addendum="Use `conda.base.context.context.envs_dirs` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "pkgs_dirs",
- list(_context.pkgs_dirs),
- addendum="Use `conda.base.context.context.pkgs_dirs` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "cc_platform",
- _context.platform,
- addendum="Use `conda.base.context.context.platform` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "root_dir",
- _context.root_prefix,
- addendum="Use `conda.base.context.context.root_prefix` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "root_writable",
- _context.root_writable,
- addendum="Use `conda.base.context.context.root_writable` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "subdir",
- _context.subdir,
- addendum="Use `conda.base.context.context.subdir` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "create_default_packages",
- _context.create_default_packages,
- addendum="Use `conda.base.context.context.create_default_packages` instead.",
-)
-
-deprecated.constant(
- "24.5",
- "24.7",
- "get_rc_urls",
- lambda: list(_context.channels),
- addendum="Use `conda.base.context.context.channels` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "get_prefix",
- _partial(_determine_target_prefix, _context),
- addendum="Use `conda.base.context.context.target_prefix` instead.",
-)
-deprecated.constant(
- "24.5",
- "24.7",
- "cc_conda_build",
- _context.conda_build,
- addendum="Use `conda.base.context.context.conda_build` instead.",
-)
-
-deprecated.constant(
- "24.5",
- "24.7",
- "get_conda_channel",
- _Channel.from_value,
- addendum="Use `conda.models.channel.Channel.from_value` instead.",
-)
-
-deprecated.constant(
- "24.5",
- "24.7",
- "env_path_backup_var_exists",
- _os.getenv("CONDA_PATH_BACKUP"),
- addendum="Unused.",
-)
-
-
-deprecated.constant(
- "24.5",
- "24.7",
- "CONDA_VERSION",
- __version__,
- addendum="Use `conda.__version__` instead.",
-)
diff --git a/conda_build/config.py b/conda_build/config.py
index 09ce6b0718..465058701f 100644
--- a/conda_build/config.py
+++ b/conda_build/config.py
@@ -9,6 +9,7 @@
import copy
import math
import os
+import pickle
import re
import shutil
import time
@@ -19,7 +20,6 @@
from conda.base.context import context
from conda.utils import url_path
-from .deprecations import deprecated
from .utils import (
get_build_folders,
get_conda_operation_locks,
@@ -54,7 +54,6 @@ def set_invocation_time():
_src_cache_root_default = None
error_overlinking_default = "false"
error_overdepending_default = "false"
-deprecated.constant("24.5", "24.7", "noarch_python_build_age_default", 0)
enable_static_default = "false"
no_rewrite_stdout_env_default = "false"
ignore_verify_codes_default = []
@@ -326,7 +325,7 @@ def arch(self):
@arch.setter
def arch(self, value):
log = get_logger(__name__)
- log.warn(
+ log.warning(
"Setting build arch. This is only useful when pretending to be on another "
"arch, such as for rendering necessary dependencies on a non-native arch. "
"I trust that you know what you're doing."
@@ -342,7 +341,7 @@ def platform(self):
@platform.setter
def platform(self, value):
log = get_logger(__name__)
- log.warn(
+ log.warning(
"Setting build platform. This is only useful when "
"pretending to be on another platform, such as "
"for rendering necessary dependencies on a non-native "
@@ -777,15 +776,6 @@ def test_dir(self):
def subdirs_same(self):
return self.host_subdir == self.build_subdir
- @property
- @deprecated(
- "24.5",
- "24.7",
- addendum="Use `conda.base.context.context.override_channels` instead.",
- )
- def override_channels(self):
- return context.override_channels
-
def clean(self, remove_folders=True):
# build folder is the whole burrito containing envs and source folders
# It will only exist if we download source, or create a build or test environment
@@ -831,9 +821,12 @@ def clean_pkgs(self):
def copy(self) -> Config:
new = copy.copy(self)
- new.variant = copy.deepcopy(self.variant)
+ # Use pickle.loads(pickle.dumps(...)) as a faster copy.deepcopy alternative.
+ new.variant = pickle.loads(pickle.dumps(self.variant, pickle.HIGHEST_PROTOCOL))
if hasattr(self, "variants"):
- new.variants = copy.deepcopy(self.variants)
+ new.variants = pickle.loads(
+ pickle.dumps(self.variants, pickle.HIGHEST_PROTOCOL)
+ )
return new
# context management - automatic cleanup if self.dirty or self.keep_old_work is not True
diff --git a/conda_build/convert.py b/conda_build/convert.py
index e910d47e21..628ba95109 100644
--- a/conda_build/convert.py
+++ b/conda_build/convert.py
@@ -12,12 +12,12 @@
import os
import re
import shutil
-import sys
import tarfile
import tempfile
from pathlib import Path
from typing import TYPE_CHECKING
+from .exceptions import CondaBuildUserError
from .utils import ensure_list, filter_info_files, walk
if TYPE_CHECKING:
@@ -818,13 +818,15 @@ def conda_convert(
else:
for c_extension in imports:
print(c_extension)
- sys.exit()
+ return
if not show_imports and len(platforms) == 0:
- sys.exit("Error: --platform option required for conda package conversion.")
+ raise CondaBuildUserError(
+ "Error: --platform option required for conda package conversion."
+ )
if len(retrieve_c_extensions(file_path)) > 0 and not force:
- sys.exit(
+ raise CondaBuildUserError(
f"WARNING: Package {os.path.basename(file_path)} contains C extensions; skipping conversion. "
"Use -f to force conversion."
)
diff --git a/conda_build/deprecations.py b/conda_build/deprecations.py
index f691b5192d..9155ed153d 100644
--- a/conda_build/deprecations.py
+++ b/conda_build/deprecations.py
@@ -86,6 +86,7 @@ def __call__(
*,
addendum: str | None = None,
stack: int = 0,
+ deprecation_type: type[Warning] = DeprecationWarning,
) -> Callable[[Callable[P, T]], Callable[P, T]]:
"""Deprecation decorator for functions, methods, & classes.
@@ -102,6 +103,7 @@ def deprecated_decorator(func: Callable[P, T]) -> Callable[P, T]:
remove_in=remove_in,
prefix=f"{func.__module__}.{func.__qualname__}",
addendum=addendum,
+ deprecation_type=deprecation_type,
)
# alert developer that it's time to remove something
@@ -128,6 +130,7 @@ def argument(
rename: str | None = None,
addendum: str | None = None,
stack: int = 0,
+ deprecation_type: type[Warning] = DeprecationWarning,
) -> Callable[[Callable[P, T]], Callable[P, T]]:
"""Deprecation decorator for keyword arguments.
@@ -149,6 +152,7 @@ def deprecated_decorator(func: Callable[P, T]) -> Callable[P, T]:
addendum=(
f"Use '{rename}' instead." if rename and not addendum else addendum
),
+ deprecation_type=deprecation_type,
)
# alert developer that it's time to remove something
@@ -181,6 +185,7 @@ def action(
*,
addendum: str | None = None,
stack: int = 0,
+ deprecation_type: type[Warning] = FutureWarning,
) -> ActionType:
"""Wraps any argparse.Action to issue a deprecation warning."""
@@ -203,7 +208,7 @@ def __init__(inner_self: Self, *args: Any, **kwargs: Any) -> None:
else f"`{inner_self.dest}`"
),
addendum=addendum,
- deprecation_type=FutureWarning,
+ deprecation_type=deprecation_type,
)
# alert developer that it's time to remove something
@@ -263,6 +268,7 @@ def constant(
*,
addendum: str | None = None,
stack: int = 0,
+ deprecation_type: type[Warning] = DeprecationWarning,
) -> None:
"""Deprecation function for module constant/global.
@@ -281,6 +287,7 @@ def constant(
remove_in=remove_in,
prefix=f"{fullname}.{constant}",
addendum=addendum,
+ deprecation_type=deprecation_type,
)
# alert developer that it's time to remove something
@@ -292,7 +299,7 @@ def constant(
def __getattr__(name: str) -> Any:
if name == constant:
- warnings.warn(message, category, stacklevel=2 + stack)
+ warnings.warn(message, category, stacklevel=3 + stack)
return value
if super_getattr:
@@ -310,6 +317,7 @@ def topic(
topic: str,
addendum: str | None = None,
stack: int = 0,
+ deprecation_type: type[Warning] = DeprecationWarning,
) -> None:
"""Deprecation function for a topic.
@@ -325,6 +333,7 @@ def topic(
remove_in=remove_in,
prefix=topic,
addendum=addendum,
+ deprecation_type=deprecation_type,
)
# alert developer that it's time to remove something
@@ -379,7 +388,7 @@ def _generate_message(
prefix: str,
addendum: str | None,
*,
- deprecation_type: type[Warning] = DeprecationWarning,
+ deprecation_type: type[Warning],
) -> tuple[type[Warning] | None, str]:
"""Generate the standardized deprecation message and determine whether the
deprecation is pending, active, or past.
diff --git a/conda_build/develop.py b/conda_build/develop.py
index d0e3d59fd6..a0c71e1669 100644
--- a/conda_build/develop.py
+++ b/conda_build/develop.py
@@ -5,11 +5,17 @@
import shutil
import sys
from os.path import abspath, exists, expanduser, isdir, join
+from pathlib import Path
+from typing import TYPE_CHECKING
+from .exceptions import CondaBuildUserError
from .os_utils.external import find_executable
from .post import mk_relative_osx
from .utils import check_call_env, get_site_packages, on_mac, rec_glob
+if TYPE_CHECKING:
+ from pathlib import Path
+
def relink_sharedobjects(pkg_path, build_prefix):
"""
@@ -56,13 +62,13 @@ def write_to_conda_pth(sp_dir, pkg_path):
print("added " + pkg_path)
-def get_setup_py(path_):
- """Return full path to setup.py or exit if not found"""
+def get_setup_py(path_: Path) -> Path:
+ """Return full path to setup.py or raise error if not found"""
# build path points to source dir, builds are placed in the
setup_py = join(path_, "setup.py")
if not exists(setup_py):
- sys.exit(f"No setup.py found in {path_}. Exiting.")
+ raise CondaBuildUserError(f"No setup.py found in {path_}.")
return setup_py
@@ -136,12 +142,10 @@ def execute(
uninstall: bool = False,
) -> None:
if not isdir(prefix):
- sys.exit(
- f"""\
-Error: environment does not exist: {prefix}
-#
-# Use 'conda create' to create the environment first.
-#"""
+ raise CondaBuildUserError(
+ f"Error: environment does not exist: {prefix}\n"
+ f"\n"
+ f"Use 'conda create' to create the environment first."
)
assert find_executable("python", prefix=prefix)
diff --git a/conda_build/environ.py b/conda_build/environ.py
index 7a3a7ca8cb..3113ec7f8a 100644
--- a/conda_build/environ.py
+++ b/conda_build/environ.py
@@ -888,7 +888,8 @@ def get_install_actions(
with utils.LoggingContext(conda_log_level):
with capture():
try:
- precs = _install_actions(prefix, index, specs)["LINK"]
+ _actions = _install_actions(prefix, index, specs, subdir=subdir)
+ precs = _actions["LINK"]
except (NoPackagesFoundError, UnsatisfiableError) as exc:
raise DependencyNeedsBuildingError(exc, subdir=subdir)
except (
@@ -901,7 +902,7 @@ def get_install_actions(
BuildLockError,
) as exc:
if "lock" in str(exc):
- log.warn(
+ log.warning(
"failed to get package records, retrying. exception was: %s",
str(exc),
)
@@ -922,7 +923,7 @@ def get_install_actions(
):
pkg_dir = os.path.dirname(pkg_dir)
folder += 1
- log.warn(
+ log.warning(
"I think conda ended up with a partial extraction for %s. "
"Removing the folder and retrying",
pkg_dir,
@@ -930,7 +931,7 @@ def get_install_actions(
if pkg_dir in context.pkgs_dirs and os.path.isdir(pkg_dir):
utils.rm_rf(pkg_dir)
if retries < max_env_retry:
- log.warn(
+ log.warning(
"failed to get package records, retrying. exception was: %s",
str(exc),
)
@@ -1063,20 +1064,20 @@ def create_env(
or isinstance(exc, PaddingError)
) and config.prefix_length > 80:
if config.prefix_length_fallback:
- log.warn(
+ log.warning(
"Build prefix failed with prefix length %d",
config.prefix_length,
)
- log.warn("Error was: ")
- log.warn(str(exc))
- log.warn(
+ log.warning("Error was: ")
+ log.warning(str(exc))
+ log.warning(
"One or more of your package dependencies needs to be rebuilt "
"with a longer prefix length."
)
- log.warn(
+ log.warning(
"Falling back to legacy prefix length of 80 characters."
)
- log.warn(
+ log.warning(
"Your package will not install into prefixes > 80 characters."
)
config.prefix_length = 80
@@ -1098,7 +1099,7 @@ def create_env(
raise
elif "lock" in str(exc):
if retry < config.max_env_retry:
- log.warn(
+ log.warning(
"failed to create env, retrying. exception was: %s",
str(exc),
)
@@ -1124,7 +1125,7 @@ def create_env(
):
pkg_dir = os.path.dirname(pkg_dir)
folder += 1
- log.warn(
+ log.warning(
"I think conda ended up with a partial extraction for %s. "
"Removing the folder and retrying",
pkg_dir,
@@ -1132,7 +1133,7 @@ def create_env(
if os.path.isdir(pkg_dir):
utils.rm_rf(pkg_dir)
if retry < config.max_env_retry:
- log.warn(
+ log.warning(
"failed to create env, retrying. exception was: %s",
str(exc),
)
@@ -1163,7 +1164,7 @@ def create_env(
if isinstance(exc, AssertionError):
with utils.try_acquire_locks(locks, timeout=config.timeout):
pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
- log.warn(
+ log.warning(
"I think conda ended up with a partial extraction for %s. "
"Removing the folder and retrying",
pkg_dir,
@@ -1171,7 +1172,7 @@ def create_env(
if os.path.isdir(pkg_dir):
utils.rm_rf(pkg_dir)
if retry < config.max_env_retry:
- log.warn(
+ log.warning(
"failed to create env, retrying. exception was: %s", str(exc)
)
create_env(
@@ -1256,14 +1257,19 @@ def install_actions(
prefix: str | os.PathLike | Path,
index,
specs: Iterable[str | MatchSpec],
+ subdir: str | None = None,
) -> InstallActionsType:
# This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L471
# but reduced to only the functionality actually used within conda-build.
+ subdir_kwargs = {}
+ if subdir not in (None, "", "noarch"):
+ subdir_kwargs["CONDA_SUBDIR"] = subdir
with env_vars(
{
"CONDA_ALLOW_NON_CHANNEL_URLS": "true",
"CONDA_SOLVER_IGNORE_TIMESTAMPS": "false",
+ **subdir_kwargs,
},
callback=reset_context,
):
diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py
index 9744ca14b4..c815b401a7 100644
--- a/conda_build/exceptions.py
+++ b/conda_build/exceptions.py
@@ -2,12 +2,14 @@
# SPDX-License-Identifier: BSD-3-Clause
import textwrap
+from conda import CondaError
+
SEPARATOR = "-" * 70
indent = lambda s: textwrap.fill(textwrap.dedent(s))
-class CondaBuildException(Exception):
+class CondaBuildException(CondaError):
pass
@@ -107,22 +109,30 @@ class BuildLockError(CondaBuildException):
"""Raised when we failed to acquire a lock."""
-class OverLinkingError(RuntimeError):
+class OverLinkingError(RuntimeError, CondaBuildException):
def __init__(self, error, *args):
self.error = error
self.msg = f"overlinking check failed \n{error}"
super().__init__(self.msg)
-class OverDependingError(RuntimeError):
+class OverDependingError(RuntimeError, CondaBuildException):
def __init__(self, error, *args):
self.error = error
self.msg = f"overdepending check failed \n{error}"
super().__init__(self.msg)
-class RunPathError(RuntimeError):
+class RunPathError(RuntimeError, CondaBuildException):
def __init__(self, error, *args):
self.error = error
self.msg = f"runpaths check failed \n{error}"
super().__init__(self.msg)
+
+
+class BuildScriptException(CondaBuildException):
+ pass
+
+
+class CondaBuildUserError(CondaBuildException):
+ pass
diff --git a/conda_build/index.py b/conda_build/index.py
index 3a2f9ab10b..bcb9c6a9d0 100644
--- a/conda_build/index.py
+++ b/conda_build/index.py
@@ -1,9 +1,7 @@
# Copyright (C) 2014 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
-import json
import logging
import os
-import time
from functools import partial
from os.path import dirname
@@ -14,9 +12,7 @@
from conda_index.index import update_index as _update_index
from . import utils
-from .deprecations import deprecated
from .utils import (
- JSONDecodeError,
get_logger,
)
@@ -28,8 +24,6 @@
local_subdir = ""
local_output_folder = ""
cached_channels = []
-_channel_data = {}
-deprecated.constant("24.1", "24.7", "channel_data", _channel_data)
# TODO: this is to make sure that the index doesn't leak tokens. It breaks use of private channels, though.
# os.environ['CONDA_ADD_ANACONDA_TOKEN'] = "false"
@@ -56,7 +50,6 @@ def get_build_index(
global local_output_folder
global cached_index
global cached_channels
- global _channel_data
mtime = 0
channel_urls = list(utils.ensure_list(channel_urls))
@@ -131,55 +124,11 @@ def get_build_index(
platform=subdir,
)
- expanded_channels = {rec.channel for rec in cached_index}
-
- superchannel = {}
- # we need channeldata.json too, as it is a more reliable source of run_exports data
- for channel in expanded_channels:
- if channel.scheme == "file":
- location = channel.location
- if utils.on_win:
- location = location.lstrip("/")
- elif not os.path.isabs(channel.location) and os.path.exists(
- os.path.join(os.path.sep, channel.location)
- ):
- location = os.path.join(os.path.sep, channel.location)
- channeldata_file = os.path.join(
- location, channel.name, "channeldata.json"
- )
- retry = 0
- max_retries = 1
- if os.path.isfile(channeldata_file):
- while retry < max_retries:
- try:
- with open(channeldata_file, "r+") as f:
- _channel_data[channel.name] = json.load(f)
- break
- except (OSError, JSONDecodeError):
- time.sleep(0.2)
- retry += 1
- else:
- # download channeldata.json for url
- if not context.offline:
- try:
- _channel_data[channel.name] = utils.download_channeldata(
- channel.base_url + "/channeldata.json"
- )
- except CondaHTTPError:
- continue
- # collapse defaults metachannel back into one superchannel, merging channeldata
- if channel.base_url in context.default_channels and _channel_data.get(
- channel.name
- ):
- packages = superchannel.get("packages", {})
- packages.update(_channel_data[channel.name])
- superchannel["packages"] = packages
- _channel_data["defaults"] = superchannel
local_index_timestamp = os.path.getmtime(index_file)
local_subdir = subdir
local_output_folder = output_folder
cached_channels = channel_urls
- return cached_index, local_index_timestamp, _channel_data
+ return cached_index, local_index_timestamp, None
def _ensure_valid_channel(local_folder, subdir):
@@ -227,4 +176,6 @@ def _delegated_update_index(
warn=warn,
current_index_versions=current_index_versions,
debug=debug,
+ write_bz2=False,
+ write_zst=False,
)
diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py
index 43fc401551..54ec2c6f28 100644
--- a/conda_build/inspect_pkg.py
+++ b/conda_build/inspect_pkg.py
@@ -20,6 +20,7 @@
from conda.core.prefix_data import PrefixData
from conda.models.records import PrefixRecord
+from .exceptions import CondaBuildUserError
from .os_utils.ldd import (
get_linkages,
get_package_obj_files,
@@ -219,9 +220,13 @@ def inspect_linkages(
sysroot: str = "",
) -> str:
if not packages and not untracked and not all_packages:
- sys.exit("At least one package or --untracked or --all must be provided")
+ raise CondaBuildUserError(
+ "At least one package or --untracked or --all must be provided"
+ )
elif on_win:
- sys.exit("Error: conda inspect linkages is only implemented in Linux and OS X")
+ raise CondaBuildUserError(
+ "`conda inspect linkages` is only implemented on Linux and macOS"
+ )
prefix = Path(prefix)
installed = {prec.name: prec for prec in PrefixData(str(prefix)).iter_records()}
@@ -237,7 +242,7 @@ def inspect_linkages(
if name == untracked_package:
obj_files = get_untracked_obj_files(prefix)
elif name not in installed:
- sys.exit(f"Package {name} is not installed in {prefix}")
+ raise CondaBuildUserError(f"Package {name} is not installed in {prefix}")
else:
obj_files = get_package_obj_files(installed[name], prefix)
@@ -258,7 +263,7 @@ def inspect_linkages(
if relative:
precs = list(which_package(relative, prefix))
if len(precs) > 1:
- get_logger(__name__).warn(
+ get_logger(__name__).warning(
"Warning: %s comes from multiple packages: %s",
path,
comma_join(map(str, precs)),
@@ -308,7 +313,9 @@ def inspect_objects(
groupby: str = "package",
):
if not on_mac:
- sys.exit("Error: conda inspect objects is only implemented in OS X")
+ raise CondaBuildUserError(
+ "`conda inspect objects` is only implemented on macOS"
+ )
prefix = Path(prefix)
installed = {prec.name: prec for prec in PrefixData(str(prefix)).iter_records()}
diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py
index 6ec2195eb0..307a13ecc9 100644
--- a/conda_build/jinja_context.py
+++ b/conda_build/jinja_context.py
@@ -10,6 +10,7 @@
import time
from functools import partial
from io import StringIO, TextIOBase
+from subprocess import CalledProcessError
from typing import TYPE_CHECKING
from warnings import warn
@@ -165,7 +166,12 @@ def load_setup_py_data(
args.extend(["--recipe-dir", recipe_dir])
if permit_undefined_jinja:
args.append("--permit-undefined-jinja")
- check_call_env(args, env=env)
+ try:
+ check_call_env(args, env=env)
+ except CalledProcessError as exc:
+ raise CondaBuildException(
+ "Could not run load_setup_py_data in subprocess"
+ ) from exc
# this is a file that the subprocess will have written
with open(
os.path.join(m.config.work_dir, "conda_build_loaded_setup_py.json")
diff --git a/conda_build/metadata.py b/conda_build/metadata.py
index 2552682840..c254329d28 100644
--- a/conda_build/metadata.py
+++ b/conda_build/metadata.py
@@ -12,17 +12,26 @@
import warnings
from collections import OrderedDict
from functools import lru_cache
-from os.path import isfile, join
+from os.path import isdir, isfile, join
from typing import TYPE_CHECKING, NamedTuple, overload
+import yaml
from bs4 import UnicodeDammit
-from conda.base.context import context
+from conda.base.context import locate_prefix_by_name
from conda.gateways.disk.read import compute_sum
from conda.models.match_spec import MatchSpec
from frozendict import deepfreeze
-from . import exceptions, utils
+from . import utils
from .config import Config, get_or_merge_config
+from .exceptions import (
+ CondaBuildException,
+ CondaBuildUserError,
+ DependencyNeedsBuildingError,
+ RecipeError,
+ UnableToParse,
+ UnableToParseMissingJinja2,
+)
from .features import feature_list
from .license_family import ensure_valid_license_family
from .utils import (
@@ -45,7 +54,10 @@
)
if TYPE_CHECKING:
- from typing import Any, Literal
+ from typing import Any, Literal, Self
+
+ OutputDict = dict[str, Any]
+ OutputTuple = tuple[OutputDict, "MetaData"]
try:
import yaml
@@ -195,7 +207,7 @@ def get_selectors(config: Config) -> dict[str, bool]:
if not np:
np = defaults["numpy"]
if config.verbose:
- utils.get_logger(__name__).warn(
+ utils.get_logger(__name__).warning(
"No numpy version specified in conda_build_config.yaml. "
"Falling back to default numpy value of {}".format(defaults["numpy"])
)
@@ -332,12 +344,12 @@ def select_lines(text: str, namespace: dict[str, Any], variants_in_place: bool)
if value:
lines.append(line)
except Exception as e:
- sys.exit(
- f"Error: Invalid selector in meta.yaml line {i + 1}:\n"
- f"offending line:\n"
- f"{line}\n"
+ raise CondaBuildUserError(
+ f"Invalid selector in meta.yaml line {i + 1}:\n"
+ f"offending selector:\n"
+ f" [{selector}]\n"
f"exception:\n"
- f"{e.__class__.__name__}: {e}\n"
+ f" {e.__class__.__name__}: {e}\n"
)
return "\n".join(lines) + "\n"
@@ -352,10 +364,10 @@ def yamlize(data):
jinja2 # Avoid pyflakes failure: 'jinja2' imported but unused
except ImportError:
- raise exceptions.UnableToParseMissingJinja2(original=e)
+ raise UnableToParseMissingJinja2(original=e)
print("Problematic recipe:", file=sys.stderr)
print(data, file=sys.stderr)
- raise exceptions.UnableToParse(original=e)
+ raise UnableToParse(original=e)
def ensure_valid_fields(meta):
@@ -396,9 +408,7 @@ def _trim_None_strings(meta_dict):
def ensure_valid_noarch_value(meta):
build_noarch = meta.get("build", {}).get("noarch")
if build_noarch and build_noarch not in NOARCH_TYPES:
- raise exceptions.CondaBuildException(
- f"Invalid value for noarch: {build_noarch}"
- )
+ raise CondaBuildException(f"Invalid value for noarch: {build_noarch}")
def _get_all_dependencies(metadata, envs=("host", "build", "run")):
@@ -408,29 +418,55 @@ def _get_all_dependencies(metadata, envs=("host", "build", "run")):
return reqs
-def check_circular_dependencies(render_order, config=None):
+def _check_circular_dependencies(
+ render_order: list[OutputTuple],
+ config: Config | None = None,
+) -> None:
+ envs: tuple[str, ...]
if config and config.host_subdir != config.build_subdir:
# When cross compiling build dependencies are already built
# and cannot come from the recipe as subpackages
envs = ("host", "run")
else:
envs = ("build", "host", "run")
- pairs = []
- for idx, m in enumerate(render_order.values()):
- for other_m in list(render_order.values())[idx + 1 :]:
+
+ pairs: list[tuple[str, str]] = []
+ for idx, (_, metadata) in enumerate(render_order):
+ name = metadata.name()
+ for _, other_metadata in render_order[idx + 1 :]:
+ other_name = other_metadata.name()
if any(
- m.name() == dep or dep.startswith(m.name() + " ")
- for dep in _get_all_dependencies(other_m, envs=envs)
+ name == dep.split(" ")[0]
+ for dep in _get_all_dependencies(other_metadata, envs=envs)
) and any(
- other_m.name() == dep or dep.startswith(other_m.name() + " ")
- for dep in _get_all_dependencies(m, envs=envs)
+ other_name == dep.split(" ")[0]
+ for dep in _get_all_dependencies(metadata, envs=envs)
):
- pairs.append((m.name(), other_m.name()))
+ pairs.append((name, other_name))
+
if pairs:
error = "Circular dependencies in recipe: \n"
for pair in pairs:
error += " {} <-> {}\n".format(*pair)
- raise exceptions.RecipeError(error)
+ raise RecipeError(error)
+
+
+def _check_run_constrained(metadata_tuples):
+ errors = []
+ for _, metadata in metadata_tuples:
+ for dep in _get_all_dependencies(metadata, envs=("run_constrained",)):
+ if "{{" in dep:
+ # skip Jinja content; it might have not been rendered yet; we'll get it next call
+ continue
+ try:
+ MatchSpec(dep)
+ except ValueError as exc:
+ errors.append(
+ f"- Output '{metadata.name()}' has invalid run_constrained item: {dep}. "
+ f"Reason: {exc}"
+ )
+ if errors:
+ raise RecipeError("\n".join(["", *errors]))
def _variants_equal(metadata, output_metadata):
@@ -474,7 +510,7 @@ def ensure_matching_hashes(output_metadata):
error += "Mismatching package: {} (id {}); dep: {}; consumer package: {}\n".format(
*prob
)
- raise exceptions.RecipeError(
+ raise RecipeError(
"Mismatching hashes in recipe. Exact pins in dependencies "
"that contribute to the hash often cause this. Can you "
"change one or more exact pins to version bound constraints?\n"
@@ -701,19 +737,18 @@ def _git_clean(source_meta):
If more than one field is used to specified, exit
and complain.
"""
-
git_rev_tags_old = ("git_branch", "git_tag")
git_rev = "git_rev"
git_rev_tags = (git_rev,) + git_rev_tags_old
-
has_rev_tags = tuple(bool(source_meta.get(tag, "")) for tag in git_rev_tags)
- if sum(has_rev_tags) > 1:
- msg = "Error: multiple git_revs:"
- msg += ", ".join(
- f"{key}" for key, has in zip(git_rev_tags, has_rev_tags) if has
- )
- sys.exit(msg)
+
+ keys = [key for key in (git_rev, "git_branch", "git_tag") if key in source_meta]
+ if not keys:
+ # git_branch, git_tag, nor git_rev specified, return as-is
+ return source_meta
+ elif len(keys) > 1:
+ raise CondaBuildUserError(f"Multiple git_revs: {', '.join(keys)}")
# make a copy of the input so we have no side-effects
ret_meta = source_meta.copy()
@@ -736,15 +771,16 @@ def _str_version(package_meta):
return package_meta
-def check_bad_chrs(s, field):
- bad_chrs = "=@#$%^&*:;\"'\\|<>?/ "
+def check_bad_chrs(value: str, field: str) -> None:
+ bad_chrs = set("=@#$%^&*:;\"'\\|<>?/ ")
if field in ("package/version", "build/string"):
- bad_chrs += "-"
+ bad_chrs.add("-")
if field != "package/version":
- bad_chrs += "!"
- for c in bad_chrs:
- if c in s:
- sys.exit(f"Error: bad character '{c}' in {field}: {s}")
+ bad_chrs.add("!")
+ if invalid := bad_chrs.intersection(value):
+ raise CondaBuildUserError(
+ f"Bad character(s) ({''.join(sorted(invalid))}) in {field}: {value}."
+ )
def get_package_version_pin(build_reqs, name):
@@ -817,24 +853,12 @@ def build_string_from_metadata(metadata):
return build_str
-# This really belongs in conda, and it is int conda.cli.common,
-# but we don't presently have an API there.
-def _get_env_path(env_name_or_path):
- if not os.path.isdir(env_name_or_path):
- for envs_dir in list(context.envs_dirs) + [os.getcwd()]:
- path = os.path.join(envs_dir, env_name_or_path)
- if os.path.isdir(path):
- env_name_or_path = path
- break
- bootstrap_metadir = os.path.join(env_name_or_path, "conda-meta")
- if not os.path.isdir(bootstrap_metadir):
- print(f"Bootstrap environment '{env_name_or_path}' not found")
- sys.exit(1)
- return env_name_or_path
-
-
def _get_dependencies_from_environment(env_name_or_path):
- path = _get_env_path(env_name_or_path)
+ path = (
+ env_name_or_path
+ if isdir(env_name_or_path)
+ else locate_prefix_by_name(env_name_or_path)
+ )
# construct build requirements that replicate the given bootstrap environment
# and concatenate them to the build requirements from the recipe
bootstrap_metadata = get_installed_packages(path)
@@ -846,7 +870,7 @@ def _get_dependencies_from_environment(env_name_or_path):
return {"requirements": {"build": bootstrap_requirements}}
-def toposort(output_metadata_map):
+def _toposort_outputs(output_tuples: list[OutputTuple]) -> list[OutputTuple]:
"""This function is used to work out the order to run the install scripts
for split packages based on any interdependencies. The result is just
a re-ordering of outputs such that we can run them in that order and
@@ -858,53 +882,51 @@ def toposort(output_metadata_map):
# We only care about the conda packages built by this recipe. Non-conda
# packages get sorted to the end.
- these_packages = [
- output_d["name"]
- for output_d in output_metadata_map
- if output_d.get("type", "conda").startswith("conda")
- ]
- topodict = dict()
- order = dict()
- endorder = set()
-
- for idx, (output_d, output_m) in enumerate(output_metadata_map.items()):
+ conda_outputs: dict[str, list[OutputTuple]] = {}
+ non_conda_outputs: list[OutputTuple] = []
+ for output_tuple in output_tuples:
+ output_d, _ = output_tuple
if output_d.get("type", "conda").startswith("conda"):
- deps = output_m.get_value("requirements/run", []) + output_m.get_value(
- "requirements/host", []
- )
- if not output_m.is_cross:
- deps.extend(output_m.get_value("requirements/build", []))
- name = output_d["name"]
- order[name] = idx
- topodict[name] = set()
- for dep in deps:
- dep = dep.split(" ")[0]
- if dep in these_packages:
- topodict[name].update((dep,))
+ # conda packages must have a name
+ # the same package name may be seen multiple times (variants)
+ conda_outputs.setdefault(output_d["name"], []).append(output_tuple)
+ elif "name" in output_d:
+ non_conda_outputs.append(output_tuple)
else:
- endorder.add(idx)
-
- topo_order = list(_toposort(topodict))
- keys = [
- k
- for pkgname in topo_order
- for k in output_metadata_map.keys()
- if "name" in k and k["name"] == pkgname
+ # TODO: is it even possible to get here? and if so should we silently ignore or error?
+ utils.get_logger(__name__).warning(
+ "Found an output without a name, skipping"
+ )
+
+ # Iterate over conda packages, creating a mapping of package names to their
+ # dependencies to be used in toposort
+ name_to_dependencies: dict[str, set[str]] = {}
+ for name, same_name_outputs in conda_outputs.items():
+ for output_d, output_metadata in same_name_outputs:
+ # dependencies for all of the variants
+ dependencies = (
+ *output_metadata.get_value("requirements/run", []),
+ *output_metadata.get_value("requirements/host", []),
+ *(
+ output_metadata.get_value("requirements/build", [])
+ if not output_metadata.is_cross
+ else []
+ ),
+ )
+ name_to_dependencies.setdefault(name, set()).update(
+ dependency_name
+ for dependency in dependencies
+ if (dependency_name := dependency.split(" ")[0]) in conda_outputs
+ )
+
+ return [
+ *(
+ output
+ for name in _toposort(name_to_dependencies)
+ for output in conda_outputs[name]
+ ),
+ *non_conda_outputs,
]
- # not sure that this is working... not everything has 'name', and not sure how this pans out
- # may end up excluding packages without the 'name' field
- keys.extend(
- [
- k
- for pkgname in endorder
- for k in output_metadata_map.keys()
- if ("name" in k and k["name"] == pkgname) or "name" not in k
- ]
- )
- result = OrderedDict()
- for key in keys:
- result[key] = output_metadata_map[key]
- return result
def get_output_dicts_from_metadata(
@@ -1001,12 +1023,12 @@ def finalize_outputs_pass(
fm.name(),
deepfreeze({k: fm.config.variant[k] for k in fm.get_used_vars()}),
] = (output_d, fm)
- except exceptions.DependencyNeedsBuildingError as e:
+ except DependencyNeedsBuildingError as e:
if not permit_unsatisfiable_variants:
raise
else:
log = utils.get_logger(__name__)
- log.warn(
+ log.warning(
"Could not finalize metadata due to missing dependencies: "
f"{e.packages}"
)
@@ -1227,7 +1249,7 @@ def parse_again(
log = utils.get_logger(__name__)
if kw:
- log.warn(
+ log.warning(
"using unsupported internal conda-build function `parse_again`. Please use "
"conda_build.api.render instead."
)
@@ -1304,12 +1326,11 @@ def parse_until_resolved(
):
"""variant contains key-value mapping for additional functions and values
for jinja2 variables"""
- # undefined_jinja_vars is refreshed by self.parse again
- undefined_jinja_vars = ()
# store the "final" state that we think we're in. reloading the meta.yaml file
# can reset it (to True)
final = self.final
- # always parse again at least once.
+
+ # always parse again at least once
self.parse_again(
permit_undefined_jinja=True,
allow_no_other_outputs=allow_no_other_outputs,
@@ -1317,6 +1338,8 @@ def parse_until_resolved(
)
self.final = final
+ # recursively parse again so long as each iteration has fewer undefined jinja variables
+ undefined_jinja_vars = ()
while set(undefined_jinja_vars) != set(self.undefined_jinja_vars):
undefined_jinja_vars = self.undefined_jinja_vars
self.parse_again(
@@ -1325,18 +1348,8 @@ def parse_until_resolved(
bypass_env_check=bypass_env_check,
)
self.final = final
- if undefined_jinja_vars:
- self.parse_again(
- permit_undefined_jinja=False,
- allow_no_other_outputs=allow_no_other_outputs,
- bypass_env_check=bypass_env_check,
- )
- sys.exit(
- f"Undefined Jinja2 variables remain ({self.undefined_jinja_vars}). Please enable "
- "source downloading and try again."
- )
- # always parse again at the end, too.
+ # always parse again at the end without permit_undefined_jinja
self.parse_again(
permit_undefined_jinja=False,
allow_no_other_outputs=allow_no_other_outputs,
@@ -1441,7 +1454,7 @@ def get_value(self, name, default=None, autotype=True):
# is passed in with an index, e.g. get_value('source/0/git_url')
if index is None:
log = utils.get_logger(__name__)
- log.warn(
+ log.warning(
f"No index specified in get_value('{name}'). Assuming index 0."
)
index = 0
@@ -1993,8 +2006,8 @@ def _get_contents(
except jinja2.TemplateError as ex:
if "'None' has not attribute" in str(ex):
ex = "Failed to run jinja context function"
- sys.exit(
- f"Error: Failed to render jinja template in {self.meta_path}:\n{str(ex)}"
+ raise CondaBuildUserError(
+ f"Failed to render jinja template in {self.meta_path}:\n{str(ex)}"
)
finally:
if "CONDA_BUILD_STATE" in os.environ:
@@ -2205,7 +2218,7 @@ def extract_single_output_text(
output = output_matches[output_index] if output_matches else ""
except ValueError:
if not self.path and self.meta.get("extra", {}).get("parent_recipe"):
- utils.get_logger(__name__).warn(
+ utils.get_logger(__name__).warning(
f"Didn't match any output in raw metadata. Target value was: {output_name}"
)
output = ""
@@ -2268,7 +2281,7 @@ def validate_features(self):
"character in your recipe."
)
- def copy(self):
+ def copy(self: Self) -> MetaData:
new = copy.copy(self)
new.config = self.config.copy()
new.config.variant = copy.deepcopy(self.config.variant)
@@ -2520,10 +2533,10 @@ def get_output_metadata_set(
permit_undefined_jinja: bool = False,
permit_unsatisfiable_variants: bool = False,
bypass_env_check: bool = False,
- ) -> list[tuple[dict[str, Any], MetaData]]:
+ ) -> list[OutputTuple]:
from .source import provide
- out_metadata_map = {}
+ output_tuples: list[OutputTuple] = []
if self.final:
outputs = get_output_dicts_from_metadata(self)
output_tuples = [(outputs[0], self)]
@@ -2579,27 +2592,26 @@ def get_output_metadata_set(
}
),
] = (out, out_metadata)
- out_metadata_map[deepfreeze(out)] = out_metadata
+ output_tuples.append((out, out_metadata))
ref_metadata.other_outputs = out_metadata.other_outputs = (
all_output_metadata
)
except SystemExit:
if not permit_undefined_jinja:
raise
- out_metadata_map = {}
+ output_tuples = []
- assert out_metadata_map, (
+ assert output_tuples, (
"Error: output metadata set is empty. Please file an issue"
" on the conda-build tracker at https://github.com/conda/conda-build/issues"
)
- # format here is {output_dict: metadata_object}
- render_order = toposort(out_metadata_map)
- check_circular_dependencies(render_order, config=self.config)
+ render_order: list[OutputTuple] = _toposort_outputs(output_tuples)
+ _check_circular_dependencies(render_order, config=self.config)
conda_packages = OrderedDict()
non_conda_packages = []
- for output_d, m in render_order.items():
+ for output_d, m in render_order:
if not output_d.get("type") or output_d["type"] in (
"conda",
"conda_v2",
@@ -2653,6 +2665,7 @@ def get_output_metadata_set(
m.final = True
final_conda_packages.append((out_d, m))
output_tuples = final_conda_packages + non_conda_packages
+ _check_run_constrained(output_tuples)
return output_tuples
def get_loop_vars(self):
@@ -2898,7 +2911,7 @@ def _get_used_vars_output_script(self):
)
else:
log = utils.get_logger(__name__)
- log.warn(
+ log.warning(
f"Not detecting used variables in output script {script}; conda-build only knows "
"how to search .sh and .bat files right now."
)
diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py
index 1e80fcd2e4..083ffeb07a 100644
--- a/conda_build/noarch_python.py
+++ b/conda_build/noarch_python.py
@@ -1,46 +1,47 @@
# Copyright (C) 2014 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
+from __future__ import annotations
+
import json
import locale
import logging
import os
import shutil
-import sys
from os.path import basename, dirname, isfile, join
+from pathlib import Path
-from .utils import on_win
+from .exceptions import CondaBuildUserError
+from .utils import bin_dirname, on_win, rm_rf
-def rewrite_script(fn, prefix):
+def rewrite_script(fn: str, prefix: str | os.PathLike) -> str:
"""Take a file from the bin directory and rewrite it into the python-scripts
directory with the same permissions after it passes some sanity checks for
    noarch packages"""
# Load and check the source file for not being a binary
- src = join(prefix, "Scripts" if on_win else "bin", fn)
+ src = Path(prefix, bin_dirname, fn)
encoding = locale.getpreferredencoding()
# if default locale is ascii, allow UTF-8 (a reasonably modern ASCII extension)
if encoding == "ANSI_X3.4-1968":
encoding = "UTF-8"
- with open(src, encoding=encoding) as fi:
- try:
- data = fi.read()
- except UnicodeDecodeError: # file is binary
- sys.exit(f"[noarch_python] Noarch package contains binary script: {fn}")
- src_mode = os.stat(src).st_mode
- os.unlink(src)
+ try:
+ data = src.read_text(encoding=encoding)
+ except UnicodeDecodeError: # binary file
+ raise CondaBuildUserError(f"Noarch package contains binary script: {fn}")
+ src_mode = src.stat().st_mode
+ rm_rf(src)
# Get rid of '-script.py' suffix on Windows
if on_win and fn.endswith("-script.py"):
fn = fn[:-10]
# Rewrite the file to the python-scripts directory
- dst_dir = join(prefix, "python-scripts")
- os.makedirs(dst_dir, exist_ok=True)
- dst = join(dst_dir, fn)
- with open(dst, "w") as fo:
- fo.write(data)
- os.chmod(dst, src_mode)
+ dst_dir = Path(prefix, "python-scripts")
+ dst_dir.mkdir(exist_ok=True)
+ dst = dst_dir / fn
+ dst.write_text(data)
+ dst.chmod(src_mode)
return fn
diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py
index d6ee2841d6..b474a4897a 100644
--- a/conda_build/os_utils/liefldd.py
+++ b/conda_build/os_utils/liefldd.py
@@ -353,12 +353,12 @@ def _get_path_dirs(prefix):
yield "/".join((prefix, "bin"))
-def get_uniqueness_key(file):
+def get_uniqueness_key(filename, file):
binary = ensure_binary(file)
if not binary:
return EXE_FORMATS.UNKNOWN
elif binary.format == EXE_FORMATS.MACHO:
- return str(file)
+ return filename
elif binary.format == EXE_FORMATS.ELF and ( # noqa
binary.type == lief.ELF.ELF_CLASS.CLASS32
or binary.type == lief.ELF.ELF_CLASS.CLASS64
@@ -369,8 +369,8 @@ def get_uniqueness_key(file):
]
if result:
return result[0]
- return str(file)
- return str(file)
+ return filename
+ return filename
def _get_resolved_location(
@@ -505,13 +505,13 @@ def inspect_linkages_lief(
for element in todo:
todo.pop(0)
filename2 = element[0]
- binary = element[1]
- if not binary:
+ binary2 = element[1]
+ if not binary2:
continue
- uniqueness_key = get_uniqueness_key(binary)
+ uniqueness_key = get_uniqueness_key(filename2, binary2)
if uniqueness_key not in already_seen:
parent_exe_dirname = None
- if binary.format == EXE_FORMATS.PE:
+ if binary2.format == EXE_FORMATS.PE:
tmp_filename = filename2
while tmp_filename:
if (
@@ -527,17 +527,17 @@ def inspect_linkages_lief(
if ".pyd" in filename2 or (os.sep + "DLLs" + os.sep) in filename2:
parent_exe_dirname = envroot.replace(os.sep, "/") + "/DLLs"
rpaths_by_binary[filename2] = get_rpaths(
- binary, parent_exe_dirname, envroot.replace(os.sep, "/"), sysroot
+ binary2, parent_exe_dirname, envroot.replace(os.sep, "/"), sysroot
)
tmp_filename = filename2
rpaths_transitive = []
- if binary.format == EXE_FORMATS.PE:
+ if binary2.format == EXE_FORMATS.PE:
rpaths_transitive = rpaths_by_binary[tmp_filename]
else:
while tmp_filename:
rpaths_transitive[:0] = rpaths_by_binary[tmp_filename]
tmp_filename = parents_by_filename[tmp_filename]
- libraries = get_libraries(binary)
+ libraries = get_libraries(binary2)
if filename2 in libraries: # Happens on macOS, leading to cycles.
libraries.remove(filename2)
# RPATH is implicit everywhere except macOS, make it explicit to simplify things.
@@ -546,14 +546,14 @@ def inspect_linkages_lief(
"$RPATH/" + lib
if not lib.startswith("/")
and not lib.startswith("$")
- and binary.format != EXE_FORMATS.MACHO # noqa
+ and binary2.format != EXE_FORMATS.MACHO # noqa
else lib
)
for lib in libraries
]
for lib, orig in zip(libraries, these_orig):
resolved = _get_resolved_location(
- binary,
+ binary2,
orig,
exedir,
exedir,
@@ -568,7 +568,7 @@ def inspect_linkages_lief(
# can be run case-sensitively if the user wishes.
#
"""
- if binary.format == EXE_FORMATS.PE:
+ if binary2.format == EXE_FORMATS.PE:
import random
path_fixed = (
os.path.dirname(path_fixed)
@@ -596,7 +596,7 @@ def inspect_linkages_lief(
if recurse:
if os.path.exists(resolved[0]):
todo.append([resolved[0], lief.parse(resolved[0])])
- already_seen.add(get_uniqueness_key(binary))
+ already_seen.add(uniqueness_key)
return results
@@ -1174,6 +1174,10 @@ def __call__(self, *args, **kw):
if not data:
break
sha1.update(data)
+            # update with file name, if it's a different
+ # file with the same contents, we don't want
+ # to treat it as cached
+ sha1.update(os.path.realpath(arg).encode("utf-8"))
arg = sha1.hexdigest()
if isinstance(arg, list):
newargs.append(tuple(arg))
diff --git a/conda_build/os_utils/macho.py b/conda_build/os_utils/macho.py
index 17fc5d5a13..8e02c8ee86 100644
--- a/conda_build/os_utils/macho.py
+++ b/conda_build/os_utils/macho.py
@@ -258,7 +258,7 @@ def _chmod(filename, mode):
os.chmod(filename, mode)
except (OSError, utils.PermissionError) as e:
log = utils.get_logger(__name__)
- log.warn(str(e))
+ log.warning(str(e))
def install_name_tool(args, build_prefix=None, verbose=False):
diff --git a/conda_build/post.py b/conda_build/post.py
index 67c6a355a7..6b10af2a78 100644
--- a/conda_build/post.py
+++ b/conda_build/post.py
@@ -9,6 +9,7 @@
import shutil
import stat
import sys
+import traceback
from collections import OrderedDict, defaultdict
from copy import copy
from fnmatch import filter as fnmatch_filter
@@ -73,6 +74,12 @@
"linux": (elffile,),
}
+GNU_ARCH_MAP = {
+ "ppc64le": "powerpc64le",
+ "32": "i686",
+ "64": "x86_64",
+}
+
def fix_shebang(f, prefix, build_python, osx_is_app=False):
path = join(prefix, f)
@@ -603,7 +610,20 @@ def mk_relative_linux(f, prefix, rpaths=("lib",), method=None):
existing_pe = existing_pe.split(os.pathsep)
existing = existing_pe
if have_lief:
- existing2, _, _ = get_rpaths_raw(elf)
+ existing2 = None
+ try:
+ existing2, _, _ = get_rpaths_raw(elf)
+ except Exception as e:
+ if method == "LIEF":
+ print(
+ f"ERROR :: get_rpaths_raw({elf!r}) with LIEF failed: {e}, but LIEF was specified"
+ )
+ traceback.print_tb(e.__traceback__)
+ else:
+ print(
+ f"WARNING :: get_rpaths_raw({elf!r}) with LIEF failed: {e}, will proceed with patchelf"
+ )
+ method = "patchelf"
if existing_pe and existing_pe != existing2:
print(
f"WARNING :: get_rpaths_raw()={existing2} and patchelf={existing_pe} disagree for {elf} :: "
@@ -1406,8 +1426,20 @@ def check_overlinking_impl(
list(diffs)[1:3],
)
sysroots_files[srs] = sysroot_files
+
+ def sysroot_matches_subdir(path):
+ # The path looks like /aarch64-conda-linux-gnu/sysroot/
+ # We check that the triplet "aarch64-conda-linux-gnu"
+ # matches the subdir for eg: linux-aarch64.
+ sysroot_arch = Path(path).parent.name.split("-")[0]
+ subdir_arch = subdir.split("-")[-1]
+ return sysroot_arch == GNU_ARCH_MAP.get(subdir_arch, subdir_arch)
+
sysroots_files = OrderedDict(
- sorted(sysroots_files.items(), key=lambda x: -len(x[1]))
+ sorted(
+ sysroots_files.items(),
+ key=lambda x: (not sysroot_matches_subdir(x[0]), -len(x[1])),
+ )
)
all_needed_dsos, needed_dsos_for_file = _collect_needed_dsos(
@@ -1595,7 +1627,7 @@ def post_process_shared_lib(m, f, files, host_prefix=None):
elif codefile == machofile:
if m.config.host_platform != "osx":
log = utils.get_logger(__name__)
- log.warn(
+ log.warning(
"Found Mach-O file but patching is only supported on macOS, skipping: %s",
path,
)
@@ -1631,7 +1663,7 @@ def fix_permissions(files, prefix):
lchmod(path, new_mode)
except (OSError, utils.PermissionError) as e:
log = utils.get_logger(__name__)
- log.warn(str(e))
+ log.warning(str(e))
def check_menuinst_json(files, prefix) -> None:
diff --git a/conda_build/render.py b/conda_build/render.py
index cc3bcd87c0..0c80df0005 100644
--- a/conda_build/render.py
+++ b/conda_build/render.py
@@ -27,7 +27,7 @@
from conda.base.context import context
from conda.cli.common import specs_from_url
from conda.core.package_cache_data import ProgressiveFetchExtract
-from conda.exceptions import UnsatisfiableError
+from conda.exceptions import NoPackagesFoundError, UnsatisfiableError
from conda.gateways.disk.create import TemporaryDirectory
from conda.models.records import PackageRecord
from conda.models.version import VersionOrder
@@ -739,14 +739,14 @@ def finalize_metadata(
if build_unsat or host_unsat:
m.final = False
log = utils.get_logger(__name__)
- log.warn(
+ log.warning(
f"Returning non-final recipe for {m.dist()}; one or more dependencies "
"was unsatisfiable:"
)
if build_unsat:
- log.warn(f"Build: {build_unsat}")
+ log.warning(f"Build: {build_unsat}")
if host_unsat:
- log.warn(f"Host: {host_unsat}")
+ log.warning(f"Host: {host_unsat}")
else:
m.final = True
if is_top_level:
@@ -1000,6 +1000,59 @@ def render_recipe(
)
+def render_metadata_tuples(
+ metadata_tuples: Iterable[MetaDataTuple],
+ config: Config,
+ permit_unsatisfiable_variants: bool = True,
+ finalize: bool = True,
+ bypass_env_check: bool = False,
+) -> list[MetaDataTuple]:
+ output_metas: dict[tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple] = {}
+ for meta, download, render_in_env in metadata_tuples:
+ if not meta.skip() or not config.trim_skip:
+ for od, om in meta.get_output_metadata_set(
+ permit_unsatisfiable_variants=permit_unsatisfiable_variants,
+ permit_undefined_jinja=not finalize,
+ bypass_env_check=bypass_env_check,
+ ):
+ if not om.skip() or not config.trim_skip:
+ if "type" not in od or od["type"] == "conda":
+ if finalize and not om.final:
+ try:
+ om = finalize_metadata(
+ om,
+ permit_unsatisfiable_variants=permit_unsatisfiable_variants,
+ )
+ except (DependencyNeedsBuildingError, NoPackagesFoundError):
+ if not permit_unsatisfiable_variants:
+ raise
+
+ # remove outputs section from output objects for simplicity
+ if not om.path and (outputs := om.get_section("outputs")):
+ om.parent_outputs = outputs
+ del om.meta["outputs"]
+
+ output_metas[
+ om.dist(),
+ om.config.variant.get("target_platform"),
+ tuple(
+ (var, om.config.variant[var])
+ for var in om.get_used_vars()
+ ),
+ ] = MetaDataTuple(om, download, render_in_env)
+ else:
+ output_metas[
+ f"{om.type}: {om.name()}",
+ om.config.variant.get("target_platform"),
+ tuple(
+ (var, om.config.variant[var])
+ for var in om.get_used_vars()
+ ),
+ ] = MetaDataTuple(om, download, render_in_env)
+
+ return list(output_metas.values())
+
+
# Keep this out of the function below so it can be imported by other modules.
FIELDS = [
"package",
diff --git a/conda_build/source.py b/conda_build/source.py
index 903f5d7ca0..983188dd5a 100644
--- a/conda_build/source.py
+++ b/conda_build/source.py
@@ -74,7 +74,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False):
hash_added = True
break
else:
- log.warn(
+ log.warning(
f"No hash (md5, sha1, sha256) provided for {unhashed_fn}. Source download forced. "
"Add hash to recipe to use source cache."
)
@@ -102,10 +102,10 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False):
with LoggingContext():
download(url, path)
except CondaHTTPError as e:
- log.warn(f"Error: {str(e).strip()}")
+ log.warning(f"Error: {str(e).strip()}")
rm_rf(path)
except RuntimeError as e:
- log.warn(f"Error: {str(e).strip()}")
+ log.warning(f"Error: {str(e).strip()}")
rm_rf(path)
else:
if verbose:
@@ -467,7 +467,7 @@ def git_info(src_dir, build_prefix, git=None, verbose=True, fo=None):
if not git:
git = external.find_executable("git", build_prefix)
if not git:
- log.warn(
+ log.warning(
"git not installed in root environment. Skipping recording of git info."
)
return
diff --git a/conda_build/utils.py b/conda_build/utils.py
index 796f849caf..4b5fdcc8d2 100644
--- a/conda_build/utils.py
+++ b/conda_build/utils.py
@@ -66,7 +66,6 @@
from conda.models.version import VersionOrder
from conda.utils import unix_path_to_win
-from .deprecations import deprecated
from .exceptions import BuildLockError
if TYPE_CHECKING:
@@ -258,8 +257,8 @@ def _execute(self, *args, **kwargs):
psutil = None
psutil_exceptions = (OSError, ValueError)
log = get_logger(__name__)
- log.warn(f"psutil import failed. Error was {e}")
- log.warn(
+ log.warning(f"psutil import failed. Error was {e}")
+ log.warning(
"only disk usage and time statistics will be available. Install psutil to "
"get CPU time and memory usage statistics."
)
@@ -595,7 +594,7 @@ def copy_into(
src_folder = os.getcwd()
if os.path.islink(src) and not os.path.exists(os.path.realpath(src)):
- log.warn("path %s is a broken symlink - ignoring copy", src)
+ log.warning("path %s is a broken symlink - ignoring copy", src)
return
if not lock and locking:
@@ -1233,7 +1232,7 @@ def islist(
# StopIteration: list is empty, an empty list is still uniform
return True
# check for explicit type match, do not allow the ambiguity of isinstance
- uniform = lambda e: type(e) == etype # noqa: E731
+ uniform = lambda e: type(e) == etype # noqa: E721
try:
return all(uniform(e) for e in arg)
@@ -1319,7 +1318,7 @@ def find_recipe(path: str) -> str:
metas = [m for m in VALID_METAS if os.path.isfile(os.path.join(path, m))]
if len(metas) == 1:
- get_logger(__name__).warn(
+ get_logger(__name__).warning(
"Multiple meta files found. "
f"The {metas[0]} file in the base directory ({path}) "
"will be used."
@@ -1409,47 +1408,6 @@ def get_installed_packages(path):
return installed
-@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.")
-def _convert_lists_to_sets(_dict):
- for k, v in _dict.items():
- if hasattr(v, "keys"):
- _dict[k] = HashableDict(_convert_lists_to_sets(v))
- elif hasattr(v, "__iter__") and not isinstance(v, str):
- try:
- _dict[k] = sorted(list(set(v)))
- except TypeError:
- _dict[k] = sorted(list({tuple(_) for _ in v}))
- return _dict
-
-
-@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.")
-class HashableDict(dict):
- """use hashable frozen dictionaries for resources and resource types so that they can be in sets"""
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self = _convert_lists_to_sets(self)
-
- def __hash__(self):
- return hash(json.dumps(self, sort_keys=True))
-
-
-@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.")
-def represent_hashabledict(dumper, data):
- value = []
-
- for item_key, item_value in data.items():
- node_key = dumper.represent_data(item_key)
- node_value = dumper.represent_data(item_value)
-
- value.append((node_key, node_value))
-
- return yaml.nodes.MappingNode("tag:yaml.org,2002:map", value)
-
-
-yaml.add_representer(HashableDict, represent_hashabledict)
-
-
# http://stackoverflow.com/a/10743550/1170370
@contextlib.contextmanager
def capture():
@@ -1622,13 +1580,12 @@ def filter_info_files(files_list, prefix):
)
-@deprecated.argument("24.5", "24.7", "config")
-def rm_rf(path):
+def rm_rf(path: str | os.PathLike) -> None:
from conda.core.prefix_data import delete_prefix_from_linked_data
- from conda.gateways.disk.delete import rm_rf as rm_rf
+ from conda.gateways.disk.delete import rm_rf
- rm_rf(path)
- delete_prefix_from_linked_data(path)
+ rm_rf(str(path))
+ delete_prefix_from_linked_data(str(path))
# https://stackoverflow.com/a/31459386/1170370
@@ -1696,7 +1653,8 @@ def get_logger(name, level=logging.INFO, dedupe=True, add_stdout_stderr_handlers
logging.config.dictConfig(config_dict)
level = config_dict.get("loggers", {}).get(name, {}).get("level", level)
log = logging.getLogger(name)
- log.setLevel(level)
+ if log.level != level:
+ log.setLevel(level)
if dedupe:
log.addFilter(dedupe_filter)
@@ -1933,7 +1891,7 @@ def ensure_valid_spec(spec: str | MatchSpec, warn: bool = False) -> str | MatchS
if "*" not in spec:
if match.group(1) not in ("python", "vc") and warn:
log = get_logger(__name__)
- log.warn(
+ log.warning(
f"Adding .* to spec '{spec}' to ensure satisfiability. Please "
"consider putting {{{{ var_name }}}}.* or some relational "
"operator (>/>=/<=) on this spec in meta.yaml, or if req is "
diff --git a/conda_build/variants.py b/conda_build/variants.py
index 447025818c..3eef82266d 100644
--- a/conda_build/variants.py
+++ b/conda_build/variants.py
@@ -18,7 +18,6 @@
import yaml
from conda.base.context import context
-from .deprecations import deprecated
from .utils import ensure_list, get_logger, islist, on_win, trim_empty_keys
from .version import _parse as parse_version
@@ -701,7 +700,6 @@ def get_package_variants(recipedir_or_metadata, config=None, variants=None):
return filter_combined_spec_to_used_keys(combined_spec, specs=specs)
-@deprecated.argument("24.5", "24.7", "loop_only")
def get_vars(variants: Iterable[dict[str, Any]]) -> set[str]:
"""For purposes of naming/identifying, provide a way of identifying which variables contribute
to the matrix dimensionality"""
@@ -747,15 +745,31 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False):
v_req_regex = "[-_]".join(map(re.escape, v.split("_")))
variant_regex = rf"\{{\s*(?:pin_[a-z]+\(\s*?['\"])?{v_regex}[^'\"]*?\}}\}}"
selector_regex = rf"^[^#\[]*?\#?\s\[[^\]]*?(?!\]]"
+ # NOTE: why use a regex instead of the jinja2 parser/AST?
+ # One can ask the jinja2 parser for undefined variables, but conda-build moves whole
+ # blocks of text around when searching for variables and applies selectors to the text.
+ # So the text that reaches this function is not necessarily valid jinja2 syntax. :/
conditional_regex = (
r"(?:^|[^\{])\{%\s*(?:el)?if\s*.*" + v_regex + r"\s*(?:[^%]*?)?%\}"
)
+ # TODO: this `for` regex won't catch some common cases like lists of vars, multiline
+ # jinja2 blocks, if filters on the for loop, etc.
+ for_regex = r"(?:^|[^\{])\{%\s*for\s*.*\s*in\s*" + v_regex + r"(?:[^%]*?)?%\}"
+ set_regex = r"(?:^|[^\{])\{%\s*set\s*.*\s*=\s*.*" + v_regex + r"(?:[^%]*?)?%\}"
# plain req name, no version spec. Look for end of line after name, or comment or selector
requirement_regex = rf"^\s+\-\s+{v_req_regex}\s*(?:\s[\[#]|$)"
if selectors_only:
all_res.insert(0, selector_regex)
else:
- all_res.extend([variant_regex, requirement_regex, conditional_regex])
+ all_res.extend(
+ [
+ variant_regex,
+ requirement_regex,
+ conditional_regex,
+ for_regex,
+ set_regex,
+ ]
+ )
# consolidate all re's into one big one for speedup
all_res = r"|".join(all_res)
if any(re.search(all_res, line) for line in variant_lines):
diff --git a/conda_build/windows.py b/conda_build/windows.py
index 00287c50bf..8643431a5b 100644
--- a/conda_build/windows.py
+++ b/conda_build/windows.py
@@ -102,16 +102,16 @@ def msvc_env_cmd(bits, config, override=None):
# there's clear user demand, it's not clear that we should invest the
# effort into updating a known deprecated function for a new platform.
log = get_logger(__name__)
- log.warn(
+ log.warning(
"Using legacy MSVC compiler setup. This will be removed in conda-build 4.0. "
"If this recipe does not use a compiler, this message is safe to ignore. "
"Otherwise, use {{compiler('')}} jinja2 in requirements/build."
)
if bits not in ["64", "32"]:
- log.warn(f"The legacy MSVC compiler setup does not support {bits} builds. ")
+ log.warning(f"The legacy MSVC compiler setup does not support {bits} builds. ")
return ""
if override:
- log.warn(
+ log.warning(
"msvc_compiler key in meta.yaml is deprecated. Use the new"
"variant-powered compiler configuration instead. Note that msvc_compiler"
"is incompatible with the new {{{{compiler('c')}}}} jinja scheme."
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 58f1311df7..65638cfcbe 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,6 +1,6 @@
Pillow==10.0.1
PyYAML==6.0.1
-Sphinx==7.3.7
+Sphinx==7.4.7
conda-sphinx-theme==0.2.1
linkify-it-py==2.0.2
myst-parser==2.0.0
diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst
index 83ba151382..0a54b49558 100644
--- a/docs/source/resources/define-metadata.rst
+++ b/docs/source/resources/define-metadata.rst
@@ -1315,7 +1315,10 @@ build prefix. Explicit file lists support glob expressions.
Directory names are also supported, and they recursively include
contents.
-.. code-block:: none
+.. warning::
+   When defining ``outputs/files`` as a list without specifying ``outputs/script``, any file in the prefix (including those installed by host dependencies) matching one of the glob expressions is included in the output.
+
+.. code-block:: yaml
outputs:
- name: subpackage-name
@@ -1325,6 +1328,29 @@ contents.
- *.some-extension
- somefolder/*.some-extension
+Greater control over file matching may be
+achieved by defining ``files`` as a dictionary separating files to
+``include`` from those to ``exclude``.
+When using include/exclude, only files installed by
+the current recipe are considered. i.e. files in the prefix installed
+by host dependencies are excluded. include/exclude must not be used
+simultaneously with glob expressions listed directly in ``outputs/files``.
+Files matching both include and exclude expressions will be excluded.
+
+.. code-block:: yaml
+
+ outputs:
+ - name: subpackage-name
+ files:
+ include:
+ - a-file
+ - a-folder
+ - *.some-extension
+ - somefolder/*.some-extension
+ exclude:
+ - *.exclude-extension
+ - a-folder/**/*.some-extension
+
Scripts that create or move files into the build prefix can be
any kind of script. Known script types need only specify the
script name. Currently the list of recognized extensions is
@@ -1374,10 +1400,9 @@ A subpackage does not automatically inherit any dependencies from its top-level
recipe, so any build or run requirements needed by the subpackage must be
explicitly specified.
-.. code-block:: none
+.. code-block:: yaml
outputs:
-
- name: subpackage-name
requirements:
build:
diff --git a/news/5222-deprecating-conda_interface b/news/5222-deprecating-conda_interface
deleted file mode 100644
index d7737f9368..0000000000
--- a/news/5222-deprecating-conda_interface
+++ /dev/null
@@ -1,32 +0,0 @@
-### Enhancements
-
-*
-
-### Bug fixes
-
-*
-
-### Deprecations
-
-* Deprecate `conda_build.conda_interface.CONDA_VERSION` constant. Use `conda.__version__` instead. (#5222)
-* Deprecate `conda_build.conda_interface.binstar_upload` constant. Use `conda.base.context.context.binstar_upload` instead. (#5222)
-* Deprecate `conda_build.conda_interface.default_python` constant. Use `conda.base.context.context.default_python` instead. (#5222)
-* Deprecate `conda_build.conda_interface.envs_dirs` constant. Use `conda.base.context.context.envs_dirs` instead. (#5222)
-* Deprecate `conda_build.conda_interface.pkgs_dirs` constant. Use `conda.base.context.context.pkgs_dirs` instead. (#5222)
-* Deprecate `conda_build.conda_interface.cc_platform` constant. Use `conda.base.context.context.platform` instead. (#5222)
-* Deprecate `conda_build.conda_interface.root_dir` constant. Use `conda.base.context.context.root_prefix` instead. (#5222)
-* Deprecate `conda_build.conda_interface.root_writable` constant. Use `conda.base.context.context.root_writable` instead. (#5222)
-* Deprecate `conda_build.conda_interface.subdir` constant. Use `conda.base.context.context.subdir` instead. (#5222)
-* Deprecate `conda_build.conda_interface.create_default_packages` constant. Use `conda.base.context.context.create_default_packages` instead. (#5222)
-* Deprecate `conda_build.conda_interface.get_rc_urls` function. Use `conda.base.context.context.channels` instead. (#5222)
-* Deprecate `conda_build.conda_interface.get_prefix` function. Use `conda.base.context.context.target_prefix` instead. (#5222)
-* Deprecate `conda_build.conda_interface.get_conda_channel` function. Use `conda.models.channel.Channel.from_value` instead. (#5222)
-* Deprecate `conda_build.conda_interface.reset_context` function. Use `conda.base.context.reset_context` instead. (#5222)
-
-### Docs
-
-*
-
-### Other
-
-*
diff --git a/news/5233-enable-codspeed b/news/5233-enable-codspeed
deleted file mode 100644
index efb32df4d1..0000000000
--- a/news/5233-enable-codspeed
+++ /dev/null
@@ -1,19 +0,0 @@
-### Enhancements
-
-*
-
-### Bug fixes
-
-*
-
-### Deprecations
-
-*
-
-### Docs
-
-*
-
-### Other
-
-* Enable CodSpeed benchmarks for select tests. (#5233)
diff --git a/news/5237-select_lines-caching b/news/5237-select_lines-caching
deleted file mode 100644
index 434a832350..0000000000
--- a/news/5237-select_lines-caching
+++ /dev/null
@@ -1,19 +0,0 @@
-### Enhancements
-
-* Add `conda_build.metadata._split_line_selector` to cache line-selector parsed text. (#5237)
-
-### Bug fixes
-
-*
-
-### Deprecations
-
-*
-
-### Docs
-
-*
-
-### Other
-
-*
diff --git a/news/5238-open_recipe b/news/5238-open_recipe
deleted file mode 100644
index 9d5d42c4c5..0000000000
--- a/news/5238-open_recipe
+++ /dev/null
@@ -1,19 +0,0 @@
-### Enhancements
-
-* Add `conda_build.render.open_recipe` context manager to detect the recipe type (file/`meta.yaml`, directory/recipe, or tarball/package) and properly handling any exit/close behavior. (#5238)
-
-### Bug fixes
-
-*
-
-### Deprecations
-
-*
-
-### Docs
-
-*
-
-### Other
-
-*
diff --git a/news/5251-deprecating-conda_interface b/news/5251-deprecating-conda_interface
deleted file mode 100644
index 9f5e48d6cd..0000000000
--- a/news/5251-deprecating-conda_interface
+++ /dev/null
@@ -1,34 +0,0 @@
-### Enhancements
-
-*
-
-### Bug fixes
-
-*
-
-### Deprecations
-
-* Deprecate `conda_build.conda_interface.context` singleton. Use `conda.base.context.context` instead. (#5251)
-* Deprecate `conda_build.conda_interface.configparser` module. Use `configparser` instead. (#5251)
-* Deprecate `conda_build.conda_interface.os` module. Use `os` instead. (#5251)
-* Deprecate `conda_build.conda_interface.partial` function. Use `functools.partial` instead. (#5251)
-* Deprecate `conda_build.conda_interface.import_module` function. Use `importlib.import_module` instead. (#5251)
-* Deprecate `conda_build.conda_interface.determine_target_prefix` function. Use `conda.base.context.determine_target_prefix` instead. (#5251)
-* Deprecate `conda_build.conda_interface.non_x86_linux_machines` constant. Use `conda.base.context.non_x86_machines` instead. (#5251)
-* Deprecate `conda_build.conda_interface.ProgressiveFetchExtract` class. Use `conda.core.package_cache.ProgressiveFetchExtract` instead. (#5251)
-* Deprecate `conda_build.conda_interface.CondaError` class. Use `conda.exceptions.CondaError` instead. (#5251)
-* Deprecate `conda_build.conda_interface.CondaHTTPError` class. Use `conda.exceptions.CondaHTTPError` instead. (#5251)
-* Deprecate `conda_build.conda_interface.LinkError` class. Use `conda.exceptions.LinkError` instead. (#5251)
-* Deprecate `conda_build.conda_interface.LockError` class. Use `conda.exceptions.LockError` instead. (#5251)
-* Deprecate `conda_build.conda_interface.NoPackagesFoundError` class. Use `conda.exceptions.NoPackagesFoundError` instead. (#5251)
-* Deprecate `conda_build.conda_interface.PaddingError` class. Use `conda.exceptions.PaddingError` instead. (#5251)
-* Deprecate `conda_build.conda_interface.UnsatisfiableError` class. Use `conda.exceptions.UnsatisfiableError` instead. (#5251)
-* Deprecate `conda_build.conda_interface.get_conda_build_local_url` class. Use `conda.models.channel.get_conda_build_local_url` instead. (#5251)
-
-### Docs
-
-*
-
-### Other
-
-*
diff --git a/news/5252-sign-stubs b/news/5252-sign-stubs
deleted file mode 100644
index 3f8bec0b49..0000000000
--- a/news/5252-sign-stubs
+++ /dev/null
@@ -1,19 +0,0 @@
-### Enhancements
-
-* For Windows users, the stub executables used for Python entrypoints in packages are now codesigned. (#5252)
-
-### Bug fixes
-
-*
-
-### Deprecations
-
-*
-
-### Docs
-
-*
-
-### Other
-
-*
diff --git a/news/5271-context b/news/5271-context
deleted file mode 100644
index b4143e00f4..0000000000
--- a/news/5271-context
+++ /dev/null
@@ -1,19 +0,0 @@
-### Enhancements
-
-* Require `conda >=23.7.0`. (#5271)
-
-### Bug fixes
-
-* Fix all CLI arguments to properly initialize `conda.base.context.context` with parsed arguments. Fixes issue with arguments not being processed (e.g., `--override-channels` was previously ignored). (#3693 via #5271)
-
-### Deprecations
-
-* Deprecate `conda_build.config.Config.override_channels`. Use `conda.base.context.context.override_channels` instead. (#5271)
-
-### Docs
-
-*
-
-### Other
-
-*
diff --git a/news/5276-deprecating-conda_interface b/news/5276-deprecating-conda_interface
deleted file mode 100644
index 701b9a53f1..0000000000
--- a/news/5276-deprecating-conda_interface
+++ /dev/null
@@ -1,56 +0,0 @@
-### Enhancements
-
-*
-
-### Bug fixes
-
-*
-
-### Deprecations
-
-* Deprecate `conda_build.conda_interface._toposort`. Use `conda.common.toposort._toposort` instead. (#5276)
-* Deprecate `conda_build.conda_interface.add_parser_channels`. Use `conda.cli.helpers.add_parser_channels` instead. (#5276)
-* Deprecate `conda_build.conda_interface.add_parser_prefix`. Use `conda.cli.helpers.add_parser_prefix` instead. (#5276)
-* Deprecate `conda_build.conda_interface.ArgumentParser`. Use `conda.cli.conda_argparse.ArgumentParser` instead. (#5276)
-* Deprecate `conda_build.conda_interface.cc_conda_build`. Use `conda.base.context.context.conda_build` instead. (#5276)
-* Deprecate `conda_build.conda_interface.Channel`. Use `conda.models.channel.Channel` instead. (#5276)
-* Deprecate `conda_build.conda_interface.Completer`. Unused. (#5276)
-* Deprecate `conda_build.conda_interface.CondaSession`. Use `conda.gateways.connection.session.CondaSession` instead. (#5276)
-* Deprecate `conda_build.conda_interface.download`. Use `conda.gateways.connection.download.download` instead. (#5276)
-* Deprecate `conda_build.conda_interface.EntityEncoder`. Use `conda.auxlib.entity.EntityEncoder` instead. (#5276)
-* Deprecate `conda_build.conda_interface.env_path_backup_var_exists`. Unused. (#5276)
-* Deprecate `conda_build.conda_interface.FileMode`. Use `conda.models.enums.FileMode` instead. (#5276)
-* Deprecate `conda_build.conda_interface.human_bytes`. Use `conda.utils.human_bytes` instead. (#5276)
-* Deprecate `conda_build.conda_interface.input`. Use `input` instead. (#5276)
-* Deprecate `conda_build.conda_interface.InstalledPackages`. Unused. (#5276)
-* Deprecate `conda_build.conda_interface.lchmod`. Use `conda.gateways.disk.link.lchmod` instead. (#5276)
-* Deprecate `conda_build.conda_interface.MatchSpec`. Use `conda.models.match_spec.MatchSpec` instead. (#5276)
-* Deprecate `conda_build.conda_interface.NoPackagesFound`. Use `conda.exceptions.ResolvePackageNotFound` instead. (#5276)
-* Deprecate `conda_build.conda_interface.normalized_version`. Use `conda.models.version.normalized_version` instead. (#5276)
-* Deprecate `conda_build.conda_interface.PackageRecord`. Use `conda.models.records.PackageRecord` instead. (#5276)
-* Deprecate `conda_build.conda_interface.PathType`. Use `conda.models.enums.PathType` instead. (#5276)
-* Deprecate `conda_build.conda_interface.prefix_placeholder`. Use `conda.base.constants.PREFIX_PLACEHOLDER` instead. (#5276)
-* Deprecate `conda_build.conda_interface.Resolve`. Use `conda.resolve.Resolve` instead. (#5276)
-* Deprecate `conda_build.conda_interface.rm_rf`. Use `conda_build.utils.rm_rf` instead. (#5276)
-* Deprecate `conda_build.conda_interface.spec_from_line`. Use `conda.cli.common.spec_from_line` instead. (#5276)
-* Deprecate `conda_build.conda_interface.specs_from_args`. Use `conda.cli.common.specs_from_args` instead. (#5276)
-* Deprecate `conda_build.conda_interface.specs_from_url`. Use `conda.cli.common.specs_from_url` instead. (#5276)
-* Deprecate `conda_build.conda_interface.StringIO`. Use `io.StringIO` instead. (#5276)
-* Deprecate `conda_build.conda_interface.symlink_conda`. Unused. (#5276)
-* Deprecate `conda_build.conda_interface.TempDirectory`. Use `conda.gateways.disk.create.TemporaryDirectory` instead. (#5276)
-* Deprecate `conda_build.conda_interface.TmpDownload`. Use `conda.gateways.connection.download.TmpDownload` instead. (#5276)
-* Deprecate `conda_build.conda_interface.unix_path_to_win`. Use `conda.utils.unix_path_to_win` instead. (#5276)
-* Deprecate `conda_build.conda_interface.Unsatisfiable`. Use `conda.exceptions.UnsatisfiableError` instead. (#5276)
-* Deprecate `conda_build.conda_interface.untracked`. Use `conda.misc.untracked` instead. (#5276)
-* Deprecate `conda_build.conda_interface.url_path`. Use `conda.utils.url_path` instead. (#5276)
-* Deprecate `conda_build.conda_interface.VersionOrder`. Use `conda.models.version.VersionOrder` instead. (#5276)
-* Deprecate `conda_build.conda_interface.walk_prefix`. Use `conda.misc.walk_prefix` instead. (#5276)
-* Deprecate `conda_build.conda_interface.win_path_to_unix`. Use `conda.common.path.win_path_to_unix` instead. (#5276)
-
-### Docs
-
-*
-
-### Other
-
-*
diff --git a/news/5280-deprecate-get_vars-loop_only b/news/5280-deprecate-get_vars-loop_only
deleted file mode 100644
index e18d5cfe8c..0000000000
--- a/news/5280-deprecate-get_vars-loop_only
+++ /dev/null
@@ -1,19 +0,0 @@
-### Enhancements
-
-*
-
-### Bug fixes
-
-*
-
-### Deprecations
-
-* Deprecate `conda_build.variants.get_vars(loop_only)`. Unused. (#5280)
-
-### Docs
-
-*
-
-### Other
-
-*
diff --git a/news/5284-deprecate-HashableDict b/news/5284-deprecate-HashableDict
deleted file mode 100644
index c411443395..0000000000
--- a/news/5284-deprecate-HashableDict
+++ /dev/null
@@ -1,21 +0,0 @@
-### Enhancements
-
-*
-
-### Bug fixes
-
-*
-
-### Deprecations
-
-* Deprecate `conda_build.utils.HashableDict`. Use `frozendict.deepfreeze` instead. (#5284)
-* Deprecate `conda_build.utils._convert_lists_to_sets`. Use `frozendict.deepfreeze` instead. (#5284)
-* Deprecate `conda_build.utils.represent_hashabledict`. Use `frozendict.deepfreeze` instead. (#5284)
-
-### Docs
-
-*
-
-### Other
-
-*
diff --git a/news/5299-remove-deprecations b/news/5299-remove-deprecations
deleted file mode 100644
index c78531ea4d..0000000000
--- a/news/5299-remove-deprecations
+++ /dev/null
@@ -1,39 +0,0 @@
-### Enhancements
-
-*
-
-### Bug fixes
-
-*
-
-### Deprecations
-
-* Postpone `conda_build.index.channel_data` deprecation. (#5299)
-* Remove `conda_build.api.get_output_file_path`. Use `conda_build.api.get_output_file_paths` instead. (#5299)
-* Remove `conda_build.bdist_conda`. (#5299)
-* Remove `conda_build.build.have_prefix_files`. (#5299)
-* Remove `conda_build.conda_interface.get_index`. Use `conda.core.index.get_index` instead. (#5299)
-* Remove `conda_build.conda_interface.get_version_from_git_tag`. Use `conda_build.environ.get_version_from_git_tag` instead. (#5299)
-* Remove `conda_build.conda_interface.handle_proxy_407`. Handled by `conda.gateways.connection.session.CondaSession`. (#5299)
-* Remove `conda_build.conda_interface.hashsum_file`. Use `conda.gateways.disk.read.compute_sum` instead. (#5299)
-* Remove `conda_build.conda_interface.md5_file`. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5299)
-* Remove `conda_build.environ._load_all_json`. (#5299)
-* Remove `conda_build.environ._load_json`. (#5299)
-* Remove `conda_build.environ.cached_actions`. (#5299)
-* Remove `conda_build.environ.Environment`. Use `conda.core.prefix_data.PrefixData` instead. (#5299)
-* Remove `conda_build.environ.InvalidEnvironment`. (#5299)
-* Remove `conda_build.environ.LINK_ACTION`. (#5299)
-* Remove `conda_build.environ.PREFIX_ACTION`. (#5299)
-* Remove `conda_build.index._apply_instructions`. Use `conda_index._apply_instructions` instead. (#5299)
-* Remove `conda_build.index.DummyExecutor`. (#5299)
-* Remove `conda_build.index.LOCK_TIMEOUT_SECS`. (#5299)
-* Remove `conda_build.index.LOCKFILE_NAME`. (#5299)
-* Remove `conda_build.index.MAX_THREADS_DEFAULT`. (#5299)
-
-### Docs
-
-*
-
-### Other
-
-*
diff --git a/news/5322-undefine-build-vars b/news/5322-undefine-build-vars
new file mode 100644
index 0000000000..67c47373b5
--- /dev/null
+++ b/news/5322-undefine-build-vars
@@ -0,0 +1,20 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+* Ensures that variables mentioned in `script_env` are undefined in multi-output build environment
+ if undefined in the environment `conda-build` is invoked from.
+
+### Deprecations
+
+*
+
+### Docs
+
+*
+
+### Other
+
+*
diff --git a/news/5441-24.9-removals b/news/5441-24.9-removals
new file mode 100644
index 0000000000..1613579d16
--- /dev/null
+++ b/news/5441-24.9-removals
@@ -0,0 +1,20 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+*
+
+### Deprecations
+
+* Remove `conda_build.build.check_external`. `patchelf` is an explicit conda-build dependency on Linux so it will always be installed. (#5441)
+* Remove `conda_build.metadata._get_env_path`. Use `conda.base.context.locate_prefix_by_name` instead. (#5441)
+
+### Docs
+
+*
+
+### Other
+
+*
diff --git a/news/5447-jinja2-for-set-vars b/news/5447-jinja2-for-set-vars
new file mode 100644
index 0000000000..fbca651f89
--- /dev/null
+++ b/news/5447-jinja2-for-set-vars
@@ -0,0 +1,20 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+* Variables used in single-line jinja2 `for` and `set` statements are now properly included in the variant
+ matrix for some edge cases. (#5447)
+
+### Deprecations
+
+*
+
+### Docs
+
+*
+
+### Other
+
+*
diff --git a/pyproject.toml b/pyproject.toml
index a8b907644a..12fe4731f6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,7 +27,7 @@ dependencies = [
"chardet",
"conda >=23.7.0",
"conda-index >=0.4.0",
- "conda-package-handling >=1.3",
+ "conda-package-handling >=2.2.0",
"filelock",
"frozendict >=2.4.2",
"jinja2",
@@ -126,6 +126,10 @@ filterwarnings = [
"error::DeprecationWarning:conda_build",
# ignore numpy.distutils error
'ignore:\s+`numpy.distutils` is deprecated:DeprecationWarning:conda_build._load_setup_py_data',
+ # ignore conda-index error
+ "ignore::PendingDeprecationWarning:conda_index",
+ "ignore::DeprecationWarning:conda_index",
+ "ignore:Python 3.14 will, by default, filter extracted tar archives and reject files or modify their metadata:DeprecationWarning",
]
markers = [
"serial: execute test serially (to avoid race conditions)",
diff --git a/recipe/meta.yaml b/recipe/meta.yaml
index 33f8fe9125..b96d96a96e 100644
--- a/recipe/meta.yaml
+++ b/recipe/meta.yaml
@@ -32,7 +32,7 @@ requirements:
- chardet
- conda >=23.7.0
- conda-index >=0.4.0
- - conda-package-handling >=1.3
+ - conda-package-handling >=2.2.0
- filelock
- frozendict >=2.4.2
- jinja2
diff --git a/rever.xsh b/rever.xsh
index 577ecfa980..c25154fed0 100644
--- a/rever.xsh
+++ b/rever.xsh
@@ -1,3 +1,5 @@
+# edit this in https://github.com/conda/infrastructure
+
$ACTIVITIES = ["authors", "changelog"]
# Basic settings
diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py
index 9f4ce1cbb0..ed56cabceb 100644
--- a/tests/cli/test_main_build.py
+++ b/tests/cli/test_main_build.py
@@ -16,7 +16,7 @@
Config,
zstd_compression_level_default,
)
-from conda_build.exceptions import DependencyNeedsBuildingError
+from conda_build.exceptions import CondaBuildUserError, DependencyNeedsBuildingError
from conda_build.os_utils.external import find_executable
from conda_build.utils import get_build_folders, on_win, package_has_file
@@ -165,7 +165,7 @@ def test_build_long_test_prefix_default_enabled(mocker, testing_workdir):
main_build.execute(args)
args.append("--no-long-test-prefix")
- with pytest.raises(SystemExit):
+ with pytest.raises(CondaBuildUserError):
main_build.execute(args)
@@ -483,7 +483,7 @@ def test_test_extra_dep(testing_metadata):
main_build.execute(args)
# missing click dep will fail tests
- with pytest.raises(SystemExit):
+ with pytest.raises(CondaBuildUserError):
args = [output, "-t"]
# extra_deps will add it in
main_build.execute(args)
diff --git a/tests/cli/test_main_inspect.py b/tests/cli/test_main_inspect.py
index b8931b5220..83859bf441 100644
--- a/tests/cli/test_main_inspect.py
+++ b/tests/cli/test_main_inspect.py
@@ -9,6 +9,7 @@
from conda_build import api
from conda_build.cli import main_inspect
+from conda_build.exceptions import CondaBuildUserError
from conda_build.utils import on_win
from ..utils import metadata_dir
@@ -23,7 +24,7 @@ def test_inspect_linkages(testing_workdir, capfd):
# get a package that has known object output
args = ["linkages", "python"]
if on_win:
- with pytest.raises(SystemExit) as exc:
+ with pytest.raises(CondaBuildUserError) as exc:
main_inspect.execute(args)
assert "conda inspect linkages is only implemented in Linux and OS X" in exc
else:
@@ -36,7 +37,7 @@ def test_inspect_objects(testing_workdir, capfd):
# get a package that has known object output
args = ["objects", "python"]
if sys.platform != "darwin":
- with pytest.raises(SystemExit) as exc:
+ with pytest.raises(CondaBuildUserError) as exc:
main_inspect.execute(args)
assert "conda inspect objects is only implemented in OS X" in exc
else:
diff --git a/tests/conftest.py b/tests/conftest.py
index 465cab6fcc..cd66dddb97 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -105,7 +105,15 @@ def boolify(v):
exit_on_verify_error=exit_on_verify_error_default,
conda_pkg_format=conda_pkg_format_default,
)
- result = Config(variant=None, **testing_config_kwargs)
+
+ if on_mac and "CONDA_BUILD_SYSROOT" in os.environ:
+ var_dict = {
+ "CONDA_BUILD_SYSROOT": [os.environ["CONDA_BUILD_SYSROOT"]],
+ }
+ else:
+ var_dict = None
+
+ result = Config(variant=var_dict, **testing_config_kwargs)
result._testing_config_kwargs = testing_config_kwargs
assert result.no_rewrite_stdout_env is False
assert result._src_cache_root is None
@@ -204,24 +212,35 @@ def variants_conda_build_sysroot(monkeypatch, request):
if not on_mac:
return {}
- monkeypatch.setenv(
- "CONDA_BUILD_SYSROOT",
- subprocess.run(
- ["xcrun", "--sdk", "macosx", "--show-sdk-path"],
- check=True,
- capture_output=True,
- text=True,
- ).stdout.strip(),
- )
- monkeypatch.setenv(
- "MACOSX_DEPLOYMENT_TARGET",
- subprocess.run(
+    # if we do not specify a custom sysroot, we get what the
+    # current SDK has
+ if "CONDA_BUILD_SYSROOT" not in os.environ:
+ monkeypatch.setenv(
+ "CONDA_BUILD_SYSROOT",
+ subprocess.run(
+ ["xcrun", "--sdk", "macosx", "--show-sdk-path"],
+ check=True,
+ capture_output=True,
+ text=True,
+ ).stdout.strip(),
+ )
+
+ mdt = subprocess.run(
["xcrun", "--sdk", "macosx", "--show-sdk-version"],
check=True,
capture_output=True,
text=True,
- ).stdout.strip(),
- )
+ ).stdout.strip()
+ else:
+ # custom sysroots always have names like MacOSX.sdk
+ mdt = (
+ os.path.basename(os.environ["CONDA_BUILD_SYSROOT"])
+ .replace("MacOSX", "")
+ .replace(".sdk", "")
+ )
+
+ monkeypatch.setenv("MACOSX_DEPLOYMENT_TARGET", mdt)
+
return request.param
diff --git a/tests/requirements.txt b/tests/requirements.txt
index acb3317206..d6e46d9cea 100644
--- a/tests/requirements.txt
+++ b/tests/requirements.txt
@@ -3,7 +3,7 @@ chardet
conda >=23.7.0
conda-index >=0.4.0
conda-libmamba-solver # ensure we use libmamba
-conda-package-handling >=1.3
+conda-package-handling >=2.2.0
filelock
frozendict >=2.4.2
jinja2
diff --git a/tests/test-recipes/metadata/_build_script_errors/output_build_script/meta.yaml b/tests/test-recipes/metadata/_build_script_errors/output_build_script/meta.yaml
new file mode 100644
index 0000000000..406ba464c0
--- /dev/null
+++ b/tests/test-recipes/metadata/_build_script_errors/output_build_script/meta.yaml
@@ -0,0 +1,10 @@
+package:
+ name: pkg
+ version: '1.0'
+source:
+ path: .
+outputs:
+ - name: pkg-output
+ build:
+ script:
+ - exit 1
diff --git a/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.bat b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.bat
new file mode 100644
index 0000000000..6dedc57766
--- /dev/null
+++ b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.bat
@@ -0,0 +1 @@
+exit 1
\ No newline at end of file
diff --git a/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.sh b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.sh
new file mode 100644
index 0000000000..6dedc57766
--- /dev/null
+++ b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.sh
@@ -0,0 +1 @@
+exit 1
\ No newline at end of file
diff --git a/tests/test-recipes/metadata/_build_script_errors/output_script/meta.yaml b/tests/test-recipes/metadata/_build_script_errors/output_script/meta.yaml
new file mode 100644
index 0000000000..43c2f9d054
--- /dev/null
+++ b/tests/test-recipes/metadata/_build_script_errors/output_script/meta.yaml
@@ -0,0 +1,9 @@
+package:
+ name: pkg
+ version: '1.0'
+source:
+ path: .
+outputs:
+ - name: pkg-output
+ script: exit_1.sh # [unix]
+ script: exit_1.bat # [win]
diff --git a/tests/test-recipes/metadata/_build_script_errors/toplevel/meta.yaml b/tests/test-recipes/metadata/_build_script_errors/toplevel/meta.yaml
new file mode 100644
index 0000000000..df710d103b
--- /dev/null
+++ b/tests/test-recipes/metadata/_build_script_errors/toplevel/meta.yaml
@@ -0,0 +1,7 @@
+package:
+ name: pkg
+ version: '1.0'
+source:
+ path: .
+build:
+ script: exit 1
diff --git a/tests/test-recipes/metadata/_cross_unix_windows_mingw/conda_build_config.yaml b/tests/test-recipes/metadata/_cross_unix_windows_mingw/conda_build_config.yaml
new file mode 100644
index 0000000000..ad7ed0e836
--- /dev/null
+++ b/tests/test-recipes/metadata/_cross_unix_windows_mingw/conda_build_config.yaml
@@ -0,0 +1,2 @@
+target_platform:
+ - win-64
\ No newline at end of file
diff --git a/tests/test-recipes/metadata/_cross_unix_windows_mingw/meta.yaml b/tests/test-recipes/metadata/_cross_unix_windows_mingw/meta.yaml
new file mode 100644
index 0000000000..c180f95383
--- /dev/null
+++ b/tests/test-recipes/metadata/_cross_unix_windows_mingw/meta.yaml
@@ -0,0 +1,18 @@
+package:
+ name: foo
+ version: 0.0.1
+
+build:
+ number: 0
+ script:
+ - echo 'hello'
+ - ls $PREFIX
+ # this is the unix layout.
+ - test ! -d $PREFIX/x86_64-w64-mingw32
+ - test -d $PREFIX/Library
+
+requirements:
+ build:
+ host:
+ - m2w64-sysroot_win-64
+ run:
diff --git a/tests/test-recipes/metadata/_run_constrained_error/meta.yaml b/tests/test-recipes/metadata/_run_constrained_error/meta.yaml
new file mode 100644
index 0000000000..7d8dd7f759
--- /dev/null
+++ b/tests/test-recipes/metadata/_run_constrained_error/meta.yaml
@@ -0,0 +1,10 @@
+package:
+ name: test_run_constrained_error
+ version: 1.0
+
+requirements:
+ run_constrained:
+ # obtained from https://github.com/conda-forge/willow-feedstock/blob/67d9ac1c5232295ccaac41b131e3982a335b365b/recipe/meta.yaml#L29
+ - pillow-heif >=0.10.0,<1.0.0=0.13.0,<1.0.0>=py312
+ - {{ 'another-package' }} {{ '>=0.20.0,<2.0.0=0.23.0,<2.0.0>=py310' }}
+
diff --git a/tests/test-recipes/metadata/_sysroot_detection/build.sh b/tests/test-recipes/metadata/_sysroot_detection/build.sh
new file mode 100644
index 0000000000..56c6c57c6e
--- /dev/null
+++ b/tests/test-recipes/metadata/_sysroot_detection/build.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+mkdir -p ${PREFIX}/bin
+
+# Delete the x86_64 libc.so.6 to make sure we find the powerpc libc.so.6
+rm -f ${BUILD_PREFIX}/x86_64-conda-linux-gnu/sysroot/lib64/libc.so.6
+
+${CC} ${CFLAGS} main.c -o ${PREFIX}/bin/sysroot-detection
diff --git a/tests/test-recipes/metadata/_sysroot_detection/conda_build_config.yaml b/tests/test-recipes/metadata/_sysroot_detection/conda_build_config.yaml
new file mode 100644
index 0000000000..549ed687b9
--- /dev/null
+++ b/tests/test-recipes/metadata/_sysroot_detection/conda_build_config.yaml
@@ -0,0 +1,2 @@
+target_platform:
+ - linux-ppc64le
diff --git a/tests/test-recipes/metadata/_sysroot_detection/main.c b/tests/test-recipes/metadata/_sysroot_detection/main.c
new file mode 100644
index 0000000000..1394ce82a6
--- /dev/null
+++ b/tests/test-recipes/metadata/_sysroot_detection/main.c
@@ -0,0 +1,5 @@
+#include <stdio.h>
+
+int main() {
+ return 0;
+}
diff --git a/tests/test-recipes/metadata/_sysroot_detection/meta.yaml b/tests/test-recipes/metadata/_sysroot_detection/meta.yaml
new file mode 100644
index 0000000000..bffaf71b88
--- /dev/null
+++ b/tests/test-recipes/metadata/_sysroot_detection/meta.yaml
@@ -0,0 +1,15 @@
+{% set version = "1" %}
+
+package:
+ name: sysroot_detection
+ version: {{ version }}
+
+source:
+ path: main.c
+
+build:
+ number: 0
+
+requirements:
+ build:
+ - {{ compiler('c') }}
diff --git a/tests/test-recipes/metadata/gh-5342/meta.yaml b/tests/test-recipes/metadata/gh-5342/meta.yaml
new file mode 100644
index 0000000000..f083f1c95e
--- /dev/null
+++ b/tests/test-recipes/metadata/gh-5342/meta.yaml
@@ -0,0 +1,15 @@
+{% set name = "gh-5342" %}
+
+package:
+ name: {{ name }}
+ version: 1.0
+
+outputs:
+ - name: {{ name }}
+ build:
+ skip: true
+
+ - name: {{ name }}-dev
+ build:
+ files:
+ - file
diff --git a/tests/test-recipes/split-packages/_build_script_relying_on_missing_var/meta.yaml b/tests/test-recipes/split-packages/_build_script_relying_on_missing_var/meta.yaml
new file mode 100644
index 0000000000..55ce0b9d0d
--- /dev/null
+++ b/tests/test-recipes/split-packages/_build_script_relying_on_missing_var/meta.yaml
@@ -0,0 +1,14 @@
+package:
+ name: test_build_script_relying_on_missing_var
+ version: 1.0
+
+outputs:
+ - name: test_1
+ build:
+ script_env:
+ - TEST_FN_DOESNT_EXIST
+ script:
+ - python -c "import os; print('val...' + os.environ['TEST_FN_DOESNT_EXIST'])"
+ requirements:
+ host:
+ - python
diff --git a/tests/test-recipes/split-packages/_test-file-hash/build.sh b/tests/test-recipes/split-packages/_test-file-hash/build.sh
new file mode 100644
index 0000000000..5cf09c5b51
--- /dev/null
+++ b/tests/test-recipes/split-packages/_test-file-hash/build.sh
@@ -0,0 +1,8 @@
+echo "int main() {}" > main.c
+mkdir -p $PREFIX/bin
+$CC main.c -o $PREFIX/bin/_file_hash
+
+echo "int foo() {return 2;}" > foo.c
+echo "int foo(); int bar() {return foo()*2;}" > bar.c
+$CC -shared foo.c -o libupstream.so
+$CC -shared bar.c -o libdownstream.so -L$PWD -lupstream '-Wl,-rpath,$ORIGIN'
diff --git a/tests/test-recipes/split-packages/_test-file-hash/conda_build_config.yaml b/tests/test-recipes/split-packages/_test-file-hash/conda_build_config.yaml
new file mode 100644
index 0000000000..5b99fedac2
--- /dev/null
+++ b/tests/test-recipes/split-packages/_test-file-hash/conda_build_config.yaml
@@ -0,0 +1,3 @@
+python:
+- 3.10
+- 3.11
diff --git a/tests/test-recipes/split-packages/_test-file-hash/install-py.sh b/tests/test-recipes/split-packages/_test-file-hash/install-py.sh
new file mode 100644
index 0000000000..7a37b6050b
--- /dev/null
+++ b/tests/test-recipes/split-packages/_test-file-hash/install-py.sh
@@ -0,0 +1,4 @@
+mkdir -p $SP_DIR/_py_file_hash
+cp libdownstream.so $SP_DIR/_py_file_hash/
+cp libupstream.so $SP_DIR/_py_file_hash/
+
diff --git a/tests/test-recipes/split-packages/_test-file-hash/meta.yaml b/tests/test-recipes/split-packages/_test-file-hash/meta.yaml
new file mode 100644
index 0000000000..db0c9a89d1
--- /dev/null
+++ b/tests/test-recipes/split-packages/_test-file-hash/meta.yaml
@@ -0,0 +1,30 @@
+package:
+ name: _file_hash-split
+ version: 0.0.1
+
+build:
+ number: 0
+ skip: True # [not linux64]
+ error_overlinking: true
+
+requirements:
+ build:
+ - {{ compiler('c') }}
+ host:
+ run:
+
+outputs:
+ - name: py-file-hash
+ script: install-py.sh
+ requirements:
+ build:
+ - {{ compiler('c') }}
+ host:
+ - python
+ run:
+ - python
+
+ - name: _file_hash
+ requirements:
+ build:
+ - {{ compiler('c') }}
diff --git a/tests/test-recipes/split-packages/copying_files/bld.bat b/tests/test-recipes/split-packages/copying_files/bld.bat
index e1084a2a6f..b516b7d6c3 100644
--- a/tests/test-recipes/split-packages/copying_files/bld.bat
+++ b/tests/test-recipes/split-packages/copying_files/bld.bat
@@ -2,4 +2,14 @@ echo "weee" > %PREFIX%\subpackage_file1
mkdir %PREFIX%\somedir
echo "weee" > %PREFIX%\somedir\subpackage_file1
echo "weee" > %PREFIX%\subpackage_file1.ext
-echo "weee" > %PREFIX%\subpackage_file2.ext
\ No newline at end of file
+echo "weee" > %PREFIX%\subpackage_file2.ext
+echo "weee" > %PREFIX%\subpackage_file3.ext
+
+echo "weee" > %PREFIX%\subpackage_include_exclude1
+mkdir %PREFIX%\anotherdir
+echo "weee" > %PREFIX%\anotherdir\subpackage_include_exclude1
+echo "weee" > %PREFIX%\subpackage_include_exclude1.wav
+echo "weee" > %PREFIX%\subpackage_include_exclude2.wav
+echo "weee" > %PREFIX%\subpackage_include_exclude3.wav
+mkdir %PREFIX%\Library\bin
+echo "weee" > %PREFIX%\Library\bin\dav1d.fake
diff --git a/tests/test-recipes/split-packages/copying_files/build.sh b/tests/test-recipes/split-packages/copying_files/build.sh
index 529dc13092..6dbde0d63f 100644
--- a/tests/test-recipes/split-packages/copying_files/build.sh
+++ b/tests/test-recipes/split-packages/copying_files/build.sh
@@ -6,3 +6,18 @@ echo "weee" > $PREFIX/somedir/subpackage_file1
# test glob patterns
echo "weee" > $PREFIX/subpackage_file1.ext
echo "weee" > $PREFIX/subpackage_file2.ext
+echo "weee" > $PREFIX/subpackage_file3.ext
+
+# The files used to test the two subpackages must be disjoint because they are
+# coinstalled
+# test copying filename
+echo "weee" > $PREFIX/subpackage_include_exclude1
+# test copying by folder name
+mkdir $PREFIX/anotherdir
+echo "weee" > $PREFIX/anotherdir/subpackage_include_exclude1
+# test glob patterns
+echo "weee" > $PREFIX/subpackage_include_exclude1.wav
+echo "weee" > $PREFIX/subpackage_include_exclude2.wav
+echo "weee" > $PREFIX/subpackage_include_exclude3.wav
+mkdir $PREFIX/lib
+echo "weee" > $PREFIX/lib/libdav1d.fake
diff --git a/tests/test-recipes/split-packages/copying_files/meta.yaml b/tests/test-recipes/split-packages/copying_files/meta.yaml
index 9ab2e45957..4226e8a161 100644
--- a/tests/test-recipes/split-packages/copying_files/meta.yaml
+++ b/tests/test-recipes/split-packages/copying_files/meta.yaml
@@ -4,14 +4,44 @@ package:
requirements:
run:
- - my_script_subpackage
+ - my_script_subpackage_files
+ - my_script_subpackage_include_exclude
outputs:
- - name: my_script_subpackage
+ - name: my_script_subpackage_files
+ build:
+ ignore_run_exports_from:
+ - libpng
+ requirements:
+ host:
+ - libpng=1.6.39
files:
- subpackage_file1
- somedir
- "*.ext"
+ # Libs should match because they are in the prefix
+ - "lib/libpng*" # [unix]
+ - "Library/bin/libpng*" # [win]
+ test:
+ script: subpackage_test.py
+ script_interpreter: python
+ - name: my_script_subpackage_include_exclude
+ build:
+ ignore_run_exports_from:
+ - dav1d
+ requirements:
+ host:
+ - dav1d=1.2.1
+ files:
+ include:
+ - subpackage_include_exclude1
+ - anotherdir
+ - "*.wav"
+ # Libs should not match because they come from a different package
+ - "lib/libdav1d*" # [unix]
+ - "Library/bin/dav1d*" # [win]
+ exclude:
+ - "*3.wav"
test:
script: subpackage_test.py
script_interpreter: python
diff --git a/tests/test-recipes/split-packages/copying_files/subpackage_test.py b/tests/test-recipes/split-packages/copying_files/subpackage_test.py
index 91157c0642..9148e40947 100644
--- a/tests/test-recipes/split-packages/copying_files/subpackage_test.py
+++ b/tests/test-recipes/split-packages/copying_files/subpackage_test.py
@@ -1,33 +1,76 @@
import os
+import sys
-print(os.getenv('PREFIX'))
-filename = os.path.join(os.environ['PREFIX'], 'subpackage_file1')
-assert os.path.isfile(filename)
+if os.getenv("PKG_NAME") == "my_script_subpackage_files":
+ file_basename = "subpackage_file"
+ dirname = "somedir"
+ extension = "ext"
+
+ if "darwin" in sys.platform:
+ external_host_file = "lib/libpng16.dylib"
+ elif "win32" in sys.platform:
+ external_host_file = "Library/bin/libpng16.dll"
+ else:
+ external_host_file = "lib/libpng16.so"
+
+ filename = os.path.join(os.environ["PREFIX"], f"{file_basename}3.{extension}")
+ print(filename)
+ assert os.path.isfile(filename), filename + " is missing"
+ print("glob files OK")
+
+ filename = os.path.join(os.environ["PREFIX"], external_host_file)
+ print(filename)
+ assert os.path.isfile(filename), filename + " is missing"
+ print("glob files prefix OK")
+
+if os.getenv("PKG_NAME") == "my_script_subpackage_include_exclude":
+ file_basename = "subpackage_include_exclude"
+ dirname = "anotherdir"
+ extension = "wav"
+
+ if "darwin" in sys.platform:
+ external_host_file = "lib/libdav1d.6.dylib"
+ elif "win32" in sys.platform:
+ external_host_file = "Library/bin/dav1d.dll"
+ else:
+ external_host_file = "lib/libdav1d.so.6"
+
+ filename = os.path.join(os.environ["PREFIX"], f"{file_basename}3.{extension}")
+ assert not os.path.isfile(filename), filename + " is missing"
+ print("glob exclude OK")
+
+ filename = os.path.join(os.environ["PREFIX"], external_host_file)
+ assert not os.path.isfile(filename), filename + " is missing"
+ print("glob exclude prefix OK")
+
+print(os.getenv("PREFIX"))
+filename = os.path.join(os.environ["PREFIX"], f"{file_basename}1")
+assert os.path.isfile(filename), filename + " is missing"
contents = open(filename).read().rstrip()
-if hasattr(contents, 'decode'):
+if hasattr(contents, "decode"):
contents = contents.decode()
-assert "weee" in contents, 'incorrect file contents: %s' % contents
+assert "weee" in contents, "incorrect file contents: %s" % contents
print("plain file OK")
-filename = os.path.join(os.environ['PREFIX'], 'somedir', 'subpackage_file1')
+filename = os.path.join(os.environ["PREFIX"], dirname, f"{file_basename}1")
assert os.path.isfile(filename), filename + " is missing"
contents = open(filename).read().rstrip()
-if hasattr(contents, 'decode'):
+if hasattr(contents, "decode"):
contents = contents.decode()
-assert "weee" in contents, 'incorrect file contents: %s' % contents
+assert "weee" in contents, "incorrect file contents: %s" % contents
print("subfolder file OK")
-filename = os.path.join(os.environ['PREFIX'], 'subpackage_file1.ext')
-assert os.path.isfile(filename)
+filename = os.path.join(os.environ["PREFIX"], f"{file_basename}1.{extension}")
+assert os.path.isfile(filename), filename + " is missing"
contents = open(filename).read().rstrip()
-if hasattr(contents, 'decode'):
+if hasattr(contents, "decode"):
contents = contents.decode()
-assert "weee" in contents, 'incorrect file contents: %s' % contents
+assert "weee" in contents, "incorrect file contents: %s" % contents
-filename = os.path.join(os.environ['PREFIX'], 'subpackage_file2.ext')
-assert os.path.isfile(filename)
+filename = os.path.join(os.environ["PREFIX"], f"{file_basename}2.{extension}")
+assert os.path.isfile(filename), filename + " is missing"
contents = open(filename).read().rstrip()
-if hasattr(contents, 'decode'):
+if hasattr(contents, "decode"):
contents = contents.decode()
-assert "weee" in contents, 'incorrect file contents: %s' % contents
+assert "weee" in contents, "incorrect file contents: %s" % contents
print("glob OK")
diff --git a/tests/test-recipes/split-packages/script_install_files/subpackage1.py b/tests/test-recipes/split-packages/script_install_files/subpackage1.py
index 22cf26111b..2f64db8201 100644
--- a/tests/test-recipes/split-packages/script_install_files/subpackage1.py
+++ b/tests/test-recipes/split-packages/script_install_files/subpackage1.py
@@ -2,6 +2,8 @@
out_path = os.path.join(os.environ['PREFIX'], 'subpackage_file_1')
+assert "PIP_NO_INDEX" in os.environ
+
with open(out_path, 'w') as f:
f.write("weeee")
diff --git a/tests/test-recipes/variants/jinja2_used_variables/conda_build_config.yaml b/tests/test-recipes/variants/jinja2_used_variables/conda_build_config.yaml
new file mode 100644
index 0000000000..c5920d67c4
--- /dev/null
+++ b/tests/test-recipes/variants/jinja2_used_variables/conda_build_config.yaml
@@ -0,0 +1,27 @@
+CLANG_VERSION:
+ - 16.0.6
+ - 17.0.6
+ - 18.1.8
+ - 19.1.0.rc1
+
+VCVER:
+ - 14.3
+ - 14.2
+CL_VERSION:
+ - 19.40.33808
+ - 19.29.30139
+
+BLAH:
+ - a
+ - b
+
+FOO:
+ - cdf
+
+FOOBAR:
+ - hgf
+
+zip_keys:
+ -
+ - VCVER
+ - CL_VERSION
diff --git a/tests/test-recipes/variants/jinja2_used_variables/meta.yaml b/tests/test-recipes/variants/jinja2_used_variables/meta.yaml
new file mode 100644
index 0000000000..88c591b8b7
--- /dev/null
+++ b/tests/test-recipes/variants/jinja2_used_variables/meta.yaml
@@ -0,0 +1,42 @@
+{% if CLANG_VERSION is not defined %}
+{% set CLANG_VERSION = "16.0.6" %}
+{% set CL_VERSION = "19.29" %}
+{% set VCVER = "" %}
+{% set FOO = "" %}
+{% set FOOBAR = "" %}
+{% endif %}
+{% set clang_major = CLANG_VERSION.split(".")[0] %}
+{% set cl_minor = CL_VERSION.split(".")[1] %}
+{% set vc_major = VCVER.split(".")[0] %}
+
+package:
+ name: clang-win-activation
+ version: {{ CLANG_VERSION }}
+
+build:
+ number: 0
+ {% if clang_major|int == 16 and cl_minor|int >= 40 %}
+ skip: true
+ {% endif %}
+
+outputs:
+ - name: clang_win-64
+ build:
+ run_exports:
+ strong:
+ - vc >={{ VCVER }}
+ requirements:
+ run:
+ - clang {{ CLANG_VERSION }}.*
+
+ test:
+ commands:
+ {% for var in FOO.split() %}
+ - echo {{ var }}
+ {% endfor %}
+
+test:
+ commands:
+ {% for var in FOOBAR.split() %}
+ - echo {{ var }}
+ {% endfor %}
diff --git a/tests/test_api_build.py b/tests/test_api_build.py
index a663f18e73..efba89d75d 100644
--- a/tests/test_api_build.py
+++ b/tests/test_api_build.py
@@ -36,10 +36,13 @@
from conda_build import __version__, api, exceptions
from conda_build.config import Config
from conda_build.exceptions import (
+ BuildScriptException,
CondaBuildException,
+ CondaBuildUserError,
DependencyNeedsBuildingError,
OverDependingError,
OverLinkingError,
+ RecipeError,
)
from conda_build.os_utils.external import find_executable
from conda_build.render import finalize_metadata
@@ -277,7 +280,7 @@ def test_no_include_recipe_meta_yaml(testing_metadata, testing_config):
)[0]
assert not package_has_file(output_file, "info/recipe/meta.yaml")
- with pytest.raises(SystemExit):
+ with pytest.raises(CondaBuildUserError):
# we are testing that even with the recipe excluded, we still get the tests in place
output_file = api.build(
os.path.join(metadata_dir, "_no_include_recipe"), config=testing_config
@@ -383,7 +386,7 @@ def test_dirty_variable_available_in_build_scripts(testing_config):
testing_config.dirty = True
api.build(recipe, config=testing_config)
- with pytest.raises(subprocess.CalledProcessError):
+ with pytest.raises(BuildScriptException):
testing_config.dirty = False
api.build(recipe, config=testing_config)
@@ -501,7 +504,7 @@ def test_recursive_fail(testing_config):
@pytest.mark.sanity
def test_jinja_typo(testing_config):
- with pytest.raises(SystemExit, match="GIT_DSECRIBE_TAG"):
+ with pytest.raises(CondaBuildUserError, match="GIT_DSECRIBE_TAG"):
api.build(
os.path.join(fail_dir, "source_git_jinja2_oops"), config=testing_config
)
@@ -543,7 +546,7 @@ def test_skip_existing_url(testing_metadata, testing_workdir, capfd):
def test_failed_tests_exit_build(testing_config):
"""https://github.com/conda/conda-build/issues/1112"""
- with pytest.raises(SystemExit, match="TESTS FAILED"):
+ with pytest.raises(CondaBuildUserError, match="TESTS FAILED"):
api.build(
os.path.join(metadata_dir, "_test_failed_test_exits"), config=testing_config
)
@@ -816,13 +819,13 @@ def test_disable_pip(testing_metadata):
testing_metadata.meta["build"]["script"] = (
'python -c "import pip; print(pip.__version__)"'
)
- with pytest.raises(subprocess.CalledProcessError):
+ with pytest.raises(BuildScriptException):
api.build(testing_metadata)
testing_metadata.meta["build"]["script"] = (
'python -c "import setuptools; print(setuptools.__version__)"'
)
- with pytest.raises(subprocess.CalledProcessError):
+ with pytest.raises(BuildScriptException):
api.build(testing_metadata)
@@ -1463,6 +1466,12 @@ def test_run_constrained_stores_constrains_info(testing_config):
assert info_contents["constrains"][0] == "bzip2 1.*"
+def test_run_constrained_is_validated(testing_config: Config):
+ recipe = os.path.join(metadata_dir, "_run_constrained_error")
+ with pytest.raises(RecipeError):
+ api.build(recipe, config=testing_config)
+
+
@pytest.mark.sanity
def test_no_locking(testing_config):
recipe = os.path.join(metadata_dir, "source_git_jinja2")
@@ -1539,7 +1548,7 @@ def test_setup_py_data_in_env(testing_config):
# should pass with any modern python (just not 3.5)
api.build(recipe, config=testing_config)
# make sure it fails with our special python logic
- with pytest.raises(subprocess.CalledProcessError):
+ with pytest.raises((BuildScriptException, CondaBuildException)):
api.build(recipe, config=testing_config, python="3.5")
@@ -1767,6 +1776,18 @@ def test_overdepending_detection(testing_config, variants_conda_build_sysroot):
api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot)
+@pytest.mark.skipif(not on_linux, reason="cannot compile for linux-ppc64le")
+def test_sysroots_detection(testing_config, variants_conda_build_sysroot):
+ recipe = os.path.join(metadata_dir, "_sysroot_detection")
+ testing_config.activate = True
+ testing_config.error_overlinking = True
+ testing_config.error_overdepending = True
+ testing_config.channel_urls = [
+ "conda-forge",
+ ]
+ api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot)
+
+
@pytest.mark.skipif(sys.platform != "darwin", reason="macOS-only test (at present)")
def test_macos_tbd_handling(testing_config, variants_conda_build_sysroot):
"""
@@ -1800,7 +1821,7 @@ def test_downstream_tests(testing_config):
upstream = os.path.join(metadata_dir, "_test_downstreams/upstream")
downstream = os.path.join(metadata_dir, "_test_downstreams/downstream")
api.build(downstream, config=testing_config, notest=True)
- with pytest.raises(SystemExit):
+ with pytest.raises(CondaBuildUserError):
api.build(upstream, config=testing_config)
@@ -1942,10 +1963,15 @@ def test_activated_prefixes_in_actual_path(testing_metadata):
@pytest.mark.parametrize("add_pip_as_python_dependency", [False, True])
def test_add_pip_as_python_dependency_from_condarc_file(
- testing_metadata, testing_workdir, add_pip_as_python_dependency, monkeypatch
-):
+ testing_metadata: MetaData,
+ testing_workdir: str | os.PathLike,
+ add_pip_as_python_dependency: bool,
+ monkeypatch: MonkeyPatch,
+ mocker: MockerFixture,
+ tmp_path: Path,
+) -> None:
"""
- Test whether settings from .condarc files are heeded.
+ Test whether settings from .condarc files are heeded.
ref: https://github.com/conda/conda-libmamba-solver/issues/393
"""
# TODO: SubdirData._cache_ clearing might not be needed for future conda versions.
@@ -1955,16 +1981,74 @@ def test_add_pip_as_python_dependency_from_condarc_file(
# SubdirData's cache doesn't distinguish on add_pip_as_python_dependency.
SubdirData._cache_.clear()
+ # clear cache
+ mocker.patch("conda.base.context.Context.pkgs_dirs", pkgs_dirs := (str(tmp_path),))
+ assert context.pkgs_dirs == pkgs_dirs
+
testing_metadata.meta["build"]["script"] = ['python -c "import pip"']
testing_metadata.meta["requirements"]["host"] = ["python"]
del testing_metadata.meta["test"]
if add_pip_as_python_dependency:
check_build_fails = nullcontext()
else:
- check_build_fails = pytest.raises(subprocess.CalledProcessError)
+ check_build_fails = pytest.raises(BuildScriptException)
conda_rc = Path(testing_workdir, ".condarc")
conda_rc.write_text(f"add_pip_as_python_dependency: {add_pip_as_python_dependency}")
with env_var("CONDARC", conda_rc, reset_context):
with check_build_fails:
api.build(testing_metadata)
+
+
+def test_rendered_is_reported(testing_config, capsys):
+ recipe_dir = os.path.join(metadata_dir, "outputs_overwrite_base_file")
+ api.build(recipe_dir, config=testing_config)
+
+ captured = capsys.readouterr()
+ assert "Rendered as:" in captured.out
+ assert "name: base-outputs_overwrite_base_file" in captured.out
+ assert "- name: base-outputs_overwrite_base_file" in captured.out
+ assert "- base-outputs_overwrite_base_file >=1.0,<2.0a0" in captured.out
+
+
+@pytest.mark.skipif(on_win, reason="Tests cross-compilation targeting Windows")
+def test_cross_unix_windows_mingw(testing_config):
+ recipe = os.path.join(metadata_dir, "_cross_unix_windows_mingw")
+ testing_config.channel_urls = [
+ "conda-forge",
+ ]
+ api.build(recipe, config=testing_config)
+
+
+@pytest.mark.parametrize(
+ "recipe", sorted(Path(metadata_dir, "_build_script_errors").glob("*"))
+)
+@pytest.mark.parametrize("debug", (False, True))
+def test_conda_build_script_errors_without_conda_info_handlers(tmp_path, recipe, debug):
+ env = os.environ.copy()
+ if debug:
+ env["CONDA_VERBOSITY"] = "3"
+ process = subprocess.run(
+ ["conda", "build", recipe],
+ env=env,
+ capture_output=True,
+ text=True,
+ check=False,
+ cwd=tmp_path,
+ )
+ assert process.returncode > 0
+ all_output = process.stdout + "\n" + process.stderr
+
+ # These should NOT appear in the output
+ assert ">>> ERROR REPORT <<<" not in all_output
+ assert "An unexpected error has occurred." not in all_output
+ assert "Conda has prepared the above report." not in all_output
+
+ # These should appear
+ assert "returned non-zero exit status 1" in all_output
+
+ # With verbose mode, we should actually see the traceback
+ if debug:
+ assert "Traceback" in all_output
+ assert "CalledProcessError" in all_output
+ assert "returned non-zero exit status 1" in all_output
diff --git a/tests/test_api_convert.py b/tests/test_api_convert.py
index c0e46b7bf3..9b0526ed54 100644
--- a/tests/test_api_convert.py
+++ b/tests/test_api_convert.py
@@ -10,6 +10,7 @@
from conda.gateways.connection.download import download
from conda_build import api
+from conda_build.exceptions import CondaBuildUserError
from conda_build.utils import on_win, package_has_file
from .utils import assert_package_consistency, metadata_dir
@@ -60,8 +61,7 @@ def test_show_imports(base_platform, package, capfd):
download(f, fn)
for platform in platforms:
- with pytest.raises(SystemExit):
- api.convert(fn, platforms=platform, show_imports=True)
+ api.convert(fn, platforms=platform, show_imports=True)
output, error = capfd.readouterr()
@@ -80,8 +80,7 @@ def test_no_imports_found(base_platform, package, capfd):
fn = f"{package_name}-py36_0.tar.bz2"
download(f, fn)
- with pytest.raises(SystemExit):
- api.convert(fn, platforms=None, show_imports=True)
+ api.convert(fn, platforms=None, show_imports=True)
output, error = capfd.readouterr()
assert "No imports found." in output
@@ -96,13 +95,12 @@ def test_no_platform(base_platform, package):
fn = f"{package_name}-py36_0.tar.bz2"
download(f, fn)
- with pytest.raises(SystemExit) as e:
+ with pytest.raises(
+ CondaBuildUserError,
+ match="Error: --platform option required for conda package conversion.",
+ ):
api.convert(fn, platforms=None)
- assert "Error: --platform option required for conda package conversion." in str(
- e.value
- )
-
@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"])
@pytest.mark.parametrize("package", [("cryptography-1.8.1", "__about__.py")])
@@ -121,14 +119,13 @@ def test_c_extension_error(base_platform, package):
download(f, fn)
for platform in platforms:
- with pytest.raises(SystemExit) as e:
+ with pytest.raises(
+ CondaBuildUserError,
+ match=f"WARNING: Package {fn} contains C extensions; skipping conversion. "
+ "Use -f to force conversion.",
+ ):
api.convert(fn, platforms=platform)
- assert (
- f"WARNING: Package {fn} contains C extensions; skipping conversion. "
- "Use -f to force conversion."
- ) in str(e.value)
-
@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"])
@pytest.mark.parametrize("package", [("cryptography-1.8.1", "__about__.py")])
diff --git a/tests/test_api_test.py b/tests/test_api_test.py
index 2bb76838aa..10200d5a99 100644
--- a/tests/test_api_test.py
+++ b/tests/test_api_test.py
@@ -9,6 +9,7 @@
import pytest
from conda_build import api
+from conda_build.exceptions import CondaBuildUserError
from .utils import metadata_dir
@@ -63,5 +64,5 @@ def test_api_extra_dep(testing_metadata):
api.test(output, config=testing_metadata.config, extra_deps=["click"])
# missing click dep will fail tests
- with pytest.raises(SystemExit):
+ with pytest.raises(CondaBuildUserError):
api.test(output, config=testing_metadata.config)
diff --git a/tests/test_build.py b/tests/test_build.py
index 839cce4b9e..40ed500020 100644
--- a/tests/test_build.py
+++ b/tests/test_build.py
@@ -12,12 +12,24 @@
import sys
from contextlib import nullcontext
from pathlib import Path
+from typing import TYPE_CHECKING
import pytest
+from conda.common.compat import on_win
from conda_build import api, build
+from conda_build.exceptions import CondaBuildUserError
-from .utils import get_noarch_python_meta, metadata_dir
+from .utils import get_noarch_python_meta, metadata_dir, metadata_path
+
+if TYPE_CHECKING:
+ from conda_build.config import Config
+
+if TYPE_CHECKING:
+ from pytest_mock import MockerFixture
+
+ from conda_build.config import Config
+ from conda_build.metadata import MetaData
def test_build_preserves_PATH(testing_config):
@@ -324,3 +336,64 @@ def test_guess_interpreter(
):
with pytest.raises(error) if error else nullcontext():
assert build.guess_interpreter(script) == interpreter
+
+
+@pytest.mark.parametrize("readme", ["README.md", "README.rst", "README"])
+def test_copy_readme(testing_metadata: MetaData, readme: str):
+ testing_metadata.meta["about"]["readme"] = readme
+ with pytest.raises(CondaBuildUserError):
+ build.copy_readme(testing_metadata)
+
+ Path(testing_metadata.config.work_dir, readme).touch()
+ build.copy_readme(testing_metadata)
+ assert Path(testing_metadata.config.info_dir, readme).exists()
+
+
+def test_construct_metadata_for_test_from_recipe(testing_config: Config) -> None:
+ with pytest.warns(FutureWarning):
+ build._construct_metadata_for_test_from_recipe(
+ str(metadata_path / "test_source_files"),
+ testing_config,
+ )
+
+
+@pytest.mark.skipif(not on_win, reason="WSL is only on Windows")
+def test_wsl_unsupported(
+ testing_metadata: MetaData,
+ mocker: MockerFixture,
+ tmp_path: Path,
+):
+ mocker.patch(
+ "conda_build.os_utils.external.find_executable",
+ return_value="C:\\Windows\\System32\\bash.exe",
+ )
+
+ (script := tmp_path / "install.sh").touch()
+ with pytest.raises(CondaBuildUserError):
+ build.bundle_conda(
+ output={"script": script},
+ metadata=testing_metadata,
+ env={},
+ stats={},
+ )
+
+
+def test_handle_anaconda_upload(testing_config: Config, mocker: MockerFixture):
+ mocker.patch(
+ "conda_build.os_utils.external.find_executable",
+ return_value=None,
+ )
+ testing_config.anaconda_upload = True
+
+ with pytest.raises(CondaBuildUserError):
+ build.handle_anaconda_upload((), testing_config)
+
+
+def test_tests_failed(testing_metadata: MetaData, tmp_path: Path):
+ with pytest.raises(CondaBuildUserError):
+ build.tests_failed(
+ package_or_metadata=testing_metadata,
+ move_broken=True,
+ broken_dir=tmp_path,
+ config=testing_metadata.config,
+ )
diff --git a/tests/test_develop.py b/tests/test_develop.py
index d72bb247d3..498f1650c8 100644
--- a/tests/test_develop.py
+++ b/tests/test_develop.py
@@ -9,7 +9,8 @@
import pytest
-from conda_build.develop import _uninstall, write_to_conda_pth
+from conda_build.develop import _uninstall, execute, get_setup_py, write_to_conda_pth
+from conda_build.exceptions import CondaBuildUserError
from conda_build.utils import rm_rf
from .utils import thisdir
@@ -99,3 +100,20 @@ def test_uninstall(site_packages: Path, conda_pth: Path):
_uninstall(site_packages, path)
assert list(filter(None, conda_pth.read_text().split("\n"))) == develop_paths
+
+
+def test_get_setup_py(tmp_path: Path):
+ setup_py_path = tmp_path / "setup.py"
+ setup_py_path.touch()
+ result = get_setup_py(str(tmp_path))
+ assert "setup.py" in result
+
+ with pytest.raises(CondaBuildUserError, match="No setup.py found in "):
+ get_setup_py("/path/to/non-existent")
+
+
+def test_execute_error_nonexistent_prefix():
+ with pytest.raises(
+ CondaBuildUserError, match="Error: environment does not exist: "
+ ):
+ execute("/path/to/non-existent/prefix", "python", "setup.py", "install")
diff --git a/tests/test_inspect.py b/tests/test_inspect.py
index cd90ba98ae..04acf2728b 100644
--- a/tests/test_inspect.py
+++ b/tests/test_inspect.py
@@ -6,11 +6,12 @@
import pytest
from conda_build import api
+from conda_build.exceptions import CondaBuildUserError
def test_inspect_linkages():
if sys.platform == "win32":
- with pytest.raises(SystemExit) as exc:
+ with pytest.raises(CondaBuildUserError) as exc:
out_string = api.inspect_linkages("python")
assert "conda inspect linkages is only implemented in Linux and OS X" in exc
else:
@@ -20,7 +21,7 @@ def test_inspect_linkages():
def test_inspect_objects():
if sys.platform != "darwin":
- with pytest.raises(SystemExit) as exc:
+ with pytest.raises(CondaBuildUserError) as exc:
out_string = api.inspect_objects("python")
assert "conda inspect objects is only implemented in OS X" in exc
else:
diff --git a/tests/test_inspect_pkg.py b/tests/test_inspect_pkg.py
index dae6d7f6ca..4f20b85105 100644
--- a/tests/test_inspect_pkg.py
+++ b/tests/test_inspect_pkg.py
@@ -10,8 +10,9 @@
import pytest
from conda.core.prefix_data import PrefixData
-from conda_build.inspect_pkg import which_package
-from conda_build.utils import on_win
+from conda_build.exceptions import CondaBuildUserError
+from conda_build.inspect_pkg import inspect_linkages, inspect_objects, which_package
+from conda_build.utils import on_mac, on_win
def test_which_package(tmp_path: Path):
@@ -271,3 +272,26 @@ def test_which_package_battery(tmp_path: Path):
# missing files should return no packages
assert not len(list(which_package(tmp_path / "missing", tmp_path)))
+
+
+def test_inspect_linkages_no_packages():
+ with pytest.raises(CondaBuildUserError):
+ inspect_linkages([])
+
+
+@pytest.mark.skipif(not on_win, reason="inspect_linkages is available")
+def test_inspect_linkages_on_win():
+ with pytest.raises(CondaBuildUserError):
+ inspect_linkages(["packages"])
+
+
+@pytest.mark.skipif(on_win, reason="inspect_linkages is not available")
+def test_inspect_linkages_not_installed():
+ with pytest.raises(CondaBuildUserError):
+ inspect_linkages(["not_installed_pkg"])
+
+
+@pytest.mark.skipif(on_mac, reason="inspect_objects is only available on macOS")
+def test_inspect_objects_not_on_mac():
+ with pytest.raises(CondaBuildUserError):
+ inspect_objects([])
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index 1b9fc34258..b8dc9df8e4 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -5,6 +5,7 @@
import os
import subprocess
import sys
+from contextlib import nullcontext
from itertools import product
from typing import TYPE_CHECKING
@@ -15,12 +16,15 @@
from conda_build import api
from conda_build.config import Config
+from conda_build.exceptions import CondaBuildUserError
from conda_build.metadata import (
FIELDS,
OPTIONALLY_ITERABLE_FIELDS,
MetaData,
_hash_dependencies,
+ check_bad_chrs,
get_selectors,
+ sanitize,
select_lines,
yamlize,
)
@@ -30,6 +34,8 @@
from .utils import metadata_dir, metadata_path, thisdir
if TYPE_CHECKING:
+ from pathlib import Path
+
from pytest import MonkeyPatch
@@ -200,6 +206,7 @@ def test_clobber_section_data(testing_metadata):
@pytest.mark.serial
+@pytest.mark.filterwarnings("ignore", category=PendingDeprecationWarning)
def test_build_bootstrap_env_by_name(testing_metadata):
assert not any(
"git" in pkg for pkg in testing_metadata.meta["requirements"].get("build", [])
@@ -218,6 +225,7 @@ def test_build_bootstrap_env_by_name(testing_metadata):
subprocess.check_call(cmd.split())
+@pytest.mark.filterwarnings("ignore", category=PendingDeprecationWarning)
def test_build_bootstrap_env_by_path(testing_metadata):
assert not any(
"git" in pkg for pkg in testing_metadata.meta["requirements"].get("build", [])
@@ -549,3 +557,69 @@ def test_get_section(testing_metadata: MetaData):
assert isinstance(section, list)
else:
assert isinstance(section, dict)
+
+
+def test_select_lines_invalid():
+ with pytest.raises(
+ CondaBuildUserError,
+ match=r"Invalid selector in meta\.yaml",
+ ):
+ select_lines("text # [{bad]", {}, variants_in_place=True)
+
+
+@pytest.mark.parametrize(
+ "keys,expected",
+ [
+ pytest.param([], {}, id="git_tag"),
+ pytest.param(["git_tag"], {"git_rev": "rev"}, id="git_tag"),
+ pytest.param(["git_branch"], {"git_rev": "rev"}, id="git_branch"),
+ pytest.param(["git_rev"], {"git_rev": "rev"}, id="git_rev"),
+ pytest.param(["git_tag", "git_branch"], None, id="git_tag + git_branch"),
+ pytest.param(["git_tag", "git_rev"], None, id="git_tag + git_rev"),
+ pytest.param(["git_branch", "git_rev"], None, id="git_branch + git_rev"),
+ pytest.param(
+ ["git_tag", "git_branch", "git_rev"],
+ None,
+ id="git_tag + git_branch + git_rev",
+ ),
+ ],
+)
+def test_sanitize_source(keys: list[str], expected: dict[str, str] | None) -> None:
+ with pytest.raises(
+ CondaBuildUserError,
+ match=r"Multiple git_revs:",
+ ) if expected is None else nullcontext():
+ assert sanitize({"source": {key: "rev" for key in keys}}) == {
+ "source": expected
+ }
+
+
+@pytest.mark.parametrize(
+ "value,field,invalid",
+ [
+ pytest.param("good", "field", None, id="valid field"),
+ pytest.param("!@d&;-", "field", "!&;@", id="invalid field"),
+ pytest.param("good", "package/version", None, id="valid package/version"),
+ pytest.param("!@d&;-", "package/version", "&-;@", id="invalid package/version"),
+ pytest.param("good", "build/string", None, id="valid build/string"),
+ pytest.param("!@d&;-", "build/string", "!&-;@", id="invalid build/string"),
+ ],
+)
+def test_check_bad_chrs(value: str, field: str, invalid: str) -> None:
+ with pytest.raises(
+ CondaBuildUserError,
+ match=rf"Bad character\(s\) \({invalid}\) in {field}: {value}\.",
+ ) if invalid else nullcontext():
+ check_bad_chrs(value, field)
+
+
+def test_parse_until_resolved(testing_metadata: MetaData, tmp_path: Path) -> None:
+ (recipe := tmp_path / (name := "meta.yaml")).write_text("{{ UNDEFINED[:2] }}")
+ testing_metadata._meta_path = recipe
+ testing_metadata._meta_name = name
+
+ with pytest.raises(
+ CondaBuildUserError,
+ match=("Failed to render jinja template"),
+ ):
+ testing_metadata.parse_until_resolved()
diff --git a/tests/test_noarch_python.py b/tests/test_noarch_python.py
new file mode 100644
index 0000000000..f09967c0f2
--- /dev/null
+++ b/tests/test_noarch_python.py
@@ -0,0 +1,60 @@
+# Copyright (C) 2014 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+from __future__ import annotations
+
+import stat
+from typing import TYPE_CHECKING
+from uuid import uuid4
+
+import pytest
+
+from conda_build.exceptions import CondaBuildUserError
+from conda_build.noarch_python import rewrite_script
+from conda_build.utils import bin_dirname, on_win
+
+if TYPE_CHECKING:
+ from pathlib import Path
+
+
+@pytest.mark.parametrize(
+ "before,after",
+ [
+ ("script.py", "script.py"),
+ ("script-script.py", "script" if on_win else "script-script.py"),
+ ],
+)
+def test_rewrite_script(tmp_path: Path, before: str, after: str) -> None:
+ """Test that a script file is rewritten to the python-scripts directory."""
+ script = tmp_path / bin_dirname / before
+ script.parent.mkdir()
+
+ # write some text to the script
+ script.write_text(text := uuid4().hex)
+
+ # change the permissions so we can check they are preserved
+ script.chmod(permissions := stat.S_IFREG | (0o444 if on_win else 0o456))
+
+ # rewrite the script to the python-scripts directory
+ rewrite_script(script.name, tmp_path)
+
+ # check that the original script has been removed
+ assert not script.exists()
+
+ # check that the script has been rewritten to the python-scripts directory,
+ # has the same text, and the same permissions
+ rewrite = tmp_path / "python-scripts" / after
+ assert rewrite.read_text() == text
+ assert rewrite.stat().st_mode == permissions
+
+
+def test_rewrite_script_binary(tmp_path: Path) -> None:
+ """Test that a binary file will raise an error."""
+ binary = tmp_path / bin_dirname / "binary"
+ binary.parent.mkdir()
+
+ # write some binary data to the script
+ binary.write_bytes(b"\x80\x81\x82\x83\x84\x85")
+
+ # try to rewrite the binary script to the python-scripts directory
+ with pytest.raises(CondaBuildUserError, match=r"package contains binary script"):
+ rewrite_script(binary.name, tmp_path)
diff --git a/tests/test_post.py b/tests/test_post.py
index 97ef1448fc..e0eb59237f 100644
--- a/tests/test_post.py
+++ b/tests/test_post.py
@@ -9,6 +9,7 @@
import pytest
+import conda_build.utils
from conda_build import api, post
from conda_build.utils import (
get_site_packages,
@@ -18,7 +19,7 @@
package_has_file,
)
-from .utils import add_mangling, metadata_dir
+from .utils import add_mangling, metadata_dir, subpackage_path
@pytest.mark.skipif(
@@ -138,7 +139,7 @@ def test_menuinst_validation_fails_bad_schema(testing_config, caplog, tmp_path):
assert "ValidationError" in captured_text
-def test_menuinst_validation_fails_bad_json(testing_config, caplog, tmp_path):
+def test_menuinst_validation_fails_bad_json(testing_config, monkeypatch, tmp_path):
"3rd check - non-parsable JSON fails validation"
recipe = Path(metadata_dir, "_menu_json_validation")
recipe_tmp = tmp_path / "_menu_json_validation"
@@ -147,13 +148,56 @@ def test_menuinst_validation_fails_bad_json(testing_config, caplog, tmp_path):
menu_json_contents = menu_json.read_text()
menu_json.write_text(menu_json_contents + "Make this an invalid JSON")
- with caplog.at_level(logging.WARNING):
- api.build(str(recipe_tmp), config=testing_config, notest=True)
+ # suspect caplog fixture may fail; use monkeypatch instead.
+ records = []
- captured_text = caplog.text
- assert "Found 'Menu/*.json' files but couldn't validate:" not in captured_text
- assert "not a valid menuinst JSON document" in captured_text
- assert "JSONDecodeError" in captured_text
+ class MonkeyLogger:
+ def __getattr__(self, name):
+ return self.warning
+
+ def warning(self, *args, **kwargs):
+ records.append((*args, kwargs))
+
+ monkeylogger = MonkeyLogger()
+
+ def get_monkey_logger(*args, **kwargs):
+ return monkeylogger
+
+ # For some reason it uses get_logger in the individual functions, instead of
+ # a module-level global that we could easily patch.
+ monkeypatch.setattr(conda_build.utils, "get_logger", get_monkey_logger)
+
+ api.build(str(recipe_tmp), config=testing_config, notest=True)
+
+ # without %s substitution
+ messages = [record[0] for record in records]
+
+ assert "Found 'Menu/*.json' files but couldn't validate: %s" not in messages
+ assert "'%s' is not a valid menuinst JSON document!" in messages
+ assert any(
+ isinstance(record[-1].get("exc_info"), json.JSONDecodeError)
+ for record in records
+ )
+
+
+def test_file_hash(testing_config, caplog, tmp_path):
+ "check that the post-link check caching takes the file path into consideration"
+ recipe = Path(subpackage_path, "_test-file-hash")
+ recipe_tmp = tmp_path / "test-file-hash"
+ shutil.copytree(recipe, recipe_tmp)
+
+ variants = {"python": ["3.11", "3.12"]}
+ testing_config.ignore_system_config = True
+ testing_config.activate = True
+
+ with caplog.at_level(logging.INFO):
+ api.build(
+ str(recipe_tmp),
+ config=testing_config,
+ notest=True,
+ variants=variants,
+ activate=True,
+ )
@pytest.mark.skipif(on_win, reason="rpath fixup not done on Windows.")
diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py
index 11e43383d0..881a4eb4cb 100644
--- a/tests/test_subpackages.py
+++ b/tests/test_subpackages.py
@@ -11,6 +11,7 @@
from conda.base.context import context
from conda_build import api, utils
+from conda_build.exceptions import BuildScriptException, CondaBuildUserError
from conda_build.metadata import MetaDataTuple
from conda_build.render import finalize_metadata
@@ -292,7 +293,7 @@ def test_per_output_tests(testing_config):
@pytest.mark.sanity
def test_per_output_tests_script(testing_config):
recipe_dir = os.path.join(subpackage_dir, "_output_test_script")
- with pytest.raises(SystemExit):
+ with pytest.raises(CondaBuildUserError):
api.build(recipe_dir, config=testing_config)
@@ -353,6 +354,18 @@ def test_build_script_and_script_env_warn_empty_script_env(testing_config):
api.build(recipe, config=testing_config)
+@pytest.mark.sanity
+def test_build_script_does_not_set_env_from_script_env_if_missing(
+ testing_config, capfd, monkeypatch
+):
+ monkeypatch.delenv("TEST_FN_DOESNT_EXIST", raising=False)
+ recipe = os.path.join(subpackage_dir, "_build_script_relying_on_missing_var")
+ with pytest.raises(BuildScriptException):
+ api.build(recipe, config=testing_config)
+ captured = capfd.readouterr()
+ assert "KeyError: 'TEST_FN_DOESNT_EXIST'" in captured.err
+
+
@pytest.mark.sanity
@pytest.mark.skipif(sys.platform != "darwin", reason="only implemented for mac")
def test_strong_run_exports_from_build_applies_to_host(testing_config):
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 70a2981203..98733546b5 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -163,7 +163,7 @@ def test_logger_filtering(caplog, capfd):
log.info("test info message")
log.info("test duplicate message")
log.info("test duplicate message")
- log.warn("test warn message")
+ log.warning("test warn message")
log.error("test error message")
out, err = capfd.readouterr()
assert "test debug message" in out
@@ -211,7 +211,7 @@ def test_logger_config_from_file(testing_workdir, capfd, mocker):
)
log = utils.get_logger(__name__)
# default log level is INFO, but our config file should set level to DEBUG
- log.warn("test message")
+ log.warning("test message")
# output should have gone to stdout according to config above.
out, err = capfd.readouterr()
assert "test message" in out
diff --git a/tests/test_variants.py b/tests/test_variants.py
index 3c79e36e16..84c1d96404 100644
--- a/tests/test_variants.py
+++ b/tests/test_variants.py
@@ -426,6 +426,37 @@ def test_get_used_loop_vars():
}
+def test_get_used_loop_vars_jinja2():
+ metadata = api.render(
+ os.path.join(variants_dir, "jinja2_used_variables"),
+ finalize=False,
+ bypass_env_check=True,
+ )
+ # 4 CLANG_VERSION values x 2 VCVER values - one skipped because of jinja2 conditionals
+ assert len(metadata) == 7
+ for m, _, _ in metadata:
+ assert m.get_used_loop_vars(force_top_level=False) == {"CLANG_VERSION", "VCVER"}
+ assert m.get_used_loop_vars(force_top_level=True) == {
+ "CL_VERSION",
+ "CLANG_VERSION",
+ "VCVER",
+ }
+ assert m.get_used_vars(force_top_level=False) == {
+ "CLANG_VERSION",
+ "VCVER",
+ "FOO",
+ "target_platform",
+ }
+ assert m.get_used_vars(force_top_level=True) == {
+ "CLANG_VERSION",
+ "CL_VERSION",
+ "VCVER",
+ "FOO",
+ "FOOBAR",
+ "target_platform",
+ }
+
+
def test_reprovisioning_source():
api.render(os.path.join(variants_dir, "20_reprovision_source"))