From 0c00a297e118f3d15e3c24743c102570f754ddd5 Mon Sep 17 00:00:00 2001 From: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com> Date: Thu, 31 Oct 2024 12:31:34 -0500 Subject: [PATCH 001/208] Concretize reuse: reuse specs from environment (#45139) The already concrete specs in an environment are now among the reusable specs for the concretizer. This includes concrete specs from all include_concrete environments. In addition to this change to the default reuse, `environment` is added as a reuse type for the concretizer config. This allows users to specify: spack: concretizer: # Reuse from this environment (including included concrete) but not elsewhere reuse: from: - type: environment # or reuse only from the included environment my_env reuse: from: - type: environment: my_env # or reuse from everywhere reuse: true If reuse is specified from a specific environment, only specs from that environment will be reused. If the reused environment is not specified via include_concrete, the concrete specs will be retrieved at concretization time to be reused. Signed-off-by: Ryan Krattiger Co-authored-by: Gregory Becker --- lib/spack/spack/environment/__init__.py | 4 + lib/spack/spack/environment/environment.py | 26 +++- lib/spack/spack/schema/concretizer.py | 19 ++- lib/spack/spack/schema/env.py | 4 +- lib/spack/spack/solver/asp.py | 92 +++++++++++++- lib/spack/spack/test/cmd/env.py | 132 ++++++++++++++++++++- lib/spack/spack/test/env.py | 15 +++ 7 files changed, 279 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/environment/__init__.py b/lib/spack/spack/environment/__init__.py index fb083594e00fb5..62445e04379134 100644 --- a/lib/spack/spack/environment/__init__.py +++ b/lib/spack/spack/environment/__init__.py @@ -473,6 +473,7 @@ active_environment, all_environment_names, all_environments, + as_env_dir, create, create_in_dir, deactivate, @@ -480,6 +481,7 @@ default_view_name, display_specs, environment_dir_from_name, + environment_from_name_or_dir, exists, initialize_environment_dir, installed_specs, @@ -507,6 +509,7 @@ "active_environment", "all_environment_names", "all_environments", + "as_env_dir", "create", "create_in_dir", "deactivate", @@ -514,6 +517,7 @@ "default_view_name", "display_specs", "environment_dir_from_name", + "environment_from_name_or_dir", "exists", "initialize_environment_dir", "installed_specs", diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index b00405b5d1e277..81a2223c4995b8 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -277,6 +277,22 @@ def is_env_dir(path): return os.path.isdir(path) and os.path.exists(os.path.join(path, manifest_name)) +def as_env_dir(name_or_dir): + """Translate an environment name or directory to the environment directory""" + if is_env_dir(name_or_dir): + return name_or_dir + else: + validate_env_name(name_or_dir) + if not exists(name_or_dir): + raise SpackEnvironmentError("no such environment '%s'" % name_or_dir) + return root(name_or_dir) + + +def environment_from_name_or_dir(name_or_dir): + """Get an environment with the supplied name or directory.""" + return Environment(as_env_dir(name_or_dir)) + + def read(name): """Get an environment with the supplied name.""" validate_env_name(name) @@ -1506,6 +1522,7 @@ def _get_specs_to_concretize( # Exit early if the set of concretized specs is the set of user specs new_user_specs = set(self.user_specs) - set(self.concretized_user_specs) kept_user_specs = set(self.user_specs) & 
set(self.concretized_user_specs) + kept_user_specs |= set(self.included_user_specs) if not new_user_specs: return new_user_specs, kept_user_specs, [] @@ -1552,7 +1569,10 @@ def _concretize_together_where_possible( abstract = old_concrete_to_abstract.get(abstract, abstract) if abstract in new_user_specs: result.append((abstract, concrete)) - self._add_concrete_spec(abstract, concrete) + + # Only add to the environment if it's from this environment (not just included) + if abstract in self.user_specs: + self._add_concrete_spec(abstract, concrete) return result @@ -1595,7 +1615,9 @@ def _concretize_together( ordered_user_specs = list(new_user_specs) + list(kept_user_specs) concretized_specs = [x for x in zip(ordered_user_specs, concrete_specs)] for abstract, concrete in concretized_specs: - self._add_concrete_spec(abstract, concrete) + # Don't add if it's just included + if abstract in self.user_specs: + self._add_concrete_spec(abstract, concrete) # zip truncates the longer list, which is exactly what we want here return list(zip(new_user_specs, concrete_specs)) diff --git a/lib/spack/spack/schema/concretizer.py b/lib/spack/spack/schema/concretizer.py index 0b222d923e1b0f..b52b305ed9a12d 100644 --- a/lib/spack/spack/schema/concretizer.py +++ b/lib/spack/spack/schema/concretizer.py @@ -32,8 +32,23 @@ "type": "object", "properties": { "type": { - "type": "string", - "enum": ["local", "buildcache", "external"], + "oneOf": [ + { + "type": "string", + "enum": [ + "local", + "buildcache", + "environment", + "external", + ], + }, + { + "type": "object", + "properties": { + "environment": {"type": "string"} + }, + }, + ] }, "include": LIST_OF_SPECS, "exclude": LIST_OF_SPECS, diff --git a/lib/spack/spack/schema/env.py b/lib/spack/spack/schema/env.py index 0adeb7b475ba28..b75bd231f4d206 100644 --- a/lib/spack/spack/schema/env.py +++ b/lib/spack/spack/schema/env.py @@ -19,6 +19,8 @@ #: Top level key in a manifest file TOP_LEVEL_KEY = "spack" +include_concrete = {"type": "array", "default": [], "items": {"type": "string"}} + properties: Dict[str, Any] = { "spack": { "type": "object", @@ -31,7 +33,7 @@ { "include": {"type": "array", "default": [], "items": {"type": "string"}}, "specs": spec_list_schema, - "include_concrete": {"type": "array", "default": [], "items": {"type": "string"}}, + "include_concrete": include_concrete, }, ), } diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 940aae0a72b608..97fbd03e8f1e8c 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -2616,6 +2616,7 @@ def setup( ) for name, info in env.dev_specs.items() ) + specs = tuple(specs) # ensure compatible types to add self.gen.h1("Reusable concrete specs") @@ -3978,7 +3979,7 @@ def selected_specs(self) -> List[spack.spec.Spec]: return [s for s in self.factory() if self.is_selected(s)] @staticmethod - def from_store(configuration, include, exclude) -> "SpecFilter": + def from_store(configuration, *, include, exclude) -> "SpecFilter": """Constructs a filter that takes the specs from the current store.""" packages = _external_config_with_implicit_externals(configuration) is_reusable = functools.partial(_is_reusable, packages=packages, local=True) @@ -3986,7 +3987,7 @@ def from_store(configuration, include, exclude) -> "SpecFilter": return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude) @staticmethod - def from_buildcache(configuration, include, exclude) -> "SpecFilter": + def from_buildcache(configuration, *, include, exclude) -> "SpecFilter": """Constructs a filter that takes the specs from the configured buildcaches.""" packages = _external_config_with_implicit_externals(configuration) is_reusable = functools.partial(_is_reusable, packages=packages, local=False) @@ -3994,6 +3995,29 @@ def from_buildcache(configuration, include, exclude) -> "SpecFilter": factory=_specs_from_mirror, is_usable=is_reusable, include=include, exclude=exclude ) + @staticmethod + def from_environment(configuration, *, include, exclude, env) -> "SpecFilter": + packages = _external_config_with_implicit_externals(configuration) + is_reusable = functools.partial(_is_reusable, packages=packages, local=True) + factory = functools.partial(_specs_from_environment, env=env) + return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude) + + @staticmethod + def from_environment_included_concrete( + configuration, + *, + include: List[str], + exclude: List[str], + env: ev.Environment, + included_concrete: str, + ) -> "SpecFilter": + packages = _external_config_with_implicit_externals(configuration) + is_reusable = functools.partial(_is_reusable, packages=packages, local=True) + factory = functools.partial( + _specs_from_environment_included_concrete, env=env, included_concrete=included_concrete + ) + return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude) + def _specs_from_store(configuration): store = spack.store.create(configuration) @@ -4011,6 +4035,23 @@ def _specs_from_mirror(): return [] +def _specs_from_environment(env): + """Return all concrete specs from the environment, including all included concrete specs.""" + if env: + return [concrete for _, concrete in env.concretized_specs()] + else: + return [] + + +def _specs_from_environment_included_concrete(env, included_concrete): + """Return only the concrete specs that the environment includes from included_concrete.""" + if env: + assert included_concrete in env.included_concrete_envs + return [concrete for concrete in env.included_specs_by_hash[included_concrete].values()] + else: + return [] + + class ReuseStrategy(enum.Enum): ROOTS = enum.auto() DEPENDENCIES = enum.auto() @@ -4040,6 +4081,12 @@ def __init__(self, configuration: spack.config.Configuration) -> None: SpecFilter.from_buildcache( configuration=self.configuration, include=[], exclude=[] ), + SpecFilter.from_environment( + configuration=self.configuration, + include=[], + exclude=[], + env=ev.active_environment(), # includes all concrete includes + ), ] ) else: @@ -4054,7 +4101,46 @@ def __init__(self, configuration: spack.config.Configuration) -> None: for source in reuse_yaml.get("from", default_sources): include = source.get("include", default_include) exclude = source.get("exclude", default_exclude) - if source["type"] == "local": + if isinstance(source["type"], dict): + env_dir = ev.as_env_dir(source["type"].get("environment")) + active_env = ev.active_environment() + if active_env and env_dir in active_env.included_concrete_envs: + # If environment is included as a concrete environment, use the local copy + # of specs in the active environment. + # note: included concrete environments are only updated at concretization + # time, and reuse needs to match the included specs. 
+ self.reuse_sources.append( + SpecFilter.from_environment_included_concrete( + self.configuration, + include=include, + exclude=exclude, + env=active_env, + included_concrete=env_dir, + ) + ) + else: + # If the environment is not included as a concrete environment, use the + # current specs from its lockfile. + self.reuse_sources.append( + SpecFilter.from_environment( + self.configuration, + include=include, + exclude=exclude, + env=ev.environment_from_name_or_dir(env_dir), + ) + ) + elif source["type"] == "environment": + # reusing from the current environment implicitly reuses from all of the + # included concrete environments + self.reuse_sources.append( + SpecFilter.from_environment( + self.configuration, + include=include, + exclude=exclude, + env=ev.active_environment(), + ) + ) + elif source["type"] == "local": self.reuse_sources.append( SpecFilter.from_store(self.configuration, include=include, exclude=exclude) ) diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index d1d35e6f5e0cb4..f82cee10d723c3 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -9,6 +9,7 @@ import pathlib import shutil from argparse import Namespace +from typing import Any, Dict, Optional import pytest @@ -74,7 +75,7 @@ def setup_combined_multiple_env(): env("create", "test1") test1 = ev.read("test1") with test1: - add("zlib") + add("mpich@1.0") test1.concretize() test1.write() @@ -401,14 +402,17 @@ def test_env_install_single_spec(install_mockery, mock_fetch): @pytest.mark.parametrize("unify", [True, False, "when_possible"]) -def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch): +def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch, mutable_config): test1, test2, combined = setup_combined_multiple_env() + combined.unify = unify + if not unify: + combined.manifest.set_default_view(False) + + combined.add("mpileaks") combined.concretize() combined.write() - combined.unify = unify - with combined: install() @@ -422,6 +426,14 @@ def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch): assert test1_roots == combined_included_roots[test1.path] assert test2_roots == combined_included_roots[test2.path] + mpileaks = combined.specs_by_hash[combined.concretized_order[0]] + if unify: + assert mpileaks["mpi"].dag_hash() in test1_roots + assert mpileaks["libelf"].dag_hash() in test2_roots + else: + # check that unification is not by accident + assert mpileaks["mpi"].dag_hash() not in test1_roots + def test_env_roots_marked_explicit(install_mockery, mock_fetch): install = SpackCommand("install") @@ -1869,7 +1881,7 @@ def test_env_include_concrete_envs_lockfile(): def test_env_include_concrete_add_env(): test1, test2, combined = setup_combined_multiple_env() - # crete new env & crecretize + # create new env & concretize env("create", "new") new_env = ev.read("new") with new_env: @@ -1921,6 +1933,116 @@ def test_env_include_concrete_remove_env(): assert test2.path not in lockfile_as_dict["include_concrete"].keys() +def configure_reuse(reuse_mode, combined_env) -> Optional[ev.Environment]: + override_env = None + _config: Dict[Any, Any] = {} + if reuse_mode == "true": + _config = {"concretizer": {"reuse": True}} + elif reuse_mode == "from_environment": + _config = {"concretizer": {"reuse": {"from": [{"type": "environment"}]}}} + elif reuse_mode == "from_environment_test1": + _config = {"concretizer": {"reuse": {"from": [{"type": {"environment": "test1"}}]}}} + elif reuse_mode == "from_environment_external_test": + # Create a new environment called external_test that enables the "debug" variant. + # The default is "~debug" + env("create", "external_test") + override_env = ev.read("external_test") + with override_env: + add("mpich@1.0 +debug") + override_env.concretize() + override_env.write() + + # Reuse from the environment that is not included. + # Specify the requirement for the debug variant. By default this would concretize to use + # mpich@3.0, but with reuse from the "external_test" environment the + # mpich@1.0 +debug version will be used. + _config = { + "concretizer": {"reuse": {"from": [{"type": {"environment": "external_test"}}]}}, + "packages": {"mpich": {"require": ["+debug"]}}, + } + elif reuse_mode == "from_environment_raise": + _config = { + "concretizer": {"reuse": {"from": [{"type": {"environment": "not-a-real-env"}}]}} + } + # Disable unification in these tests to avoid confusing reuse due to unification using an + # include concrete spec vs reuse due to the reuse configuration + _config["concretizer"].update({"unify": False}) + + combined_env.manifest.configuration.update(_config) + combined_env.manifest.changed = True + combined_env.write() + + return override_env + + +@pytest.mark.parametrize( + "reuse_mode", + [ + "true", + "from_environment", + "from_environment_test1", + "from_environment_external_test", + "from_environment_raise", + ], +) +def test_env_include_concrete_reuse(monkeypatch, reuse_mode): + + # The mock packages do not use the gcc-runtime + def mock_has_runtime_dependencies(*args, **kwargs): + return True + + monkeypatch.setattr( + spack.solver.asp, "_has_runtime_dependencies", mock_has_runtime_dependencies + ) + # The default mpi version is 3.x, provided by mpich in the mock repo. + # This test verifies that, when concretizing with an included concrete + # environment and "concretizer:reuse:true", the included + # concrete spec overrides the default with mpi@1.0. + test1, _, combined = setup_combined_multiple_env() + + # Set the reuse mode for the environment + override_env = configure_reuse(reuse_mode, combined) + if override_env: + # If there is an override environment (i.e. testing reuse with + # an external environment) update it here. + test1 = override_env + + # Capture the test1 specs included by combined + test1_specs_by_hash = test1.specs_by_hash + + try: + # Add mpileaks to the combined environment + with combined: + add("mpileaks") + combined.concretize() + comb_specs_by_hash = combined.specs_by_hash + + # create reference env with mpileaks that does not use reuse + # This should concretize to the default version of mpich (3.0) + env("create", "new") + ref_env = ev.read("new") + with ref_env: + add("mpileaks") + ref_env.concretize() + ref_specs_by_hash = ref_env.specs_by_hash + + # Ensure that the mpich used by mpileaks is the mpich from the reused test environment + comb_mpileaks_spec = [s for s in comb_specs_by_hash.values() if s.name == "mpileaks"] + test1_mpich_spec = [s for s in test1_specs_by_hash.values() if s.name == "mpich"] + assert len(comb_mpileaks_spec) == 1 + assert len(test1_mpich_spec) == 1 + assert comb_mpileaks_spec[0]["mpich"].dag_hash() == test1_mpich_spec[0].dag_hash() + + # None of the reference specs (using mpich@3) reuse specs from test1. 
+ # This tests that the reuse is not happening coincidentally + assert not any([s in test1_specs_by_hash for s in ref_specs_by_hash]) + + # Make sure the raise test raises + assert "raise" not in reuse_mode + except ev.SpackEnvironmentError: + assert "raise" in reuse_mode + + @pytest.mark.parametrize("unify", [True, False, "when_possible"]) def test_env_include_concrete_env_reconcretized(unify): """Double check to make sure that concrete_specs for the local specs is empty diff --git a/lib/spack/spack/test/env.py b/lib/spack/spack/test/env.py index 3f2183f5e2b028..a542539899c7cd 100644 --- a/lib/spack/spack/test/env.py +++ b/lib/spack/spack/test/env.py @@ -906,3 +906,18 @@ def test_only_roots_are_explicitly_installed(tmp_path, mock_packages, config, te assert callpath in temporary_store.db.query(explicit=False) env.install_specs([mpileaks], fake=True) assert temporary_store.db.query(explicit=True) == [mpileaks] + + +def test_environment_from_name_or_dir(mock_packages, mutable_mock_env_path, tmp_path): + test_env = ev.create("test") + + name_env = ev.environment_from_name_or_dir(test_env.name) + assert name_env.name == test_env.name + assert name_env.path == test_env.path + + dir_env = ev.environment_from_name_or_dir(test_env.path) + assert dir_env.name == test_env.name + assert dir_env.path == test_env.path + + with pytest.raises(ev.SpackEnvironmentError, match="no such environment"): + _ = ev.environment_from_name_or_dir("fake-env") From 94c29e1cfcc11c2d983513dfb65e3ab5bb13e161 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Thu, 31 Oct 2024 14:57:56 -0500 Subject: [PATCH 002/208] mcpp: add v2.7.2-25-g619046f with CVE patches (#47301) --- var/spack/repos/builtin/packages/mcpp/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/mcpp/package.py b/var/spack/repos/builtin/packages/mcpp/package.py index 0dd0562ee2d32d..f2ee6652a2c34d 100644 --- a/var/spack/repos/builtin/packages/mcpp/package.py +++ b/var/spack/repos/builtin/packages/mcpp/package.py @@ -12,11 +12,13 @@ class Mcpp(AutotoolsPackage, SourceforgePackage): homepage = "https://sourceforge.net/projects/mcpp/" sourceforge_mirror_path = "mcpp/mcpp/V.2.7.2/mcpp-2.7.2.tar.gz" + git = "https://github.com/jbrandwood/mcpp.git" + # Versions from `git describe --tags` + version("2.7.2-25-g619046f", commit="619046fa0debac3f86ff173098aeb59b8f051d19") version("2.7.2", sha256="3b9b4421888519876c4fc68ade324a3bbd81ceeb7092ecdbbc2055099fcb8864") - depends_on("c", type="build") # generated - depends_on("cxx", type="build") # generated + depends_on("c", type="build") def configure_args(self): config_args = ["--enable-mcpplib", "--disable-static"] From e3aca49e25e70883fd45ec42c3a43fcd7fda5e29 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 31 Oct 2024 21:58:42 +0100 Subject: [PATCH 003/208] database.py: remove process unsafe update_explicit (#47358) Fixes an issue reported where `spack env depfile` + `make -j` would non-deterministically refuse to mark all environment roots explicit. `update_explicit` had the pattern ```python rec = self._data[key] with self.write_transaction(): rec.explicit = explicit ``` but `write_transaction` may reinitialize `self._data`, meaning that mutating `rec` won't mutate `self._data`, and the changes won't be persisted. Instead, use `mark`, which has a correct implementation. 
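For reference, a minimal sketch of the corrected call-site pattern (this mirrors the `do_mark` change in the diff below; `mark` mutates the record through the database inside a write transaction instead of through a possibly stale `rec`):

```python
# take a single write transaction and let the database's `mark` update each
# record; `mark` looks the record up again after the transaction
# (re)initializes self._data, so the change is actually persisted
with spack.store.STORE.db.write_transaction():
    for spec in specs:
        spack.store.STORE.db.mark(spec, "explicit", explicit)
```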
Also avoids the essentially incorrect early return in `update_explicit` which is a pattern I don't think belongs in database.py: it branches on possibly stale data to realize there is nothing to change, but in reality it requires a write transaction to know that for a fact, but that would defeat the purpose. So, leave this optimization to the call site. --- lib/spack/spack/cmd/mark.py | 5 +++-- lib/spack/spack/database.py | 20 +------------------- lib/spack/spack/installer.py | 6 +++--- 3 files changed, 7 insertions(+), 24 deletions(-) diff --git a/lib/spack/spack/cmd/mark.py b/lib/spack/spack/cmd/mark.py index 0069008c4f05ac..66a84fb9076212 100644 --- a/lib/spack/spack/cmd/mark.py +++ b/lib/spack/spack/cmd/mark.py @@ -98,8 +98,9 @@ def do_mark(specs, explicit): specs (list): list of specs to be marked explicit (bool): whether to mark specs as explicitly installed """ - for spec in specs: - spack.store.STORE.db.update_explicit(spec, explicit) + with spack.store.STORE.db.write_transaction(): + for spec in specs: + spack.store.STORE.db.mark(spec, "explicit", explicit) def mark_specs(args, specs): diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 12f6ac24659fb3..e53dd817a52b1d 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -1336,7 +1336,7 @@ def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> self._data[spec_key] = spec_rec @_autospec - def mark(self, spec: "spack.spec.Spec", key, value) -> None: + def mark(self, spec: "spack.spec.Spec", key: str, value: Any) -> None: """Mark an arbitrary record on a spec.""" with self.write_transaction(): return self._mark(spec, key, value) @@ -1771,24 +1771,6 @@ def root(key, record): if id(rec.spec) not in needed and rec.installed ] - def update_explicit(self, spec, explicit): - """ - Update the spec's explicit state in the database. - - Args: - spec (spack.spec.Spec): the spec whose install record is being updated - explicit (bool): ``True`` if the package was requested explicitly - by the user, ``False`` if it was pulled in as a dependency of - an explicit package. - """ - rec = self.get_record(spec) - if explicit != rec.explicit: - with self.write_transaction(): - message = "{s.name}@{s.version} : marking the package {0}" - status = "explicit" if explicit else "implicit" - tty.debug(message.format(status, s=spec)) - rec.explicit = explicit - class NoUpstreamVisitor: """Gives edges to upstream specs, but does follow edges from upstream specs.""" diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index 80fe9f2b038e58..be09973336b2cd 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -412,7 +412,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b tty.debug(f"{pre} already registered in DB") record = spack.store.STORE.db.get_record(spec) if explicit and not record.explicit: - spack.store.STORE.db.update_explicit(spec, explicit) + spack.store.STORE.db.mark(spec, "explicit", True) except KeyError: # If not, register it and generate the module file. 
@@ -1507,8 +1507,8 @@ def _prepare_for_install(self, task: Task) -> None: self._update_installed(task) # Only update the explicit entry once for the explicit package - if task.explicit: - spack.store.STORE.db.update_explicit(task.pkg.spec, True) + if task.explicit and not rec.explicit: + spack.store.STORE.db.mark(task.pkg.spec, "explicit", True) def _cleanup_all_tasks(self) -> None: """Cleanup all tasks to include releasing their locks.""" From 0de1ddcbe84db8fb996ee0be94ebce03c82fba03 Mon Sep 17 00:00:00 2001 From: Tim Haines Date: Thu, 31 Oct 2024 16:33:04 -0500 Subject: [PATCH 004/208] cbtf: Update Boost dependencies (#47131) --- var/spack/repos/builtin/packages/cbtf/package.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/cbtf/package.py b/var/spack/repos/builtin/packages/cbtf/package.py index dcd00bb0f9f392..7daccff49836a0 100644 --- a/var/spack/repos/builtin/packages/cbtf/package.py +++ b/var/spack/repos/builtin/packages/cbtf/package.py @@ -4,7 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * -from spack.pkg.builtin.boost import Boost class Cbtf(CMakePackage): @@ -48,12 +47,7 @@ class Cbtf(CMakePackage): # for rpc depends_on("libtirpc", type="link") - depends_on("boost@1.70.0:") - - # TODO: replace this with an explicit list of components of Boost, - # for instance depends_on('boost +filesystem') - # See https://github.com/spack/spack/pull/22303 for reference - depends_on(Boost.with_default_variants) + depends_on("boost@1.70.0:1.84.0+date_time+filesystem+test+thread") # For MRNet depends_on("mrnet@5.0.1-3:+lwthreads", when="@develop") From e35bc1f82d826513896e92dc429c885dbdb44856 Mon Sep 17 00:00:00 2001 From: Paolo <142514942+paolotricerri@users.noreply.github.com> Date: Thu, 31 Oct 2024 21:51:47 +0000 Subject: [PATCH 005/208] acfl, armpl-cc: add v24.10 (#47167) * Introduce support for ArmPL and ACfL 24.10 This patch introduces the possibility of installing armpl-gcc and acfl 24.10 through spack. It also addressed one issue observed after PR https://github.com/spack/spack/pull/46594 * Fix Github action issues. - Remove default URL - Reinstate default OS for ACfL to RHEL. --- .../repos/builtin/packages/acfl/package.py | 50 ++++++++++++++++--- .../builtin/packages/armpl-gcc/package.py | 28 ++++++----- 2 files changed, 60 insertions(+), 18 deletions(-) diff --git a/var/spack/repos/builtin/packages/acfl/package.py b/var/spack/repos/builtin/packages/acfl/package.py index b1e30fd767906e..4df0644898456b 100644 --- a/var/spack/repos/builtin/packages/acfl/package.py +++ b/var/spack/repos/builtin/packages/acfl/package.py @@ -2,6 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os from spack.package import * @@ -24,9 +25,7 @@ "ubuntu22.04": "Ubuntu-22.04", "debian12": "Ubuntu-22.04", "sles15": "SLES-15", - "centos7": "RHEL-7", "centos8": "RHEL-8", - "rhel7": "RHEL-7", "rhel8": "RHEL-8", "rhel9": "RHEL-9", "rocky8": "RHEL-8", @@ -36,6 +35,36 @@ } _versions = { + "24.10": { + "RHEL-8": ( + "7c685c5393345baff573dc53ea3bb84e6293f9e51808e168ececcf51efb45813", + "https://developer.arm.com/-/cdn-downloads/permalink/Arm-Compiler-for-Linux/Version_24.10/arm-compiler-for-linux_24.10_RHEL-8_aarch64.tar", + ), + "RHEL-9": ( + "52767ec236098aec410b1d9899c4ba2c3dc2bcc3c2b500dbf2f4b7b3cfacf16d", + "https://developer.arm.com/-/cdn-downloads/permalink/Arm-Compiler-for-Linux/Version_24.10/arm-compiler-for-linux_24.10_RHEL-9_aarch64.tar", + ), + "SLES-15": ( + "ab118af1150931d59e7ec89f9c235a89bf604700ace53f549d3898677e7e76a4", + "https://developer.arm.com/-/cdn-downloads/permalink/Arm-Compiler-for-Linux/Version_24.10/arm-compiler-for-linux_24.10_SLES-15_aarch64.tar", + ), + "Ubuntu-20.04": ( + "defe9b8bd31d28aba1c8b8026295b6e277f221d1e387b16d8d86f4dea9b75c27", + "https://developer.arm.com/-/cdn-downloads/permalink/Arm-Compiler-for-Linux/Version_24.10/arm-compiler-for-linux_24.10_Ubuntu-20.04_aarch64.tar", + ), + "Ubuntu-22.04": ( + "10c0fad1ff3628f505ada90359c68b046676a4c6cab1131d76ae0429d3694415", + "https://developer.arm.com/-/cdn-downloads/permalink/Arm-Compiler-for-Linux/Version_24.10/arm-compiler-for-linux_24.10_Ubuntu-22.04_aarch64.tar", + ), + "AmazonLinux-2": ( + "8abd35c455adb94812aaa55853f72ac55e142940e775e985eeedbbbe17902d8f", + "https://developer.arm.com/-/cdn-downloads/permalink/Arm-Compiler-for-Linux/Version_24.10/arm-compiler-for-linux_24.10_AmazonLinux-2_aarch64.tar", + ), + "AmazonLinux-2023": ( + "6b1cf34240af15ae9a7c767d7f484f2fa79c4633571b613e3d65e20b8d3ba65a", + "https://developer.arm.com/-/cdn-downloads/permalink/Arm-Compiler-for-Linux/Version_24.10/arm-compiler-for-linux_24.10_AmazonLinux-2023_aarch64.tar", + ), + }, "24.04": { "RHEL-7": ( "064c3ecfd71cba3d8bf639448e899388f58eb7faef4b38f3c1aace625ace8b1e", @@ -209,10 +238,13 @@ def get_os(ver): spack_os = spack.platforms.host().default_os - if ver.startswith("22."): + if ver.startswith("22"): return _os_map_before_23.get(spack_os, "") - else: - return _os_map.get(spack_os, "RHEL-7") + if ver.startswith("23") or ver == "24.04": + return {**_os_map, "centos7": "RHEL-7", "rhel7": "RHEL-7"}.get(spack_os, "RHEL-7") + if ver == "24.10": + return _os_map.get(spack_os, "RHEL-8") + return "RHEL-8" def get_armpl_version_to_3(spec): @@ -234,6 +266,11 @@ def get_armpl_prefix(spec): return join_path(spec.prefix, f"armpl-{ver}_{os}_arm-linux-compiler") +def get_gcc_prefix(spec): + dirlist = next(os.walk(spec.prefix))[1] + return join_path(spec.prefix, next(dir for dir in dirlist if dir.startswith("gcc"))) + + def get_acfl_prefix(spec): os = get_os(spec.version.string) if spec.version.string.startswith("22."): @@ -260,7 +297,6 @@ class Acfl(Package, CompilerPackage): """ homepage = "https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux" - url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_Ubuntu-22.04_aarch64.tar" maintainers("paolotricerri") @@ -402,6 +438,7 @@ def setup_run_environment(self, env): def check_install(self): arm_dir = get_acfl_prefix(self.spec) armpl_dir = get_armpl_prefix(self.spec) + gcc_dir = get_gcc_prefix(self.spec) suffix = get_armpl_suffix(self.spec) 
armpl_example_dir = join_path(armpl_dir, f"examples{suffix}") # run example makefile @@ -411,6 +448,7 @@ def check_install(self): "CC=" + self.cc, "F90=" + self.fortran, "CPATH=" + join_path(arm_dir, "include"), + "COMPILER_PATH=" + gcc_dir, "ARMPL_DIR=" + armpl_dir, ) # clean up diff --git a/var/spack/repos/builtin/packages/armpl-gcc/package.py b/var/spack/repos/builtin/packages/armpl-gcc/package.py index b6dffafb85a671..25f187781e4822 100644 --- a/var/spack/repos/builtin/packages/armpl-gcc/package.py +++ b/var/spack/repos/builtin/packages/armpl-gcc/package.py @@ -54,6 +54,11 @@ } _versions = { + "24.10": { + "deb": ("2be772d41c0e8646e24c4f57e188e96f2dd8934966ae560c74fa905cbde5e1bc"), + "macOS": ("04e794409867e6042ed0f487bbaf47cc6edd527dc6ddad67160f1dba83906969"), + "rpm": ("055d4b3c63d990942d453a8720d029be7e604646218ffc3262321683f51f23aa"), + }, "24.04": { "deb": ("a323074cd08af82f4d79988cc66088b18e47dea4b93323b1b8a0f994f769f2f0"), "macOS": ("228bf3a2c25dbd45c2f89c78f455ee3c7dfb25e121c20d2765138b5174e688dc"), @@ -261,7 +266,8 @@ def get_os_or_pkg_manager(ver): return _os_pkg_map.get(platform.default_os, "rpm") -def get_package_url_before_24(base_url, version): +def get_package_url_before_24(version): + base_url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-performance-libraries" armpl_version = version.split("_")[0] armpl_version_dashed = armpl_version.replace(".", "-") compiler_version = version.split("_", 1)[1] @@ -270,7 +276,7 @@ def get_package_url_before_24(base_url, version): if armpl_version.startswith("23.06"): return ( f"{base_url}/{armpl_version_dashed}/" - + f"armpl_{armpl_version}_{compiler_version}.dmg" + f"armpl_{armpl_version}_{compiler_version}.dmg" ) else: filename = f"arm-performance-libraries_{armpl_version}_macOS.dmg" @@ -286,9 +292,11 @@ def get_package_url_before_24(base_url, version): return f"{base_url}/{armpl_version_dashed}/{os_short}/{filename}" -def get_package_url_from_24(base, version): +def get_package_url_from_24(version): + base_url = ( + "https://developer.arm.com/-/cdn-downloads/permalink/Arm-Performance-Libraries/Version" + ) pkg_system = get_os_or_pkg_manager(version) - os = "macOS" if pkg_system == "macOS" else "linux" extension = "tgz" if pkg_system == "macOS" else "tar" @@ -298,17 +306,15 @@ def get_package_url_from_24(base, version): full_name_library = f"{full_name_library}_gcc" file_name = f"{full_name_library}.{extension}" - vn = version.replace(".", "-") - url_parts = f"{base}/{vn}/{os}/{file_name}" + url_parts = f"{base_url}_{version}/{file_name}" return url_parts def get_package_url(version): - base_url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-performance-libraries" if version[:2] >= "24": - return get_package_url_from_24(base_url, version) + return get_package_url_from_24(version) else: - return get_package_url_before_24(base_url, version) + return get_package_url_before_24(version) def get_armpl_prefix(spec): @@ -335,8 +341,6 @@ class ArmplGcc(Package): high-performance computing applications on Arm processors.""" homepage = "https://developer.arm.com/tools-and-software/server-and-hpc/downloads/arm-performance-libraries" - url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-performance-libraries/24-04/linux/arm-performance-libraries_24.04_deb_gcc.tar" - maintainers("paolotricerri") for ver, packages in _versions.items(): @@ -434,7 +438,7 @@ def install(self, spec, prefix): exe = Executable( f"./arm-performance-libraries_{armpl_version}_" - + f"{get_os_or_pkg_manager(armpl_version)}.sh" + 
f"{get_os_or_pkg_manager(armpl_version)}.sh" ) exe("--accept", "--force", "--install-to", prefix) From b4b3320f71d22d6475b70a50056d6eb1fd1057d2 Mon Sep 17 00:00:00 2001 From: Tobias Ribizel Date: Fri, 1 Nov 2024 00:05:00 +0100 Subject: [PATCH 006/208] typst: new package (#47293) --- .../repos/builtin/packages/typst/package.py | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 var/spack/repos/builtin/packages/typst/package.py diff --git a/var/spack/repos/builtin/packages/typst/package.py b/var/spack/repos/builtin/packages/typst/package.py new file mode 100644 index 00000000000000..b495b89482c258 --- /dev/null +++ b/var/spack/repos/builtin/packages/typst/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import re + +from spack.package import * + + +class Typst(CargoPackage): + """Typst is a new markup-based typesetting system for the sciences.""" + + homepage = "https://typst.app" + git = "https://github.com/typst/typst" + executables = ["^typst$"] + + maintainers("upsj") + + license("Apache-2.0", checked_by="upsj") + + version("0.12.0", commit="737895d769188f6fc154523e67a9102bc24c872e", tag="v0.12.0") + + depends_on("rust@1.81.0:") + depends_on("openssl") + depends_on("pkgconf", type="build") + + @classmethod + def determine_version(cls, exe): + output = Executable(exe)("--version", output=str, error=str) + match = re.search(r"typst ([0-9.]+)", output) + return match.group(1) if match else None + + def build(self, spec, prefix): + # The cargopackage installer doesn't allow for an option to install from a subdir + # see: https://github.com/rust-lang/cargo/issues/7599 + cargo("install", "--root", "out", "--path", "crates/typst-cli") From 8076134c918ff8866bfd23b787ae6609c1bdc2f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= <765740+giordano@users.noreply.github.com> Date: Fri, 1 Nov 2024 00:32:43 +0000 Subject: [PATCH 007/208] nvidia-nsight-systems: new package (#47355) Co-authored-by: Scot Halverson --- .../packages/nvidia-nsight-systems/package.py | 127 ++++++++++++++++++ 1 file changed, 127 insertions(+) create mode 100644 var/spack/repos/builtin/packages/nvidia-nsight-systems/package.py diff --git a/var/spack/repos/builtin/packages/nvidia-nsight-systems/package.py b/var/spack/repos/builtin/packages/nvidia-nsight-systems/package.py new file mode 100644 index 00000000000000..945bbe92f465ba --- /dev/null +++ b/var/spack/repos/builtin/packages/nvidia-nsight-systems/package.py @@ -0,0 +1,127 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os +import platform +import re +import shutil +from glob import glob + +from spack.package import * + +# FIXME Remove hack for polymorphic versions +# This package uses a ugly hack to be able to dispatch, given the same +# version, to different binary packages based on the platform that is +# running spack. See #13827 for context. 
+# If you need to add a new version, please be aware that: +# - versions in the following dict are automatically added to the package +# - version tuple must be in the form (checksum, url) +# - checksum must be sha256 +# - package key must be in the form '{os}-{arch}' where 'os' is in the +# format returned by platform.system() and 'arch' by platform.machine() +_versions = { + "2024.6.1": { + "Linux-aarch64": ( + "24700c28dfda9f95d4e93de218b86ab1ba0ee8b74cb61c3c581767296159c75c", + "https://developer.nvidia.com/downloads/assets/tools/secure/nsight-systems/2024_6/nsight-systems-2024.6.1-2024.6.1.90_3490548-0.aarch64.rpm", + ), + "Linux-x86_64": ( + "dd4359a47ff3857395c55a0da483b64f5c0c3a1a2e57dd543a512dc3d2cd2674", + "https://developer.nvidia.com/downloads/assets/tools/secure/nsight-systems/2024_6/nsight-systems-2024.6.1-2024.6.1.90_3490548-0.x86_64.rpm", + ), + }, + "2024.1.1": { + "Linux-aarch64": ( + "41dc15ae128ef1de8e582b66bb465ac6bd67b9d20ef77fc70528b735d80fb3ec", + "https://developer.download.nvidia.com/devtools/repos/rhel8/arm64/nsight-systems-2024.1.1-2024.1.1.59_3380207-0.aarch64.rpm", + ), + "Linux-ppc64le": ( + "8c98b511df1747c4c782430504ae6fa4b3fce6fa72623083a828fc0a1e11f1b8", + "https://developer.download.nvidia.com/devtools/repos/rhel8/ppc64le/nsight-systems-cli-2024.1.1-2024.1.1.59_3380207-0.ppc64le.rpm", + ), + "Linux-x86_64": ( + "96f57548e0bd69cb02cd1fe8c70ed4a650636ecb3a5ea5ec490c8049adc2beb5", + "https://developer.download.nvidia.com/devtools/repos/rhel8/x86_64/nsight-systems-2024.1.1-2024.1.1.59_3380207-0.x86_64.rpm", + ), + }, +} + + +class NvidiaNsightSystems(Package): + """NVIDIA Nsight™ Systems is a system-wide performance analysis tool designed + to visualize an application’s algorithms, identify the largest opportunities + to optimize, and tune to scale efficiently across any quantity or size of CPUs + and GPUs, from large servers to the smallest system on a chip""" + + homepage = "https://developer.nvidia.com/nsight-systems" + url = "https://developer.download.nvidia.com/devtools/repos/" + maintainers("scothalverson") + license("NVIDIA Software License Agreement") + + executables = ["^nsys$"] + + # Used to unpack the source RPM archives. 
+ depends_on("libarchive programs='bsdtar'", type="build") + + for ver, packages in _versions.items(): + key = "{0}-{1}".format(platform.system(), platform.machine()) + pkg = packages.get(key) + if pkg: + version(ver, sha256=pkg[0], url=pkg[1], expand=False) + + @classmethod + def determine_version(cls, exe): + output = Executable(exe)("--version", output=str, error=str) + # Example output: + # NVIDIA Nsight Systems version 2024.1.1.59-241133802077v0 + # but we only want to match 2024.1.1 + match = re.search(r"NVIDIA Nsight Systems version ((?:[0-9]+.){2}[0-9])", output) + return match.group(1) if match else None + + def install(self, spec, prefix): + bsdtar = which("bsdtar") + rpm_file = glob(join_path(self.stage.source_path, "nsight-systems*.rpm"))[0] + params = ["-x", "-f", rpm_file] + ver = prefix.split("/")[-1].split("-")[-2] + bsdtar(*params) + + arch = self.spec.target.family + if arch == "aarch64": + folders = ["documentation", "host-linux-armv8", "target-linux-sbsa-armv8"] + elif arch == "ppc64le": + folders = ["documentation", "host-linux-ppc64le", "target-linux-ppc64le"] + elif arch == "x86_64": + folders = ["documentation", "host-linux-x64", "target-linux-x64"] + if os.path.exists(join_path("opt", "nvidia", "nsight-systems-cli")): + base_path = join_path("opt", "nvidia", "nsight-systems-cli") + elif os.path.exists(join_path("opt", "nvidia", "nsight-systems")): + base_path = join_path("opt", "nvidia", "nsight-systems") + else: + raise InstallError("Couldn't determine subdirectories to install.") + + for sd in folders: + shutil.copytree(join_path(base_path, ver, sd), join_path(prefix, sd)) + os.mkdir(join_path(prefix, "bin")) + if arch == "aarch64": + os.symlink( + join_path(prefix, "host-linux-armv8", "nsys-ui"), + join_path(prefix, "bin", "nsys-ui"), + ) + os.symlink( + join_path(prefix, "target-linux-sbsa-armv8", "nsys"), + join_path(prefix, "bin", "nsys"), + ) + elif arch == "ppc64le": + # `nsys-ui` is missing in the PowerPC version of the package. 
+ os.symlink( + join_path(prefix, "target-linux-ppc64le", "nsys"), join_path(prefix, "bin", "nsys") + ) + elif arch == "x86_64": + os.symlink( + join_path(prefix, "host-linux-x64", "nsys-ui"), join_path(prefix, "bin", "nsys-ui") + ) + os.symlink( + join_path(prefix, "target-linux-x64", "nsys"), join_path(prefix, "bin", "nsys") + ) From 504cc808d642a969d0e13242b505e6f8430a594f Mon Sep 17 00:00:00 2001 From: Kaan <61908449+kaanolgu@users.noreply.github.com> Date: Fri, 1 Nov 2024 00:42:40 +0000 Subject: [PATCH 008/208] Babelstream v5.0 Spack Package Updates (#41019) - Merging sycl2020usm and sycl2020acc into sycl2020 and the submodel=acc/usm variant is introduced - implementation is renamed to option - impl (fortran implementation options) renamed to foption - sycl_compiler_implementation and thrust_backend - stddata, stdindices, stdranges to a single std with std_submodel introduction - std_use_tbb to be boolean; also changed model filtering algorithm to make sure that it only picks model names - Modified comments to clear confusion with cuda_arch cc_ and sm_ prefix appends - Deleted duplicate of cuda_arch definition from +omp - CMAKE_CXX_COMPILER moved to be shared arg between all models except tbb and thrust - Replaced sys.exit with InstallError and created a dictionary to simplify things and eliminate excess code lines doing same checks - Replaced the -mcpu flags with -march since -mcpu is deprecated now - Replaced platform.machine with spec.target - Removing raja_backend, introducing openmp_flag, removing -march flags, clearing debugging print(), removing excess if ___ in self.spec.variants - [FIX] Issue where Thrust couldn't find the correct compiler (it requires nvcc) - [FIX] Fortran unsupported check to match the full string - [FIX] RAJA cuda_arch to be with sm_ not cc_ - dir= option is no longer needed for kokkos - dir is no longer needed - [omp] Adding clang support for nvidia offload - SYCL2020 offload to nvidia GPU - changing model dependency to be languages rather than build system - removing hardcoded arch flags and replacing with archspec - removing cpu_arch from acc model --------- Signed-off-by: Todd Gamblin Co-authored-by: Greg Becker Co-authored-by: Kaan Olgu Co-authored-by: Todd Gamblin --- .../builtin/packages/babelstream/package.py | 990 +++++++++++++----- 1 file changed, 723 insertions(+), 267 deletions(-) diff --git a/var/spack/repos/builtin/packages/babelstream/package.py b/var/spack/repos/builtin/packages/babelstream/package.py index ec85b2d3569bc4..b09fcc5f6e4a62 100644 --- a/var/spack/repos/builtin/packages/babelstream/package.py +++ b/var/spack/repos/builtin/packages/babelstream/package.py @@ -3,65 +3,107 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import re # To get the variant name after (+) - +import spack.build_systems.cmake +import spack.build_systems.makefile from spack.package import * -def find_model_flag(str): - res = re.findall(r"\+(\w+)", str) - if not res: - return "" - return res - - -class Babelstream(CMakePackage, CudaPackage, ROCmPackage): +class Babelstream(CMakePackage, CudaPackage, ROCmPackage, MakefilePackage): """Measure memory transfer rates to/from global device memory on GPUs. 
This benchmark is similar in spirit, and based on, the STREAM benchmark for CPUs.""" homepage = "https://github.com/UoB-HPC/BabelStream" - url = "https://github.com/UoB-HPC/BabelStream/archive/refs/tags/v4.0.tar.gz" + url = "https://github.com/UoB-HPC/BabelStream/archive/refs/tags/v5.0.tar.gz" git = "https://github.com/UoB-HPC/BabelStream.git" + version("5.0", sha256="1a418203fbfd95595bdc66047e2e39d8f1bba95a49725c9ecb907caf1af2521f") version("4.0", sha256="a9cd39277fb15d977d468435eb9b894f79f468233f0131509aa540ffda4f5953") + version("3.4", sha256="e34ee9d5ccdead019e3ea478333bcb7886117d600e5da8579a626f6ee34209cf") + version("3.3", sha256="4c89c805b277d52776feeb7a8eef7985a0d9295ce3e0bb2333bf715f724723cf") + version("3.2", sha256="20309b27ddd09ea37406bcc6f46fd32e9372bf3d145757e55938d19d69cdc49d") + version("3.1", sha256="be69e6085e8966e12aa2df897eea6254b172e5adfa03de0adbb89bc3065f4fbe") + version("3.0", sha256="776219c72e0fdc36f134e6975b68c7ab25f38206f8f8af84a6f9630648c24800") + version("1.0", sha256="3cfb9e45601f1f249878355c72baa6e6a61f6c811f8716d60b83c7fb544e1d5c") version("main", branch="main") - version("develop", branch="develop") - - depends_on("cxx", type="build") # generated - - maintainers("tomdeakin", "kaanolgu", "tom91136", "robj0nes") - + maintainers("tomdeakin", "kaanolgu", "tom91136") + # Previous maintainers: "robj0nes" + depends_on("cxx", type="build", when="languages=cxx") + depends_on("fortran", type="build", when="languages=fortran") # Languages - # Also supported variants are cuda and rocm (for HIP) - variant("sycl", default=False, description="Enable SYCL support") - variant("sycl2020", default=False, description="Enable SYCL support") - variant("omp", default=False, description="Enable OpenMP support") - variant("ocl", default=False, description="Enable OpenCL support") - variant("tbb", default=False, description="Enable TBB support") - variant("acc", default=False, description="Enable OpenACC support") - variant("thrust", default=False, description="Enable THRUST support") - variant("raja", default=False, description="Enable RAJA support") - variant("stddata", default=False, description="Enable STD-data support") - variant("stdindices", default=False, description="Enable STD-indices support") - variant("stdranges", default=False, description="Enable STD-ranges support") + # in the future it could be possible to add other languages too + variant( + "languages", + default="cxx", + values=("cxx", "fortran"), + description="Languages Babelstream Spack Package Support", + ) + # Build System + build_system( + conditional("cmake", when="languages=cxx"), + conditional("makefile", when="languages=fortran"), + default="cmake", + ) + with when("languages=cxx"): + # Also supported variants are cuda and rocm (for HIP) + # not included here because they are supplied via respective packages + variant("sycl", default=False, description="Enable SYCL support") + variant("sycl2020", default=False, description="Enable SYCL support") + variant("omp", default=False, description="Enable OpenMP support") + variant("ocl", default=False, description="Enable OpenCL support") + variant("tbb", default=False, description="Enable TBB support") + variant("acc", default=False, description="Enable OpenACC support") + variant("hip", default=False, description="Enable HIP support") + variant("thrust", default=False, description="Enable THRUST support") + variant("raja", default=False, description="Enable RAJA support") + variant("std", default=False, description="Enable STD support") # Some models need to 
have the programming model abstraction downloaded - # this variant enables a path to be provided. variant("dir", values=str, default="none", description="Enable Directory support") + variant( + "sycl2020_submodel", + values=("usm", "acc"), + when="+sycl2020", + default="usm", + description="SYCL2020 -> choose between usm and acc methods", + ) + variant( + "std_submodel", + values=("data", "indices", "ranges"), + when="+std", + default="data", + description="STD -> choose between data, indices and ranges models", + ) - # Kokkos conflict and variant - conflicts( - "dir=none", when="+kokkos", msg="KOKKKOS requires architecture to be specfied by dir=" + variant( + "sycl2020_offload", + values=("nvidia", "intel"), + default="intel", + when="+sycl2020", + description="Offloading to NVIDIA GPU or not", ) - variant("kokkos", default=False, description="Enable KOKKOS support") - # ACC conflict - variant("cpu_arch", values=str, default="none", description="Enable CPU Target for ACC") - variant("acc_target", values=str, default="none", description="Enable CPU Target for ACC") + variant( + "thrust_submodel", + values=("cuda", "rocm"), + default="cuda", + when="+thrust", + description="Which THRUST implementation to use, supported options include option= \ + - CUDA (via https://github.com/NVIDIA/thrust)\ + - ROCM (via https://github.com/ROCmSoftwarePlatform/rocThrust)", + ) + variant( + "thrust_backend", + values=("cuda", "omp", "tbb"), + default="cuda", + when="+thrust", + description="Which THRUST implementation to use, supported options include option", + ) + + # Kokkos variant + variant("kokkos", default=False, description="Enable KOKKOS support") # STD conflicts - conflicts("+stddata", when="%gcc@:10.1.0", msg="STD-data requires newer version of GCC") - conflicts("+stdindices", when="%gcc@:10.1.0", msg="STD-indices requires newer version of GCC") - conflicts("+stdranges", when="%gcc@:10.1.0", msg="STD-ranges requires newer version of GCC") + conflicts("+std", when="%gcc@:10.1.0", msg="STD requires newer version of GCC") # CUDA conflict conflicts( @@ -69,349 +111,763 @@ class Babelstream(CMakePackage, CudaPackage, ROCmPackage): when="+cuda", msg="CUDA requires architecture to be specfied by cuda_arch=", ) - variant("mem", values=str, default="DEFAULT", description="Enable MEM Target for CUDA") - # Raja Conflict variant( - "offload", values=str, default="none", description="Enable RAJA Target [CPU or NVIDIA]" - ) - conflicts( - "offload=none", - when="+raja", - msg="RAJA requires architecture to be specfied by acc_target=[CPU,NVIDIA]", + "cuda_memory_mode", + values=("default", "managed", "pagefault"), + default="default", + when="+cuda", + description="Enable MEM Target for CUDA", ) - # download raja from https://github.com/LLNL/RAJA + # OMP offload + variant("omp_offload", default=False, when="+omp", description="Enable OpenMP Target") + variant( + "omp_flags", + values=str, + default="none", + when="+omp", + description="If OFFLOAD is enabled, this *overrides* the default offload flags", + ) conflicts( - "dir=none", + "omp_flags=none", + when="+omp_offload", + msg="OpenMP requires offload flags to be specfied by omp_flags=", + ) + # Raja offload + variant( + "raja_offload", + values=("cpu", "nvidia"), + default="cpu", when="+raja", - msg="RAJA implementation requires architecture to be specfied by dir=", + description="Enable RAJA Target [CPU or NVIDIA] / Offload with custom settings for OpenMP", + ) + # std-* offload + variant( + "std_offload", + values=("nvhpc", "none"), + default="none", + 
when="+std", + description="Enable offloading support (via the non-standard `-stdpar`)\ + for the new NVHPC SDK", + ) + variant( + "std_onedpl_backend", + values=("openmp", "tbb", "dpcpp", "none"), + default="none", + when="+std", + description="Implements policies using OpenMP,TBB or dpc++", + ) + variant( + "std_use_tbb", + values=(True, False), + default=False, + when="+std", + description="No-op if ONE_TBB_DIR is set. Link against an in-tree oneTBB\ + via FetchContent_Declare, see top level CMakeLists.txt for details", + ) + variant( + "std_use_onedpl", + values=(True, False), + default=False, + when="+std", + description="Link oneDPL which implements C++17 executor policies\ + (via execution_policy_tag) for different backends", + ) + # hip memory mode + variant( + "hip_mem_mode", + values=("default", "managed", "pagefault"), + default="default", + when="+hip", + description="Enable MEM Target for HIP", + ) + # tbb use vector + variant( + "tbb_use_vector", + values=(True, False), + default=False, + when="+tbb", + description="Whether to use std::vector for storage or use aligned_alloc. \ + C++ vectors are *zero* initialised where as aligned_alloc is \ + uninitialised before first use.", ) # Thrust Conflict - # conflicts("~cuda", when="+thrust", msg="Thrust requires +cuda variant") depends_on("thrust", when="+thrust") - depends_on("rocthrust", when="+thrust implementation=rocm") - + depends_on("cuda", when="thrust_submodel=cuda") + depends_on("cuda", when="+raja raja_offload=nvidia") + depends_on("hip", when="+hip") + depends_on("rocthrust", when="thrust_submodel=rocm") + depends_on("intel-tbb", when="+std +std_use_tbb") + depends_on("intel-oneapi-dpl", when="+std +std_use_onedpl") + depends_on("intel-tbb", when="+std +std_use_onedpl") # TBB Dependency - depends_on("intel-oneapi-tbb", when="+tbb") - partitioner_vals = ["auto", "affinity", "static", "simple"] + depends_on("intel-tbb", when="+tbb") + variant( - "partitioner", - values=partitioner_vals, + "tbb_partitioner", + values=("auto", "affinity", "static", "simple"), default="auto", + when="+tbb", description="Partitioner specifies how a loop template should partition its work among threads.\ Possible values are:\ AUTO - Optimize range subdivision based on work-stealing events.\ AFFINITY - Proportional splitting that optimizes for cache affinity.\ STATIC - Distribute work uniformly with no additional load balancing.\ SIMPLE - Recursively split its range until it cannot be further subdivided.\ - See https://spec.oneapi.com/versions/latest/elements/oneTBB/source/algorithms.html#partitioners for more details.", + See https://spec.oneapi.com/versions/latest/elements/oneTBB/source/algorithms.html#partitioners", ) - # Kokkos Dependency - depends_on("kokkos@3.7.1", when="+kokkos") + # Kokkos & RAJA Dependency + cuda_archs = CudaPackage.cuda_arch_values + for sm_ in cuda_archs: + depends_on( + "kokkos +cuda +wrapper cuda_arch={0}".format(sm_), + when="kokkos_backend=cuda cuda_arch={0}".format(sm_), + ) + depends_on( + "raja +cuda cuda_arch={0}".format(sm_), + when="raja_offload=nvidia cuda_arch={0}".format(sm_), + ) + depends_on("kokkos +openmp", when="kokkos_backend=omp") + depends_on("raja +openmp", when="raja_offload=cpu") # OpenCL Dependency - - backends = { - "ocl": [ - ("amd", "rocm-opencl", "enable ROCM backend"), - ("cuda", "cuda", "enable Cuda backend"), - ("intel", "intel-oneapi-compilers", "enable Intel backend"), - ("pocl", "pocl@1.5", "enable POCL backend"), - ], - "kokkos": [ - ("cuda", "cuda", "enable Cuda backend"), - 
("omp", "none", "enable Cuda backend"), - ], - } - backend_vals = ["none"] - for lang in backends: - for item in backends[lang]: - backend, dpdncy, descr = item - backend_vals.append(backend.lower()) - - variant("backend", values=backend_vals, default="none", description="Enable backend support") - - for lang in backends: - for item in backends[lang]: - backend, dpdncy, descr = item - if dpdncy.lower() != "none": - depends_on("%s" % dpdncy.lower(), when="backend=%s" % backend.lower()) - # this flag could be used in all required languages - variant("flags", values=str, default="none", description="Additional CXX flags to be provided") - - # comp_impl_vals=["ONEAPI-DPCPP","DPCPP","HIPSYCL","COMPUTECPP"] variant( - "implementation", - values=str, + "ocl_backend", + values=("amd", "cuda", "intel", "pocl", "none"), default="none", - description="Compile using the specified SYCL compiler option", + when="+ocl", + description="Enable Backend Target for OpenCL", ) - - conflicts( - "implementation=none", - when="+sycl", - msg="SYCL requires compiler implementation to be specified by option=", + variant( + "kokkos_backend", + values=("cuda", "omp", "none"), + default="none", + when="+kokkos", + description="Enable Backend Target for kokkos", ) conflicts( - "implementation=none", - when="+thrust", - msg="Which Thrust implementation to use, supported options include:\ - - CUDA (via https://github.com/NVIDIA/thrust)\ - - ROCM (via https://github.com/ROCm/rocThrust)", + "ocl_backend=none", + when="+ocl", + msg="OpenCL implementation requires backend to be specfied by ocl_backend=", ) + # depends_on("rocm-opencl@6.0.2", when="+ocl ocl_backend=amd") + depends_on("cuda", when="+ocl ocl_backend=cuda") + depends_on("cuda", when="+sycl2020 sycl2020_offload=nvidia") + depends_on("intel-oneapi-compilers", when="+ocl ocl_backend=intel") + depends_on("pocl@1.5", when="+ocl ocl_backend=pocl") + + variant( + "cuda_extra_flags", + values=str, + default="none", + description="Additional CUDA Compiler flags to be provided", + ) + + # CMake specific dependency + with when("build_system=cmake"): + depends_on("cmake@3.14.0:", type="build") # This applies to all - depends_on("cmake@3.14.0:", type="build") depends_on("opencl-c-headers", when="+ocl") + # Fortran related configurations + with when("languages=fortran"): + implementation_vals = [ + "DoConcurrent", + "Array", + "OpenMP", + "OpenMPWorkshare", + "OpenMPTarget", + "OpenMPTargetLoop", + "OpenMPTaskloop", + "OpenACC", + "OpenACCArray", + "CUDA", + "CUDAKernel", + "Sequential", + ] + variant( + "foption", + values=implementation_vals, + default="Sequential", + description="Implementation", + ) + # The fortran Makefile is inside the src/fortran so we need to address this + build_directory = "src/fortran" + build_name = "" + variant( + "fortran_flags", + values=str, + default="none", + description="Additional Fortran flags to be provided", + ) + + +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): def cmake_args(self): - # convert spec to string to work on it - spec_string = str(self.spec) - - # take only the first portion of the spec until space - spec_string_truncate = spec_string.split(" ", 1)[0] - model_list = find_model_flag(spec_string_truncate) # Prints out ['cuda', 'thrust'] - - if len(model_list) > 1: - ignore_list = ["cuda"] # if +acc is provided ignore the cuda model - model = list(set(model_list) - set(ignore_list)) - # We choose 'thrust' from the list of ['cuda', 'thrust'] - args = ["-DMODEL=" + model[0]] + model_list = [ + "sycl", + "sycl2020", 
+ "omp", + "cuda", + "ocl", + "tbb", + "acc", + "hip", + "thrust", + "raja", + "std", + "kokkos", + ] + # for +acc and +thrust the CudaPackage appends +cuda variant too so we need + # to filter cuda from list e.g. we choose 'thrust' + # from the list of ['cuda', 'thrust'] + model_names = [name for name in model_list if f"+{name}" in self.spec] + print("model names : ", model_names) + if len(model_names) > 1: + model_names = [elem for elem in model_names if (elem != "cuda" and elem != "rocm")] + if "std" in model_names[0]: + args = ["-DMODEL=" + "std-" + self.spec.variants["std_submodel"].value] + elif "sycl2020" in model_names[0]: # this is for nvidia offload + args = ["-DMODEL=" + "sycl2020-" + self.spec.variants["sycl2020_submodel"].value] + else: + args = ["-DMODEL=" + model_names[0]] else: - # if it is +stddata,indices etc. we need to pass it - # as std-data to the CMake compiler - # do some alterations here - if "std" in model_list[0]: - args = ["-DMODEL=" + "std-" + model_list[0].split("d", 1)[1]] + # do some alterations here to append sub models too + if "std" in model_names[0]: + args = ["-DMODEL=" + "std-" + self.spec.variants["std_submodel"].value] + elif "sycl2020" in model_names[0]: + args = ["-DMODEL=" + "sycl2020-" + self.spec.variants["sycl2020_submodel"].value] + print(args) + elif "rocm" in model_names[0]: + args = ["-DMODEL=hip"] else: - args = ["-DMODEL=" + model_list[0]] + args = ["-DMODEL=" + model_names[0]] + if model_names[0] != "tbb" and model_names[0] != "thrust": + args.append("-DCMAKE_CXX_COMPILER=" + spack_cxx) # =================================== # ACC # =================================== - if ("+acc" in self.spec) and ("~cuda" in self.spec): - args.append("-DCMAKE_CXX_COMPILER=" + self.compiler.cxx) - if "cuda_arch" in self.spec.variants: - cuda_arch_list = self.spec.variants["cuda_arch"].value - # the architecture value is only number so append sm_ to the name - cuda_arch = "cc" + cuda_arch_list[0] - args.append("-DTARGET_DEVICE=gpu") - args.append("-DCUDA_ARCH=" + cuda_arch) - elif "cpu_arch" in self.spec.variants: - cpu_arch_list = self.spec.variants["cpu_arch"].value - # the architecture value is only number so append sm_ to the name - cpu_arch = cpu_arch_list[0] - args.append("-DTARGET_DEVICE=multicore") - args.append("-DTARGET_PROCESSOR=" + cpu_arch) - + """ + register_flag_optional(TARGET_DEVICE + "[PGI/NVHPC only] This sets the `-target` flag, possible values are: + gpu - Globally set the target device to an NVIDIA GPU + multicore - Globally set the target device to the host CPU + Refer to `nvc++ --help` for the full list" + register_flag_optional(CUDA_ARCH + "[PGI/NVHPC only] Only applicable if `TARGET_DEVICE` is set to `gpu`. 
+ Nvidia architecture in ccXY format, for example, sm_70 becomes cc70, + will be passed in via `-gpu=` (e.g `cc70`) + Possible values are: + cc35 - Compile for compute capability 3.5 + cc50 - Compile for compute capability 5.0 + cc60 - Compile for compute capability 6.0 + cc62 - Compile for compute capability 6.2 + cc70 - Compile for compute capability 7.0 + cc72 - Compile for compute capability 7.2 + cc75 - Compile for compute capability 7.5 + cc80 - Compile for compute capability 8.0 + ccall - Compile for all supported compute capabilities + Refer to `nvc++ --help` for the full list" + "") + +register_flag_optional(TARGET_PROCESSOR + "[PGI/NVHPC only] This sets the `-tp` (target processor) flag, possible values are: + px - Generic x86 Processor + bulldozer - AMD Bulldozer processor + piledriver - AMD Piledriver processor + zen - AMD Zen architecture (Epyc, Ryzen) + zen2 - AMD Zen 2 architecture (Ryzen 2) + sandybridge - Intel SandyBridge processor + haswell - Intel Haswell processor + knl - Intel Knights Landing processor + skylake - Intel Skylake Xeon processor + host - Link native version of HPC SDK cpu math library + native - Alias for -tp host + Refer to `nvc++ --help` for the full list" + "") + """ + if self.spec.satisfies("+acc~kokkos~raja"): + if (self.spec.compiler.name == "nvhpc") or (self.spec.compiler.name == "pgi"): + target_device = "gpu" if "cuda_arch" in self.spec.variants else "multicore" + if "cuda_arch" in self.spec.variants: + cuda_arch_list = self.spec.variants["cuda_arch"].value + # the architecture value is only number so append cc_ to the name + cuda_arch = "cc" + cuda_arch_list[0] + # args.append( + # "-DCXX_EXTRA_FLAGS=" + "-target=" + target_device + "-gpu=" + cuda_arch + # ) + args.append("-DCUDA_ARCH=" + cuda_arch) + else: + # get the cpu architecture value from user + target_processor = str( + self.spec.target + ) # self.spec.variants["cpu_arch"].value[0] + args.append("-DTARGET_PROCESSOR=" + target_processor) + # args.append( + # "-DCXX_EXTRA_FLAGS=" + # + "-target=" + # + target_device + # + "-tp=" + # + target_processor + # ) + args.append("-DTARGET_DEVICE=" + target_device) # =================================== # STDdata,STDindices,STDranges # =================================== - std_list = ["+stddata", "+stdindices", "+stdranges"] - if spec_string.startswith(tuple(std_list)): - args.append("-DCMAKE_CXX_COMPILER=" + self.compiler.cxx) + + if "+std" in self.spec: + if self.spec.satisfies("+std_use_tbb"): + args.append("-DCXX_EXTRA_FLAGS=-ltbb") + if self.spec.satisfies("+std_use_onedpl"): + # args.append("-DCXX_EXTRA_FLAGS=-ltbb") + # args.append("-DCXX_EXTRA_FLAGS=-loneDPL") + args.append( + "-DUSE_ONEDPL=" + self.spec.variants["std_onedpl_backend"].value.upper() + ) + if self.spec.variants["std_offload"].value != "none": + # the architecture value is only number so append cc_ to the name + cuda_arch = "cc" + self.spec.variants["cuda_arch"].value[0] + args.append("-DNVHPC_OFFLOAD=" + cuda_arch) # =================================== # CUDA # =================================== - - if ("+cuda" in self.spec) and ("~kokkos" in self.spec) and ("~acc" in self.spec): + if self.spec.satisfies("+cuda~kokkos~acc~omp~thrust~raja"): # Set up the cuda macros needed by the build cuda_arch_list = self.spec.variants["cuda_arch"].value + # "-DCUDA_ARCH" requires sm_ # the architecture value is only number so append sm_ to the name cuda_arch = "sm_" + cuda_arch_list[0] args.append("-DCUDA_ARCH=" + cuda_arch) cuda_dir = self.spec["cuda"].prefix cuda_comp = cuda_dir + 
"/bin/nvcc" args.append("-DCMAKE_CUDA_COMPILER=" + cuda_comp) - args.append("-DMEM=" + self.spec.variants["mem"].value) - if self.spec.variants["flags"].value != "none": - args.append("-DCUDA_EXTRA_FLAGS=" + self.spec.variants["flags"].value) + args.append("-DMEM=" + self.spec.variants["cuda_memory_mode"].value.upper()) + if self.spec.variants["cuda_extra_flags"].value != "none": + args.append("-DCUDA_EXTRA_FLAGS=" + self.spec.variants["cuda_extra_flags"].value) # =================================== # OMP # =================================== # `~kokkos` option is there to prevent +kokkos +omp setting to use omp directly from here # Same applies for raja - if ("+omp" in self.spec) and ("~kokkos" in self.spec) and ("~raja" in self.spec): - args.append("-DCMAKE_CXX_COMPILER=" + self.compiler.cxx) - if "cuda_arch" in self.spec.variants: - cuda_arch_list = self.spec.variants["cuda_arch"].value - # the architecture value is only number so append sm_ to the name - cuda_arch = "sm_" + cuda_arch_list[0] - args.append("-DOFFLOAD= " + "NVIDIA:" + cuda_arch) - elif "amdgpu_target" in self.spec.variants: - rocm_arch = self.spec.variants["amdgpu_target"].value - # the architecture value is only number so append sm_ to the name - args.append("-DOFFLOAD=" + " AMD:" + rocm_arch) + if self.spec.satisfies("+omp~kokkos~raja"): + args.append("-DCMAKE_C_COMPILER=" + spack_cc) + if self.spec.satisfies("~omp_offload"): + args.append("-DOFFLOAD=" + "OFF") + # Check if the omp_flags variant is not set to "none" + args.append( + "-DCMAKE_CXX_FLAGS=" + + self.pkg.compiler.openmp_flag + + " " + + ( + self.spec.variants["omp_flags"].value + if self.spec.variants["omp_flags"].value != "none" + else "" + ) + ) else: - args.append("-DOFFLOAD=" + "INTEL") + offload_args = "" + args.append("-DOFFLOAD=ON") + if "cuda_arch" in self.spec.variants: + if self.spec.satisfies("%nvhpc"): + cuda_arch = "cc" + self.spec.variants["cuda_arch"].value[0] + offload_args = " -mp=gpu;" + "-gpu=" + cuda_arch + " " + if self.spec.satisfies("%clang"): + cuda_arch = "sm_" + self.spec.variants["cuda_arch"].value[0] + offload_args = "-fopenmp;--offload-arch=" + cuda_arch + elif ("amdgpu_target" in self.spec.variants) and ( + self.spec.variants["amdgpu_target"].value != "none" + ): + offload_args = ( + ";--offload-arch=" + self.spec.variants["amdgpu_target"].value[0] + ) + + args.append( + "-DOFFLOAD_FLAGS=" + + self.pkg.compiler.openmp_flag + + ";" + + offload_args + + ";" + + self.spec.variants["omp_flags"].value + ) # =================================== - # SYCL + # SYCL # =================================== - if self.spec.satisfies("+sycl"): - args.append("-DSYCL_COMPILER=" + self.spec.variants["implementation"].value.upper()) - if self.spec.variants["implementation"].value.upper() != "ONEAPI-DPCPP": - args.append( - "-DSYCL_COMPILER_DIR=" + self.spec.variants["implementation"].value.upper() - ) - if self.spec.variants["implementation"].value.upper() == "COMPUTE-CPP": - args.append("-DOpenCL_LIBRARY=") + if "+sycl" in self.spec: + if self.spec.satisfies("%oneapi"): + # -fsycl flag is required for setting up sycl/sycl.hpp seems like + # it doesn't get it from the CMake file + args.append("-DSYCL_COMPILER=ONEAPI-ICPX") + args.append("-DCXX_EXTRA_FLAGS= -fsycl") + elif self.spec.satisfies("%clang"): + # this requires the clang inside oneapi installation + args.append("-DSYCL_COMPILER=ONEAPI-Clang") + args.append("-DCXX_EXTRA_FLAGS= -fsycl") + else: + args.append("-DSYCL_COMPILER=HIPSYCL") + args.append("-DSYCL_COMPILER_DIR=" + 
self.spec.variants["dir"].value) + args.append("-DCXX_EXTRA_FLAGS= -fsycl") # =================================== - # SYCL 2020 + # SYCL 2020 # =================================== - if self.spec.satisfies("+sycl2020"): + if "+sycl2020" in self.spec: if self.spec.satisfies("%oneapi"): # -fsycl flag is required for setting up sycl/sycl.hpp seems like # it doesn't get it from the CMake file - args.append("-DCXX_EXTRA_FLAGS= -fsycl -O3") - # this is required to enable -DCMAKE_CXX_COMPILER=icpx flag from CMake args.append("-DSYCL_COMPILER=ONEAPI-ICPX") + args.append("-DCXX_EXTRA_FLAGS= -fsycl") + elif self.spec.satisfies("%clang"): + # this requires the clang inside oneapi installation + args.append("-DSYCL_COMPILER=ONEAPI-Clang") + args.append("-DCXX_EXTRA_FLAGS= -fsycl") else: + args.append("-DSYCL_COMPILER=HIPSYCL") + args.append("-DSYCL_COMPILER_DIR=" + self.spec.variants["dir"].value) + args.append("-DCXX_EXTRA_FLAGS= -fsycl") + # if self.spec.variants["flags"].value != "none": + if self.spec.variants["sycl2020_offload"].value == "nvidia": + cuda_dir = self.spec["cuda"].prefix + cuda_arch = "sm_" + self.spec.variants["cuda_arch"].value[0] args.append( - "-DSYCL_COMPILER=" + self.spec.variants["implementation"].value.upper() - ) - if self.spec.variants["implementation"].value.upper() != "ONEAPI-DPCPP": - args.append( - "-DSYCL_COMPILER_DIR=" + self.spec.variants["implementation"].value.upper() + "-DCXX_EXTRA_FLAGS=" + + "-fsycl;-fsycl-targets=nvptx64-nvidia-cuda;" + + self.spec.target.optimization_flags( + self.spec.compiler.name, str(self.spec.compiler.version) ) - if self.spec.variants["implementation"].value.upper() == "COMPUTE-CPP": - args.append("-DOpenCL_LIBRARY=") + + " --cuda-path=" + + cuda_dir + ) # =================================== # HIP(ROCM) # =================================== - if self.spec.satisfies("+rocm"): - hip_comp = self.spec["rocm"].prefix + "/bin/hipcc" + if "+hip" in self.spec: + hip_comp = self.spec["hip"].prefix + "/bin/hipcc" + offload_arch = str(self.spec.variants["amdgpu_target"].value[0]) + args.append("-DCMAKE_CXX_COMPILER=" + hip_comp) - args.append( - "-DCXX_EXTRA_FLAGS= --offload-arch=" - + self.spec.variants["amdgpu_target"].value - + " " - + self.spec.variants["flags"].value - + " -O3" - ) + args.append(f"-DCXX_EXTRA_FLAGS=--offload-arch={offload_arch} -O3") + if str(self.spec.variants["hip_mem_mode"].value) != "none": + args.append("-DMEM=" + self.spec.variants["hip_mem_mode"].value.upper()) # =================================== # TBB # =================================== - if self.spec.satisfies("+tbb"): - args.append("-DONE_TBB_DIR=" + self.spec["tbb"].prefix + "/tbb/latest/") - args.append("-DPARTITIONER=" + self.spec.variants["partitioner"].value.upper()) + if "+tbb" in self.spec: + args.append("-DONE_TBB_DIR=" + self.spec["intel-tbb"].prefix + "/tbb/latest/") + args.append("-DCXX_EXTRA_FLAGS=-ltbb") + args.append("-DPARTITIONER=" + self.spec.variants["tbb_partitioner"].value.upper()) + if self.spec.satisfies("+tbb_use_vector"): + args.append("-DUSE_VECTOR=ON") # =================================== # OpenCL (ocl) # =================================== - if self.spec.satisfies("+ocl"): - if "backend" in self.spec.variants: - if "cuda" in self.spec.variants["backend"].value: - cuda_dir = self.spec["cuda"].prefix - args.append("-DOpenCL_LIBRARY=" + cuda_dir + "/lib64/libOpenCL.so") - elif "amd" in self.spec.variants["backend"].value: - rocm_dir = self.spec["rocm-opencl"].prefix - args.append("-DOpenCL_LIBRARY=" + rocm_dir + "/lib64/libOpenCL.so") - 
elif "intel" in self.spec.variants["backend"].value:
-                intel_lib = (
-                    self.spec["intel-oneapi-compilers"].prefix
-                    + "/compiler/2023.0.0/linux/lib/libOpenCL.so"
-                )
-                args.append("-DOpenCL_LIBRARY=" + intel_lib)
-            elif "pocl" in self.spec.variants["backend"].value:
-                args.append("-DCMAKE_CXX_COMPILER=" + self.compiler.cxx)
-                pocl_lib = self.spec["pocl"].prefix + "/lib64/libOpenCL.so"
-                args.append("-DOpenCL_LIBRARY=" + pocl_lib)
-            args.append("-DCMAKE_CXX_COMPILER=" + self.compiler.cxx)
+
+        if "+ocl" in self.spec:
+            if "cuda" in self.spec.variants["ocl_backend"].value:
+                cuda_dir = self.spec["cuda"].prefix
+                args.append("-DOpenCL_LIBRARY=" + cuda_dir + "/lib64/libOpenCL.so")
+            elif "amd" in self.spec.variants["ocl_backend"].value:
+                rocm_dir = self.spec["rocm-opencl"].prefix
+                args.append("-DOpenCL_LIBRARY=" + rocm_dir + "/lib64/libOpenCL.so")
+            elif "intel" in self.spec.variants["ocl_backend"].value:
+                intel_lib = (
+                    self.spec["intel-oneapi-compilers"].prefix
+                    + "/compiler/"
+                    + str(self.spec["intel-oneapi-compilers"].version)
+                    + "/linux/lib/libOpenCL.so"
+                )
+                args.append("-DOpenCL_LIBRARY=" + intel_lib)
+            elif "pocl" in self.spec.variants["ocl_backend"].value:
+                pocl_lib = self.spec["pocl"].prefix + "/lib64/libOpenCL.so"
+                args.append("-DOpenCL_LIBRARY=" + pocl_lib)
         # ===================================
         # RAJA
         # ===================================
-        if self.spec.satisfies("+raja"):
-            args.append("-DCMAKE_CXX_COMPILER=" + self.compiler.cxx)
-            args.append("-DRAJA_IN_TREE=" + self.spec.variants["dir"].value)
-            if "offload" in self.spec.variants:
-                if "nvidia" in self.spec.variants["offload"].value:
-                    cuda_dir = self.spec["cuda"].prefix
-                    cuda_comp = cuda_dir + "/bin/nvcc"
-                    args.append("-DCMAKE_CUDA_COMPILER=" + cuda_comp)
-                    args.append("-DTARGET=NVIDIA")
-                    cuda_arch_list = self.spec.variants["cuda_arch"].value
-                    cuda_arch = "sm_" + cuda_arch_list[0]
-                    args.append("-DCUDA_ARCH=" + cuda_arch)
-                    args.append("DCUDA_TOOLKIT_ROOT_DIR=" + self.spec["cuda"].prefix)
-                    if self.spec.variants["flags"].value != "none":
-                        args.append("-DCUDA_EXTRA_FLAGS=" + self.spec.variants["flags"].value)
-            # if("cpu" in self.spec.variants['offload'].value):
+        if "+raja" in self.spec:
+            args.append("-DCMAKE_C_COMPILER=" + spack_cc)
+            args.append("-DRAJA_IN_PACKAGE=" + self.spec["raja"].prefix)
+            if "nvidia" in self.spec.variants["raja_offload"].value:
+                cuda_comp = self.spec["cuda"].prefix + "/bin/nvcc"
+                args.append("-DTARGET=NVIDIA")
+                cuda_arch = "sm_" + self.spec.variants["cuda_arch"].value[0]
+                args.append("-DCUDA_ARCH=" + cuda_arch)
-            if "omp" in self.spec.variants["backend"].value:
-                args.append("-DENABLE_OPENMP=ON")
-            if "cuda" in self.spec.variants["backend"].value:
                 args.append("-DENABLE_CUDA=ON")
+                args.append("-DCUDA_TOOLKIT_ROOT_DIR=" + self.spec["cuda"].prefix)
+                if self.spec.variants["cuda_extra_flags"].value != "none":
+                    args.append(
+                        "-DCMAKE_CUDA_FLAGS=" + self.spec.variants["cuda_extra_flags"].value
+                    )
         # ===================================
         # THRUST
         # ===================================
-        if self.spec.satisfies("+thrust"):
-            if "cuda" in self.spec.variants["implementation"].value:
-                args.append("-DTHRUST_IMPL=" + self.spec.variants["implementation"].value.upper())
+
+        if "+thrust" in self.spec:
+            if "cuda" in self.spec.variants["thrust_submodel"].value:
+                args.append("-DTHRUST_IMPL=" + self.spec.variants["thrust_submodel"].value.upper())
+                args.append("-DSDK_DIR=" + self.spec["thrust"].prefix + "/include")
-                cuda_arch_list = self.spec.variants["cuda_arch"].value
-                # the architecture value is only number so append sm_ to the name
-                cuda_arch = "sm_" + cuda_arch_list[0]
-                args.append("-DCUDA_ARCH=" + cuda_arch)
+                # this model uses CMAKE_CUDA_ARCHITECTURES, which only needs the
+                # bare cuda_arch number -- no need to prepend sm_ or cc
+                args.append("-DCUDA_ARCH=" + self.spec.variants["cuda_arch"].value[0])
                 cuda_dir = self.spec["cuda"].prefix
                 cuda_comp = cuda_dir + "/bin/nvcc"
                 args.append("-DCMAKE_CUDA_COMPILER=" + cuda_comp)
-                args.append("-DBACKEND=" + self.spec.variants["backend"].value.upper())
-                if self.spec.variants["flags"].value != "none":
-                    args.append("-DCUDA_EXTRA_FLAGS=" + self.spec.variants["flags"].value)
-
-            if "rocm" in self.spec.variants["implementation"].value:
-                args.append("-DTHRUST_IMPL=" + self.spec.variants["implementation"].value.upper())
+                # args.append("-DCMAKE_CUDA_COMPILER=" + spack_cxx)
+                # args.append("-DCMAKE_CUDA_FLAGS=-ccbin " + spack_cc)
+                args.append("-DBACKEND=" + self.spec.variants["thrust_backend"].value.upper())
+                if self.spec.variants["cuda_extra_flags"].value != "none":
+                    args.append(
+                        "-DCUDA_EXTRA_FLAGS=" + self.spec.variants["cuda_extra_flags"].value
+                    )
+            if "rocm" in self.spec.variants["thrust_submodel"].value:
+                args.append("-DCMAKE_CXX_COMPILER=" + self.spec["hip"].hipcc)
+                args.append("-DTHRUST_IMPL=" + self.spec.variants["thrust_submodel"].value.upper())
                 args.append("-DSDK_DIR=" + self.spec["rocthrust"].prefix)
-                args.append("-DBACKEND=" + self.spec.variants["backend"].value.upper())
         # ===================================
         # kokkos
         # ===================================
         # kokkos is versatile and can use either cuda or omp as its backend
-        # The usage should be spack install babelstream +kokkos +cuda [or +omp]
-        if self.spec.satisfies("+kokkos"):
-            args.append("-DCMAKE_CXX_COMPILER=" + self.compiler.cxx)
-            args.append("-DKOKKOS_IN_TREE=" + self.spec.variants["dir"].value)
-            # args.append("-DKOKKOS_IN_PACKAGE=" + self.spec["kokkos"].prefix)
-            if "backend" in self.spec.variants:
-                if "cuda" in self.spec.variants["backend"].value:
-                    args.append("-DKokkos_ENABLE_CUDA=ON")
-                    cuda_arch_list = self.spec.variants["cuda_arch"].value
-                    cuda_arch = cuda_arch_list[0]
-                    # arhitecture kepler optimisations
-                    if cuda_arch in ("30", "32", "35", "37"):
-                        args.append("-D" + "Kokkos_ARCH_KEPLER" + cuda_arch + "=ON")
-                    # arhitecture maxwell optimisations
-                    if cuda_arch in ("50", "52", "53"):
-                        args.append("-D" + "Kokkos_ARCH_MAXWELL" + cuda_arch + "=ON")
-                    # arhitecture pascal optimisations
-                    if cuda_arch in ("60", "61"):
-                        args.append("-D" + "Kokkos_ARCH_PASCAL" + cuda_arch + "=ON")
-                    # architecture volta optimisations
-                    if cuda_arch in ("70", "72"):
-                        args.append("-D" + "Kokkos_ARCH_VOLTA" + cuda_arch + "=ON")
-                    if cuda_arch == "75":
-                        args.append("-DKokkos_ARCH_TURING75=ON")
-                if "omp" in self.spec.variants["backend"].value:
-                    args.append("-DKokkos_ENABLE_OPENMP=ON")
+
+        # The usage should be: spack install babelstream +kokkos kokkos_backend=[cuda or omp]
+        if "+kokkos" in self.spec:
+            args.append("-DCMAKE_C_COMPILER=" + spack_cc)
+            args.append("-DKOKKOS_IN_PACKAGE=" + self.spec["kokkos"].prefix)
+            if "cuda" in self.spec.variants["kokkos_backend"].value:
+                # args.append("-DCMAKE_CXX_COMPILER=" + self.spec["cuda"].nvcc)
+                args.append("-DCMAKE_CXX_COMPILER=" + spack_cxx)
+                args.append("-DKokkos_ENABLE_CUDA=ON")
+                int_cuda_arch = int(self.spec.variants["cuda_arch"].value[0])
+                # architecture kepler optimisations
+                if int_cuda_arch in (30, 32, 35, 37):
+                    args.append("-D" + "Kokkos_ARCH_KEPLER" + str(int_cuda_arch) + "=ON")
+                # architecture maxwell optimisations
+                if int_cuda_arch in (50, 52, 53):
+                    args.append("-D" + "Kokkos_ARCH_MAXWELL" + str(int_cuda_arch) + "=ON")
+                # architecture pascal optimisations
+                if int_cuda_arch in (60, 61):
+                    args.append("-D" + "Kokkos_ARCH_PASCAL" + str(int_cuda_arch) + "=ON")
+                # architecture volta optimisations
+                if int_cuda_arch in (70, 72):
+                    args.append("-D" + "Kokkos_ARCH_VOLTA" + str(int_cuda_arch) + "=ON")
+                if int_cuda_arch == 75:
+                    args.append("-DKokkos_ARCH_TURING75=ON")
+                if int_cuda_arch == 80:
+                    args.append("-DKokkos_ARCH_AMPERE80=ON")
+            if "omp" in self.spec.variants["kokkos_backend"].value:
+                args.append("-DKokkos_ENABLE_OPENMP=ON")

         # if the chosen model is not one of kokkos/raja/acc/hip, force the C++ compiler
         if not set(model_names).intersection(["kokkos", "raja", "acc", "hip"]):
             args.append("-DCMAKE_CXX_COMPILER_FORCED=True")

         return args
+
+
+class MakefileBuilder(spack.build_systems.makefile.MakefileBuilder):
+    build_directory = "src/fortran"
+
+    # Generate compiler-specific include files for the Makefile
+    def edit(self, pkg, spec, prefix):
+        config = {
+            "FC": pkg.compiler.fc,
+            "FCFLAGS": "",
+            "ARCH": spec.target.family,
+            "DOCONCURRENT_FLAG": "",
+            "ARRAY_FLAG": "",
+            "OPENMP_FLAG": "",
+            "OPENACC_FLAG": "",
+            "CUDA_FLAG": "",
+            "SEQUENTIAL_FLAG": "",
+        }
+        # Dictionary mapping compiler names to unsupported options
+        unsupported_options = {
+            "arm": ["CUDA", "CUDAKernel", "OpenACC", "OpenACCArray"],
+            "aocc": ["CUDA", "CUDAKernel"],
+            "cce": ["CUDA", "CUDAKernel"],
+            "gcc": ["CUDA", "CUDAKernel"],
+            "nvhpc": ["OpenMPTaskloop"],
+            "oneapi": ["CUDA", "CUDAKernel", "OpenACC", "OpenACCArray"],
+            "fj": ["CUDA", "CUDAKernel", "OpenACC"],
+        }
+
+        # Reject foption values the chosen compiler cannot build
+        unsupported_value = self.spec.variants["foption"].value
+        compiler_name = spec.compiler.name
+        unsupported = unsupported_value in unsupported_options.get(compiler_name, [])
+        if unsupported:
+            raise InstallError(
+                f"{unsupported_value} is not supported by the {compiler_name} compiler"
+            )
+        # ===================================
+        # ARM
+        # ===================================
+        if spec.compiler.name == "arm":
+            fortran_flags = (
+                "-std=f2018 " + pkg.compiler.opt_flags[4] + " -Wall -Wno-unused-variable"
+            )
+            # note the leading space so the flags do not run together
+            fortran_flags += " " + self.spec.target.optimization_flags(
+                self.spec.compiler.name, str(self.spec.compiler.version)
+            )
+
+            config["FCFLAGS"] = fortran_flags
+            config["DOCONCURRENT_FLAG"] = pkg.compiler.openmp_flag  # libomp.so required
+            config["ARRAY_FLAG"] = pkg.compiler.openmp_flag  # libomp.so required
+            config["OPENMP_FLAG"] = pkg.compiler.openmp_flag  # libomp.so required
+            config["OPENACC_FLAG"] = "-fopenacc"
+
+        # ===================================
+        # AMD
+        # ===================================
+        if spec.compiler.name == "aocc":
+            fortran_flags = (
+                "-std=f2018 " + pkg.compiler.opt_flags[3] + " -Wall -Wno-unused-variable"
+            )
+            config["FCFLAGS"] = fortran_flags
+            config["DOCONCURRENT_FLAG"] = pkg.compiler.openmp_flag  # libomp.so required
+            config["ARRAY_FLAG"] = pkg.compiler.openmp_flag  # libomp.so required
+            config["OPENMP_FLAG"] = pkg.compiler.openmp_flag  # libomp.so required
+            config["OPENACC_FLAG"] = "-fopenacc"
+
+        # ===================================
+        # CRAY
+        # ===================================
+        if spec.compiler.name == "cce":
+            fortran_flags = "-e F -O3"
+            config["FCFLAGS"] = fortran_flags
+            config["DOCONCURRENT_FLAG"] = "-h thread_do_concurrent -DCRAY_THREAD_DOCONCURRENT"
+            config["ARRAY_FLAG"] = "-h autothread"
+            config["OPENMP_FLAG"] = pkg.compiler.openmp_flag
+            config["OPENACC_FLAG"] = "-h acc"  # for cpu only -h omp
+
+        # ===================================
+        # GCC
+        # ===================================
+        if spec.compiler.name == "gcc":
+            fortran_flags = "-std=f2018 -O3 "
+            fortran_flags += "-Wall -Wno-unused-dummy-argument -Wno-unused-variable "
+            fortran_flags += self.spec.target.optimization_flags(
+                self.spec.compiler.name, str(self.spec.compiler.version)
+            )
+
+            config["FCFLAGS"] = fortran_flags
+            config["DOCONCURRENT_FLAG"] = "-ftree-parallelize-loops=4"
+            config["OPENMP_FLAG"] = pkg.compiler.openmp_flag
+            config["OPENACC_FLAG"] = "-fopenacc"
+
+        # ===================================
+        # NVHPC
+        # ===================================
+        if spec.compiler.name == "nvhpc":
+            fortran_flags = pkg.compiler.opt_flags[4]  # for -O3
+            # FCFLAGS := -O3 -Minform=inform -Minfo=all
+            fortran_flags += " -Minform=warn "
+            TARGET = "gpu"  # target = "multicore"
+            config["TARGET"] = TARGET
+            GPUFLAG = ""  # only set when a CUDA architecture is requested
+            if "cuda_arch" in self.spec.variants:
+                cuda_arch_list = self.spec.variants["cuda_arch"].value
+                # the architecture value is a bare number, so prefix it with cc
+                cuda_arch = "cc" + cuda_arch_list[0]
+                GPUFLAG = " -gpu=" + cuda_arch
+            fortran_flags += "-tp=" + str(spec.target)
+            # this is to allow apples-to-apples comparison with DC in non-DC GPU impls
+            # set exactly one of these pairs!
+            # MANAGED = "-DUSE_MANAGED -gpu=managed"
+            # DEVICE=""
+            # ------------
+            DEVICE = "-DUSE_DEVICE -cuda -gpu=nomanaged"
+            MANAGED = ""
+            config["FCFLAGS"] = fortran_flags
+            config["DOCONCURRENT_FLAG"] = GPUFLAG + " -stdpar=" + TARGET + " " + DEVICE
+            config["ARRAY_FLAG"] = GPUFLAG + " -stdpar=" + TARGET + " " + MANAGED
+            config["OPENMP_FLAG"] = GPUFLAG + " -mp=" + TARGET + " " + MANAGED
+            config["OPENACC_FLAG"] = GPUFLAG + " -acc=" + TARGET + " " + MANAGED
+            config["CUDA_FLAG"] = GPUFLAG + " -cuda -acc=gpu" + " " + MANAGED
+
+        # ===================================
+        # ONEAPI
+        # ===================================
+        if spec.compiler.name == "oneapi":
+            fortran_flags = "-std18 -Ofast -xHOST -qopt-zmm-usage=low"
+            if config["FC"] == "ifort":
+                fortran_flags += " -qopt-streaming-stores=always"
+
+            config["DOCONCURRENT_FLAG"] = "-qopenmp" + (
+                " -parallel" if config["FC"] == "ifort" else ""
+            )
+            config["ARRAY_FLAG"] = "-qopenmp" + (" -parallel" if config["FC"] == "ifort" else "")
+            config["OPENMP_FLAG"] = "-qopenmp" + (
+                " -fopenmp-targets=spir64 -DUSE_FLOAT=1" if config["FC"] == "ifx" else ""
+            )
+            config["FCFLAGS"] = fortran_flags
+
+        # ===================================
+        # FJ
+        # ===================================
+        if spec.compiler.name == "fj":
+            fortran_flags = "-X08 -Kfast -KA64FX -KSVE -KARMV8_3_A -Kzfill=100 "
+            fortran_flags += "-Kprefetch_sequential=soft "
+            fortran_flags += "-Kprefetch_line=8 -Kprefetch_line_L2=16 -Koptmsg=2 "
+            # FJ Fortran system_clock is low resolution
+            fortran_flags += "-Keval -DUSE_OMP_GET_WTIME=1 "
+
+            config["FCFLAGS"] = fortran_flags
+            config["DOCONCURRENT_FLAG"] = "-Kparallel,reduction -DNOTSHARED"
+            config["ARRAY_FLAG"] = "-Kparallel,reduction"
+            config["OPENMP_FLAG"] = pkg.compiler.openmp_flag
+
+        with open(self.build_directory + "/make.inc." + spec.compiler.name, "w+") as inc:
+            for key in config:
+                inc.write("{0} = {1}\n".format(key, config[key]))
+
+    def setup_build_environment(self, env):
+        # The Fortran Makefile selects its make.inc.<compiler> include and the
+        # implementation to build from these two environment variables
+        env.set("COMPILER", self.spec.compiler.name)
+        env.set("IMPLEMENTATION", self.spec.variants["foption"].value)
+
+    @property
+    def build_name(self):
+        compiler_prefix = self.spec.compiler.name
+        implementation_prefix = self.spec.variants["foption"].value
+        return "{}.{}.{}".format("BabelStream", compiler_prefix, implementation_prefix)
+
+    def install(self, pkg, spec, prefix):
+        mkdir(prefix.bin)
+        install(self.build_directory + "/" + self.build_name, prefix.bin)
+        # also install the build tree so the generated make.inc can be inspected
+        install_tree(self.build_directory, prefix.lib)

From 76ed4578e7e2ecb4f87cbf1c9f0967651059dd3a Mon Sep 17 00:00:00 2001
From: Vicente Bolea
Date: Thu, 31 Oct 2024 21:21:25 -0400
Subject: [PATCH 009/208] adios2: add v2.10.2 release and fix build of older
 versions (#47235)

Co-authored-by: Bernhard Kaindl
---
 .../repos/builtin/packages/adios2/package.py  | 22 +++++++++----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py
index 6083c1ff8aa681..41e80d615d040e 100644
--- a/var/spack/repos/builtin/packages/adios2/package.py
+++ b/var/spack/repos/builtin/packages/adios2/package.py
@@ -26,10 +26,11 @@ class Adios2(CMakePackage, CudaPackage, ROCmPackage):
     version("master", branch="master")
     version(
-        "2.10.1",
-        sha256="ce776f3a451994f4979c6bd6d946917a749290a37b7433c0254759b02695ad85",
+        "2.10.2",
+        sha256="14cf0bcd94772194bce0f2c0e74dba187965d1cffd12d45f801c32929158579e",
         preferred=True,
     )
+    version("2.10.1", sha256="ce776f3a451994f4979c6bd6d946917a749290a37b7433c0254759b02695ad85")
     version("2.10.0", sha256="e5984de488bda546553dd2f46f047e539333891e63b9fe73944782ba6c2d95e4")
     version("2.9.2", sha256="78309297c82a95ee38ed3224c98b93d330128c753a43893f63bbe969320e4979")
     version("2.9.1", sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9")
@@ -39,11 +40,10 @@ class Adios2(CMakePackage, CudaPackage, ROCmPackage):
     version("2.8.1", sha256="3f515b442bbd52e3189866b121613fe3b59edb8845692ea86fad83d1eba35d93")
     version("2.8.0", sha256="5af3d950e616989133955c2430bd09bcf6bad3a04cf62317b401eaf6e7c2d479")
     version("2.7.1", sha256="c8e237fd51f49d8a62a0660db12b72ea5067512aa7970f3fcf80b70e3f87ca3e")
-    version("2.7.0", sha256="4b5df1a1f92d7ff380416dec7511cfcfe3dc44da27e486ed63c3e6cffb173924")
-    version("2.6.0", sha256="45b41889065f8b840725928db092848b8a8b8d1bfae1b92e72f8868d1c76216c")
-    version("2.5.0", sha256="7c8ff3bf5441dd662806df9650c56a669359cb0185ea232ecb3578de7b065329")
-    version("2.4.0", sha256="50ecea04b1e41c88835b4b3fd4e7bf0a0a2a3129855c9cc4ba6cf6a1575106e2")
-    version("2.3.1", sha256="3bf81ccc20a7f2715935349336a76ba4c8402355e1dc3848fcd6f4c3c5931893")
+    with
default_args(deprecated=True): + version("2.7.0", sha256="4b5df1a1f92d7ff380416dec7511cfcfe3dc44da27e486ed63c3e6cffb173924") + version("2.6.0", sha256="45b41889065f8b840725928db092848b8a8b8d1bfae1b92e72f8868d1c76216c") + version("2.5.0", sha256="7c8ff3bf5441dd662806df9650c56a669359cb0185ea232ecb3578de7b065329") depends_on("c", type="build") depends_on("cxx", type="build") @@ -76,7 +76,7 @@ class Adios2(CMakePackage, CudaPackage, ROCmPackage): variant("bzip2", default=True, when="@2.4:", description="Enable BZip2 compression") variant("zfp", default=True, description="Enable ZFP compression") variant("png", default=True, when="@2.4:", description="Enable PNG compression") - variant("sz", default=True, description="Enable SZ compression") + variant("sz", default=True, when="@2.6:", description="Enable SZ compression") variant("mgard", default=True, when="@2.8:", description="Enable MGARD compression") # Rransport engines @@ -214,7 +214,7 @@ class Adios2(CMakePackage, CudaPackage, ROCmPackage): # Fix an unnecessary python dependency when testing is disabled # See https://github.com/ornladios/ADIOS2/pull/2596 - patch("2.7-fix-python-test-deps.patch", when="@2.5.0:2.7.0") + patch("2.7-fix-python-test-deps.patch", when="@2.7.0") # Fix unresolved symbols when built with gcc10. # See https://github.com/ornladios/ADIOS2/pull/2714 @@ -227,7 +227,7 @@ class Adios2(CMakePackage, CudaPackage, ROCmPackage): # https://github.com/ornladios/adios2/pull/2710 patch( "https://github.com/ornladios/adios2/pull/2710.patch?full_index=1", - when="@:2.7.1", + when="@2.5:2.7.1", sha256="8221073d1b2f8944395a88a5d60a15c7370646b62f5fc6309867bbb6a8c2096c", ) @@ -241,7 +241,7 @@ class Adios2(CMakePackage, CudaPackage, ROCmPackage): # ROCM: enable support for rocm >= 6 # https://github.com/ornladios/ADIOS2/pull/4214 - patch("2.10-enable-rocm6.patch", when="@2.9.1:") + patch("2.10-enable-rocm6.patch", when="@2.9.1:2.10.1") @when("%fj") def patch(self): From cb92d70d6d4719b1fbbc67b71cce0dd18d908c4e Mon Sep 17 00:00:00 2001 From: Julien Cortial <101571984+jcortial-safran@users.noreply.github.com> Date: Fri, 1 Nov 2024 03:29:54 +0100 Subject: [PATCH 010/208] mmg: add v5.8.0 (#47356) --- var/spack/repos/builtin/packages/mmg/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/mmg/package.py b/var/spack/repos/builtin/packages/mmg/package.py index eb14be0c7761bf..38a8123516b31a 100644 --- a/var/spack/repos/builtin/packages/mmg/package.py +++ b/var/spack/repos/builtin/packages/mmg/package.py @@ -30,8 +30,11 @@ class Mmg(CMakePackage): homepage = "https://www.mmgtools.org/" url = "https://github.com/MmgTools/mmg/archive/v5.3.13.tar.gz" + maintainers("jcortial-safran") + license("LGPL-3.0-or-later") + version("5.8.0", sha256="686eaab84de79c072f3aedf26cd11ced44c84b435d51ce34e016ad203172922f") version("5.7.3", sha256="b0a9c5ad6789df369a68f94295df5b324b6348020b73bcc395d32fdd44abe706") version("5.7.2", sha256="4c396dd44aec69e0a171a04f857e28aad2e0bbfb733b48b6d81a2c6868e86840") version("5.7.1", sha256="27c09477ebc080f54919f76f8533a343936677c81809fe37ce4e2d62fa97237b") From a69af3c71f758df2367dab8da85b6093e97b6c22 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Fri, 1 Nov 2024 03:34:09 +0100 Subject: [PATCH 011/208] py-rasterio: add v1.4.2 (#47344) --- var/spack/repos/builtin/packages/py-rasterio/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-rasterio/package.py b/var/spack/repos/builtin/packages/py-rasterio/package.py index 606b983401f950..222f6d5109e49f 100644 --- a/var/spack/repos/builtin/packages/py-rasterio/package.py +++ b/var/spack/repos/builtin/packages/py-rasterio/package.py @@ -22,6 +22,7 @@ class PyRasterio(PythonPackage): version("main", branch="main") version("master", branch="master", deprecated=True) + version("1.4.2", sha256="1be35ccb4d998a4c48fa51bbee9e37927ecd9b9e954a2b2581b8f3e9bb165332") version("1.4.1", sha256="d750362bb792d2311f94803ff309baec48486ecba75c9b905ea9b1f5eb06ef9f") version("1.4.0", sha256="e0d2ff540a4e06016cca2fb46691a10afe71343ea998c50ad8247bb125542133") version("1.3.11", sha256="47aa70b4718ebc80d825bb7db3127577d74e31c53048ce215145c0baf530ece9") @@ -83,3 +84,6 @@ class PyRasterio(PythonPackage): depends_on("gdal@2.4:3.3", when="@1.2.7:1.2") depends_on("gdal@2.3:3.2", when="@1.2.0:1.2.6") depends_on("gdal@1.11:3.2", when="@1.1.0:1.1") + + # https://github.com/rasterio/rasterio/pull/3212 + conflicts("^gdal@3.10:", when="@:1.4.1") From c0c97433002c9baeb16cd78aafa477d65ac10bd6 Mon Sep 17 00:00:00 2001 From: "Paul R. C. Kent" Date: Thu, 31 Oct 2024 23:38:40 -0400 Subject: [PATCH 012/208] py-ase: add v3.23.0 (#47337) --- var/spack/repos/builtin/packages/py-ase/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-ase/package.py b/var/spack/repos/builtin/packages/py-ase/package.py index e949990ea44d1c..fb091295e0d17e 100644 --- a/var/spack/repos/builtin/packages/py-ase/package.py +++ b/var/spack/repos/builtin/packages/py-ase/package.py @@ -16,6 +16,7 @@ class PyAse(PythonPackage): license("LGPL-2.1-or-later") + version("3.23.0", sha256="91a2aa31d89bd90b0efdfe4a7e84264f32828b2abfc9f38e65e041ad76fec8ae") version("3.21.1", sha256="78b01d88529d5f604e76bc64be102d48f058ca50faad72ac740d717545711c7b") version("3.21.0", sha256="2c561e9b767cf16fc8ce198ea9326d77c6b67d33a85f44b68455e23466a64608") version("3.20.1", sha256="72c81f21b6adb907595fce8d883c0231301cbd8e9f6e5ce8e98bab927054daca") @@ -32,8 +33,12 @@ class PyAse(PythonPackage): depends_on("python@2.6:", type=("build", "run"), when="@:3.15.0") depends_on("python@3.5:", type=("build", "run"), when="@3.18.0:") depends_on("python@3.6:", type=("build", "run"), when="@3.20.0:") + depends_on("python@3.8:", type=("build", "run"), when="@3.23.0:") depends_on("py-numpy@1.11.3:", type=("build", "run")) + depends_on("py-numpy@1.18.5:", type=("build", "run"), when="@3.23.0:") depends_on("py-matplotlib@2.0.0:", type=("build", "run")) + depends_on("py-matplotlib@3.3.4:", type=("build", "run"), when="@3.23.0:") depends_on("py-scipy@0.18.1:", type=("build", "run")) + depends_on("py-scipy@1.6.0:", type=("build", "run"), when="@3.23.0:") depends_on("py-flask", type=("build", "run"), when="@:3.18.0") depends_on("py-setuptools", type="build") From 9a25a5821920c75bd4b85eb4332b1e4c4798e267 Mon Sep 17 00:00:00 2001 From: joscot-linaro <126488600+joscot-linaro@users.noreply.github.com> Date: Fri, 1 Nov 2024 03:56:02 +0000 Subject: [PATCH 013/208] linaro-forge: added 24.0.6 version (#47348) --- var/spack/repos/builtin/packages/linaro-forge/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/linaro-forge/package.py 
b/var/spack/repos/builtin/packages/linaro-forge/package.py index a673f275bd92fe..d1f862a5b86a37 100644 --- a/var/spack/repos/builtin/packages/linaro-forge/package.py +++ b/var/spack/repos/builtin/packages/linaro-forge/package.py @@ -23,6 +23,9 @@ class LinaroForge(Package): maintainers("kenche-linaro") if platform.machine() == "aarch64": + version( + "24.0.6", sha256="a7f9f71e4352be3680854611fe433a9974fcb8a327ac65ca3bc950c956eac6e4" + ) version( "24.0.5", sha256="fc0c80ce9f66c6966faaca77de0f13e26da564c853e5bfc1e8acd17b65bc2ba0" ) @@ -97,6 +100,9 @@ class LinaroForge(Package): "21.1.3", sha256="eecbc5686d60994c5468b2d7cd37bebe5d9ac0ba37bd1f98fbfc69b071db541e" ) elif platform.machine() == "x86_64": + version( + "24.0.6", sha256="eab198b964862b4664359ccbec1edb27c2dd3b9fa82bcb4e14fc616a2b0341da" + ) version( "24.0.5", sha256="da0d4d6fa9120b5e7c4a248795b7f5da32c4987588ecb7406213c8c9846af2bc" ) From b38a29f4df4aef19420238909fe185cf13ea05e4 Mon Sep 17 00:00:00 2001 From: G-Ragghianti <33492707+G-Ragghianti@users.noreply.github.com> Date: Thu, 31 Oct 2024 23:59:30 -0400 Subject: [PATCH 014/208] New versions for slate, lapackpp, and blaspp (#47334) --- var/spack/repos/builtin/packages/blaspp/package.py | 3 +++ var/spack/repos/builtin/packages/lapackpp/package.py | 4 ++++ var/spack/repos/builtin/packages/slate/package.py | 4 ++++ 3 files changed, 11 insertions(+) diff --git a/var/spack/repos/builtin/packages/blaspp/package.py b/var/spack/repos/builtin/packages/blaspp/package.py index e58d274483cc1d..2bdaf62c747a58 100644 --- a/var/spack/repos/builtin/packages/blaspp/package.py +++ b/var/spack/repos/builtin/packages/blaspp/package.py @@ -21,6 +21,9 @@ class Blaspp(CMakePackage, CudaPackage, ROCmPackage): license("BSD-3-Clause") version("master", branch="master") + version( + "2024.10.26", sha256="c15ae19dbed1be35e8258048a044d3104da59e7e52b4fe7fe7ea5032708a8d2c" + ) version( "2024.05.31", sha256="24f325d2e1c2cc4275324bd88406555688379480877d19553656a0328287927a" ) diff --git a/var/spack/repos/builtin/packages/lapackpp/package.py b/var/spack/repos/builtin/packages/lapackpp/package.py index db32de97e15430..a97579255586c5 100644 --- a/var/spack/repos/builtin/packages/lapackpp/package.py +++ b/var/spack/repos/builtin/packages/lapackpp/package.py @@ -11,6 +11,7 @@ _versions = [ # LAPACK++, BLAS++ ["master", "master"], + ["2024.10.26", "2024.10.26"], ["2024.05.31", "2024.05.31"], ["2023.11.05", "2023.11.05"], ["2023.08.25", "2023.08.25"], @@ -37,6 +38,9 @@ class Lapackpp(CMakePackage, CudaPackage, ROCmPackage): license("BSD-3-Clause") version("master", branch="master") + version( + "2024.10.26", sha256="67f81f585a7ac89b779c79297cab75cc23d2492cb5055c2348381ebdb751821d" + ) version( "2024.05.31", sha256="093646d492a4c2c6b4d7001effb559c80da7fa31fd5ba517a6d686ca8c78cd99" ) diff --git a/var/spack/repos/builtin/packages/slate/package.py b/var/spack/repos/builtin/packages/slate/package.py index 9a89c50bcc0ecc..82829b2edcb5f3 100644 --- a/var/spack/repos/builtin/packages/slate/package.py +++ b/var/spack/repos/builtin/packages/slate/package.py @@ -26,6 +26,9 @@ class Slate(CMakePackage, CudaPackage, ROCmPackage): license("BSD-3-Clause") version("master", branch="master") + version( + "2024.10.29", sha256="e729fad51f44b1340c0f64ac0f862026121183a3c8d731874f0a11a3b5053223" + ) version( "2024.05.31", sha256="9c5d4d6779d8935b6fe41031b46e11ab92102f13c5f684022287c8616661b775" ) @@ -89,6 +92,7 @@ class Slate(CMakePackage, CudaPackage, ROCmPackage): for val in ROCmPackage.amdgpu_targets: depends_on("blaspp +rocm 
amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) depends_on("lapackpp +rocm amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) + depends_on("lapackpp@2024.10.26:", when="@2024.10.29:") depends_on("lapackpp@2024.05.31:", when="@2024.05.31:") depends_on("lapackpp@2023.11.05:", when="@2023.11.05:") depends_on("lapackpp@2023.08.25:", when="@2023.08.25:") From 35aa02771ac8b3f08480ff39f8f9a990cc03b2b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= Date: Fri, 1 Nov 2024 05:26:55 +0100 Subject: [PATCH 015/208] verilator: add 5.028, fix builds when using gcc on newer versions (#47168) --- .../packages/verilator/fix_compile_gch.patch | 13 +++++++++++++ .../repos/builtin/packages/verilator/package.py | 3 +++ 2 files changed, 16 insertions(+) create mode 100644 var/spack/repos/builtin/packages/verilator/fix_compile_gch.patch diff --git a/var/spack/repos/builtin/packages/verilator/fix_compile_gch.patch b/var/spack/repos/builtin/packages/verilator/fix_compile_gch.patch new file mode 100644 index 00000000000000..761eeb9fa7b059 --- /dev/null +++ b/var/spack/repos/builtin/packages/verilator/fix_compile_gch.patch @@ -0,0 +1,13 @@ +diff --git a/src/Makefile_obj.in b/src/Makefile_obj.in +index 18947923a..d29baa840 100644 +--- a/src/Makefile_obj.in ++++ b/src/Makefile_obj.in +@@ -363,7 +363,7 @@ $(TGT): $(PREDEP_H) $(OBJS) + .SECONDARY: + + %.gch: % +- $(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSWALL} ${CFG_CXXFLAGS_PCH} $< -o $@ ++ $(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSWALL} ${CFG_CXXFLAGS_PCH} -c $< -o $@ + %.o: %.cpp + $(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSWALL} -c $< -o $@ + %.o: %.c diff --git a/var/spack/repos/builtin/packages/verilator/package.py b/var/spack/repos/builtin/packages/verilator/package.py index 7500e252d93163..0399839d0492c3 100644 --- a/var/spack/repos/builtin/packages/verilator/package.py +++ b/var/spack/repos/builtin/packages/verilator/package.py @@ -42,6 +42,7 @@ class Verilator(AutotoolsPackage): version("master", branch="master") + version("5.028", sha256="02d4b6f34754b46a97cfd70f5fcbc9b730bd1f0a24c3fc37223397778fcb142c") version("5.026", sha256="87fdecf3967007d9ee8c30191ff2476f2a33635d0e0c6e3dbf345cc2f0c50b78") version("5.024", sha256="88b04c953e7165c670d6a700f202cef99c746a0867b4e2efe1d7ea789dee35f3") version("5.022", sha256="3c2f5338f4b6ce7e2f47a142401acdd18cbf4c5da06092618d6d036c0afef12d") @@ -86,6 +87,8 @@ class Verilator(AutotoolsPackage): conflicts("%gcc@:6", msg="C++14 support required") + patch("fix_compile_gch.patch", level=1, when="@5.0.18:") + # we need to fix the CXX and LINK paths, as they point to the spack # wrapper scripts which aren't usable without spack filter_compiler_wrappers("verilated.mk", relative_root="include") From 4a75c3c87a9935b1eff4ff6bc9932b7a2da5fcbf Mon Sep 17 00:00:00 2001 From: Matt Thompson Date: Fri, 1 Nov 2024 01:30:00 -0400 Subject: [PATCH 016/208] mapl: add 2.50.2, 2.47.1 tweaks (#47324) --- var/spack/repos/builtin/packages/mapl/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/mapl/package.py b/var/spack/repos/builtin/packages/mapl/package.py index 9e5a5c5697f4ee..9f31c3be66fbb2 100644 --- a/var/spack/repos/builtin/packages/mapl/package.py +++ b/var/spack/repos/builtin/packages/mapl/package.py @@ -38,11 +38,14 @@ class Mapl(CMakePackage): version("develop", branch="develop") version("main", branch="main") + version("2.50.2", sha256="1c72f8598cf01bab6ef30c1f461444ba5a13f55c61164b7b3c15efb0cd1096c0") version("2.50.1", 
sha256="26dd7a3ec82d484d60a559bb90a20ad9a2a717af52c25b6a752dd971aeeb5075") version("2.50.0", sha256="12282e547936f667f85c95d466273dcbaccbd600add72fa5981c0c734ccb1f7d") version("2.49.1", sha256="975e349c7ff8be65d4e63f2a6adf74ca96127628505dbce16c7ba7a3901edc70") version("2.49.0", sha256="fdf4d48bd38abd1059180b123c5d9fdc2781992c783244ddc51ab0f2ef63dd67") version("2.48.0", sha256="60a0fc4fd82b1a05050666ae478da7d79d86305aff1643a57bc09cb5347323b7") + version("2.47.1.2", sha256="ae9032b4c833887b9ddc932ea9eb7e59e713829f6c39f3152fee4caf2f3ba21f") + version("2.47.1.1", sha256="9553e91e0325dfe57856564e9970b3871069f902fb109fcced6ad87151f95be7") version("2.47.2", sha256="d4ca384bf249b755454cd486a26bae76944a7cae3a706b9a7c9298825077cac0") version("2.47.1", sha256="ca3e94c0caa78a91591fe63603d1836196f5294d4baad7cf1d83b229b3a85916") version("2.47.0", sha256="66c862d2ab8bcd6969e9728091dbca54f1f420e97e41424c4ba93ef606088459") From 5df7dc88fc6d10e8c560d4d5ae3e98deaf61a90d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 06:23:10 -0500 Subject: [PATCH 017/208] build(deps): bump docutils from 0.20.1 to 0.21.2 in /lib/spack/docs (#45592) Bumps [docutils](https://docutils.sourceforge.io) from 0.20.1 to 0.21.2. --- updated-dependencies: - dependency-name: docutils dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index f9c3ccc1b60de4..530bb17522e00b 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -3,7 +3,7 @@ sphinxcontrib-programoutput==0.17 sphinx_design==0.6.1 sphinx-rtd-theme==3.0.1 python-levenshtein==0.26.0 -docutils==0.20.1 +docutils==0.21.2 pygments==2.18.0 urllib3==2.2.3 pytest==8.3.3 From 492c52089f81654f00b0d52defed71e7b56152b7 Mon Sep 17 00:00:00 2001 From: Kenneth Moreland Date: Fri, 1 Nov 2024 09:18:13 -0400 Subject: [PATCH 018/208] adios2: fix mgard variant (#47223) Co-authored-by: Bernhard Kaindl --- var/spack/repos/builtin/packages/adios2/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py index 41e80d615d040e..76ca3b0c9f1c5c 100644 --- a/var/spack/repos/builtin/packages/adios2/package.py +++ b/var/spack/repos/builtin/packages/adios2/package.py @@ -188,7 +188,8 @@ class Adios2(CMakePackage, CudaPackage, ROCmPackage): depends_on("libpng@1.6:", when="+png") depends_on("zfp@0.5.1:0.5", when="+zfp") depends_on("sz@2.0.2.0:", when="+sz") - depends_on("mgard", when="+mgard") + depends_on("mgard@2022-11-18:", when="+mgard") + depends_on("mgard@2023-01-10:", when="@2.9: +mgard") extends("python", when="+python") depends_on("python@2.7:2.8,3.5:", when="@:2.4.0 +python", type=("build", "run")) @@ -274,6 +275,7 @@ def cmake_args(self): from_variant("ADIOS2_USE_DataSpaces", "dataspaces"), from_variant("ADIOS2_USE_Fortran", "fortran"), from_variant("ADIOS2_USE_HDF5", "hdf5"), + from_variant("ADIOS2_USE_MGARD", "mgard"), from_variant("ADIOS2_USE_MPI", "mpi"), from_variant("ADIOS2_USE_PNG", "png"), from_variant("ADIOS2_USE_Python", "python"), @@ -292,7 +294,6 @@ def cmake_args(self): self.define("ADIOS2_BUILD_EXAMPLES", False), 
self.define("ADIOS2_USE_Endian_Reverse", True), self.define("ADIOS2_USE_IME", False), - self.define("ADIOS2_USE_MGARD", False), ] if spec.satisfies("+sst"): From 24d3ed8c1805ccd7553dba2c62984d99dc301f6e Mon Sep 17 00:00:00 2001 From: Stephen Nicholas Swatman Date: Fri, 1 Nov 2024 16:41:34 +0100 Subject: [PATCH 019/208] geant4: make downloading data dependency optional (#47298) * geant4: make downloading data dependency optional This PR makes downloading the data repository of the Geant4 spec optional by adding a sticky, default-enabled variant which controls the dependency on `geant4-data`. This should not change the default behaviour, but should allow users to choose whether or not they want the data directory. * Add comment * Update env variable * Generic docs * Buildable false --- .../repos/builtin/packages/geant4/package.py | 39 ++++++++++++++++++- 1 file changed, 37 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index a606c9c7e1cdcc..8dd3f18b5d92b5 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -84,6 +84,40 @@ class Geant4(CMakePackage): variant("timemory", default=False, description="Use TiMemory for profiling", when="@9.5:") variant("vtk", default=False, description="Enable VTK support", when="@11:") + # For most users, obtaining the Geant4 data via Spack will be useful; the + # sticky, default-enabled `+data` variant ensures that this happens. + # Furthermore, if this variant is enabled, Spack will automatically set the + # necessary environment variables to ensure that the Geant4 code runs + # correctly. + # + # However, the Geant4 data is also large and it is, on many machines used + # in HEP, already available via e.g. CVMFS. In these cases, users can save + # network bandwidth by using externally supplied Geant4 data. This can be + # done in two different ways. + # + # The first is to declare the Geant4 data directories as externals. This + # can be done by manually adding them to the `packages.yaml` file, e.g.: + # + # ``` + # g4radioactivedecay: + # externals: + # - spec: g4radioactivedecay@5.6 + # prefix: + # buildable: False + # ``` + # + # Where is a path such that /share/data/ + # exists. + # + # Alternatively, the `~data` variant can be supplied; in this case, Spack + # will not attempt to use the `geant4-data` spec at all. It is then + # essential to set up the `GEANT4_DATA_DIR` environment variable manually + # at runtime; see the Geant4 installation guide for more information: + # https://geant4-userdoc.web.cern.ch/UsersGuides/InstallationGuide/html/postinstall.html + variant( + "data", default=True, sticky=True, description="Enable downloading of the data directory" + ) + depends_on("cmake@3.16:", type="build", when="@11.0.0:") depends_on("cmake@3.8:", type="build", when="@10.6.0:") depends_on("cmake@3.5:", type="build") @@ -109,7 +143,7 @@ class Geant4(CMakePackage): "11.2.2:11.2", "11.3:", ]: - depends_on("geant4-data@" + _vers, type="run", when="@" + _vers) + depends_on("geant4-data@" + _vers, type="run", when="+data @" + _vers) depends_on("expat") depends_on("zlib-api") @@ -301,7 +335,8 @@ def cmake_args(self): # geant4-data's install directory to correctly set up the # Geant4Config.cmake values for Geant4_DATASETS . 
options.append(self.define("GEANT4_INSTALL_DATA", False)) - options.append(self.define("GEANT4_INSTALL_DATADIR", self.datadir)) + if spec.satisfies("+data"): + options.append(self.define("GEANT4_INSTALL_DATADIR", self.datadir)) # Vecgeom if spec.satisfies("+vecgeom"): From 8f09f523cc12a6c50f8fdc22c490a9a07188bea4 Mon Sep 17 00:00:00 2001 From: "Paul R. C. Kent" Date: Fri, 1 Nov 2024 13:40:10 -0400 Subject: [PATCH 020/208] cp2k: protect 2024.3 against newer libxc (#47363) * cp2k: protect against newer libxc * Compat bound for libxc --- var/spack/repos/builtin/packages/cp2k/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index 5d462a906a438c..71d6bab5a2e024 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -201,6 +201,7 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage): depends_on("libxc@5.1.7:5.1", when="@9:2022.2") depends_on("libxc@6.1:", when="@2023.1:") depends_on("libxc@6.2:", when="@2023.2:") + depends_on("libxc@:6", when="@:2024.3") with when("+spla"): depends_on("spla+cuda+fortran", when="+cuda") From 7b2450c22ad4adf4280a8f024ec4778b22d58816 Mon Sep 17 00:00:00 2001 From: "Marc T. Henry de Frahan" Date: Fri, 1 Nov 2024 13:53:59 -0600 Subject: [PATCH 021/208] Add openfast version 3.5.4 (#47369) * Add openfast version 3.5.4 * remove commits --- .../builtin/packages/openfast/package.py | 33 ++++++++++--------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/var/spack/repos/builtin/packages/openfast/package.py b/var/spack/repos/builtin/packages/openfast/package.py index 8b99fda2c669e3..3d768d3c6fd632 100644 --- a/var/spack/repos/builtin/packages/openfast/package.py +++ b/var/spack/repos/builtin/packages/openfast/package.py @@ -18,22 +18,23 @@ class Openfast(CMakePackage): version("develop", branch="dev") version("master", branch="main") - version("3.5.3", tag="v3.5.3", commit="6a7a543790f3cad4a65b87242a619ac5b34b4c0f") - version("3.4.1", tag="v3.4.1", commit="18704086dad861ab13daf804825da7c4b8d59428") - version("3.4.0", tag="v3.4.0", commit="e8ec53f9c7f9d3f6a13bfb61dba12a0ca04d8a2f") - version("3.3.0", tag="v3.3.0", commit="5f3fb6ef74f48e75ca94000090737a41866fb264") - version("3.2.1", tag="v3.2.1", commit="08fffef240461a8334596179f1de462be43ad3e9") - version("3.2.0", tag="v3.2.0", commit="90a1ffb626baf398d89681b9422bdbfef11cd3ad") - version("3.1.0", tag="v3.1.0", commit="3456a645581456883e44d441eb285ed688e98797") - version("3.0.0", tag="v3.0.0", commit="42a5a8196529ae0349eda6d797a79461c2c03ff0") - version("2.6.0", tag="v2.6.0", commit="bbbb1ca7b28a4ba411613b5c85f5de02f8316754") - version("2.5.0", tag="v2.5.0", commit="718d46f707d78e85edf1b49d3b1a63e8e23e1aae") - version("2.4.0", tag="v2.4.0", commit="ff33ca1cf65f2e13c1de0ab78cc2396ec4a47ce0") - version("2.3.0", tag="v2.3.0", commit="f2419c5d1c23caad9146b95a103d89e9dcaefe30") - version("2.2.0", tag="v2.2.0", commit="e4faf27b774982df274b87c0570e4b58c4a13fe3") - version("2.1.0", tag="v2.1.0", commit="f147b80521eff90c19f065eabeceac13de39ac59") - version("2.0.0", tag="v2.0.0", commit="0769598a17e19b3ccd00a85cde389995f55024a8") - version("1.0.0", tag="v1.0.0", commit="e788b9b18bd5ed96ea59d4bc0812d461bc430cfe") + version("3.5.4", tag="v3.5.4") + version("3.5.3", tag="v3.5.3") + version("3.4.1", tag="v3.4.1") + version("3.4.0", tag="v3.4.0") + version("3.3.0", tag="v3.3.0") + version("3.2.1", tag="v3.2.1") + version("3.2.0", 
tag="v3.2.0") + version("3.1.0", tag="v3.1.0") + version("3.0.0", tag="v3.0.0") + version("2.6.0", tag="v2.6.0") + version("2.5.0", tag="v2.5.0") + version("2.4.0", tag="v2.4.0") + version("2.3.0", tag="v2.3.0") + version("2.2.0", tag="v2.2.0") + version("2.1.0", tag="v2.1.0") + version("2.0.0", tag="v2.0.0") + version("1.0.0", tag="v1.0.0") depends_on("c", type="build") # generated depends_on("cxx", type="build") # generated From 0cf8cb70f43ae325e8895eb241a92f5aa4680399 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 1 Nov 2024 21:43:16 +0100 Subject: [PATCH 022/208] Fix pickle round-trip of specs propagating variants (#47351) This changes `Spec` serialization to include information about propagation for abstract specs. This was previously not included in the JSON representation for abstract specs, and couldn't be stored. Now, there is a separate `propagate` dictionary alongside the `parameters` dictionary. This isn't beautiful, but when we bump the spec version for Spack `v0.24`, we can clean up this and other aspects of the schema. --- lib/spack/spack/spec.py | 30 ++++++++++++++++++++---------- lib/spack/spack/test/spec_yaml.py | 24 ++++++++++++++++++++++++ lib/spack/spack/variant.py | 10 ++++++---- 3 files changed, 50 insertions(+), 14 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index e5b9cad4312764..0ec0a3009d6987 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -2199,6 +2199,18 @@ def to_node_dict(self, hash=ht.dag_hash): if params: d["parameters"] = params + if params and not self.concrete: + flag_names = [ + name + for name, flags in self.compiler_flags.items() + if any(x.propagate for x in flags) + ] + d["propagate"] = sorted( + itertools.chain( + [v.name for v in self.variants.values() if v.propagate], flag_names + ) + ) + if self.external: d["external"] = syaml.syaml_dict( [ @@ -2371,16 +2383,10 @@ def node_dict_with_hashes(self, hash=ht.dag_hash): spec is concrete, the full hash is added as well. If 'build' is in the hash_type, the build hash is also added.""" node = self.to_node_dict(hash) + # All specs have at least a DAG hash node[ht.dag_hash.name] = self.dag_hash() - # dag_hash is lazily computed -- but if we write a spec out, we want it - # to be included. This is effectively the last chance we get to compute - # it accurately. 
- if self.concrete: - # all specs have at least a DAG hash - node[ht.dag_hash.name] = self.dag_hash() - - else: + if not self.concrete: node["concrete"] = False # we can also give them other hash types if we want @@ -4731,13 +4737,17 @@ def from_node_dict(cls, node): else: spec.compiler = None + propagated_names = node.get("propagate", []) for name, values in node.get("parameters", {}).items(): + propagate = name in propagated_names if name in _valid_compiler_flags: spec.compiler_flags[name] = [] for val in values: - spec.compiler_flags.add_flag(name, val, False) + spec.compiler_flags.add_flag(name, val, propagate) else: - spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values) + spec.variants[name] = vt.MultiValuedVariant.from_node_dict( + name, values, propagate=propagate + ) spec.external_path = None spec.external_modules = None diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py index 5b64822b38268a..79f128e94833cd 100644 --- a/lib/spack/spack/test/spec_yaml.py +++ b/lib/spack/spack/test/spec_yaml.py @@ -16,6 +16,7 @@ import io import json import os +import pickle import pytest import ruamel.yaml @@ -551,3 +552,26 @@ def test_anchorify_2(): e: *id002 """ ) + + +@pytest.mark.parametrize( + "spec_str", + [ + "hdf5 ++mpi", + "hdf5 cflags==-g", + "hdf5 foo==bar", + "hdf5~~mpi++shared", + "hdf5 cflags==-g foo==bar cxxflags==-O3", + "hdf5 cflags=-g foo==bar cxxflags==-O3", + ], +) +def test_pickle_roundtrip_for_abstract_specs(spec_str): + """Tests that abstract specs correctly round trip when pickled. + + This test compares both spec objects and their string representation, due to some + inconsistencies in how `Spec.__eq__` is implemented. + """ + s = spack.spec.Spec(spec_str) + t = pickle.loads(pickle.dumps(s)) + assert s == t + assert str(s) == str(t) diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index 0756841a638135..a15c760a0a2347 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -307,19 +307,21 @@ def __init__(self, name: str, value: Any, propagate: bool = False): self.value = value @staticmethod - def from_node_dict(name: str, value: Union[str, List[str]]) -> "AbstractVariant": + def from_node_dict( + name: str, value: Union[str, List[str]], *, propagate: bool = False + ) -> "AbstractVariant": """Reconstruct a variant from a node dict.""" if isinstance(value, list): # read multi-value variants in and be faithful to the YAML - mvar = MultiValuedVariant(name, ()) + mvar = MultiValuedVariant(name, (), propagate=propagate) mvar._value = tuple(value) mvar._original_value = mvar._value return mvar elif str(value).upper() == "TRUE" or str(value).upper() == "FALSE": - return BoolValuedVariant(name, value) + return BoolValuedVariant(name, value, propagate=propagate) - return SingleValuedVariant(name, value) + return SingleValuedVariant(name, value, propagate=propagate) def yaml_entry(self) -> Tuple[str, SerializedValueType]: """Returns a key, value tuple suitable to be an entry in a yaml dict. 
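
For reference, the round-trip the patch above fixes can be exercised like this
(a minimal sketch; the exact dictionary contents in the comments are an
assumption based on the diff, not verbatim output):

    import pickle

    import spack.spec

    s = spack.spec.Spec("hdf5++mpi cflags==-g")
    node = s.to_node_dict()
    # "parameters" still carries the values, e.g. node["parameters"]["cflags"] == ["-g"],
    # while the new "propagate" key lists what propagates, e.g. ["cflags", "mpi"]
    assert pickle.loads(pickle.dumps(s)) == s  # propagation now survives the round trip
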
From 1462c357619fedf7354bc60f9178b2199258ebd2 Mon Sep 17 00:00:00 2001
From: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com>
Date: Fri, 1 Nov 2024 17:07:23 -0500
Subject: [PATCH 023/208] CI generate on change (#47318)

* don't concretize in CI if changed packages are not in stacks

Signed-off-by: Todd Gamblin 

* Generate noop job when no specs to rebuild due to untouched pruning

* Add test to verify skipping generate creates a noop job

* Changed debug for early exit

---------

Signed-off-by: Todd Gamblin 
Co-authored-by: Todd Gamblin 
---
 lib/spack/spack/ci.py          | 94 ++++++++++++++++++++++++----------
 lib/spack/spack/test/cmd/ci.py | 42 +++++++++++++++
 2 files changed, 109 insertions(+), 27 deletions(-)

diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index 5a8b2ae1e7d49a..8a23bc3ae1357b 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -37,6 +37,7 @@
 import spack.error
 import spack.main
 import spack.mirror
+import spack.package_base
 import spack.paths
 import spack.repo
 import spack.spec
@@ -264,14 +265,22 @@ def _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions):
 def get_change_revisions():
     """If this is a git repo get the revisions to use when checking for changed
     packages and spack core modules."""
+    rev1 = None
+    rev2 = None
+
+    # Note: git_dir may be a file in a worktree. If it exists, attempt to use git
+    # to determine if there is a revision.
     git_dir = os.path.join(spack.paths.prefix, ".git")
-    if os.path.exists(git_dir) and os.path.isdir(git_dir):
-        # TODO: This will only find changed packages from the last
-        # TODO: commit. While this may work for single merge commits
-        # TODO: when merging the topic branch into the base, it will
-        # TODO: require more thought outside of that narrow case.
-        return "HEAD^", "HEAD"
-    return None, None
+    if os.path.exists(git_dir):
+        # The default will only find changed packages from the last
+        # commit. When the commit is a merge commit, this will return all of the
+        # changes on the topic branch.
+        # TODO: Handle the case where the clone is not a shallow clone of a merge commit
+        # using `git merge-base`
+        rev1 = "HEAD^"
+        rev2 = "HEAD"
+
+    return rev1, rev2
 
 
 def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
@@ -390,7 +399,7 @@ class SpackCI:
     used by the CI generator(s).
     """
 
-    def __init__(self, ci_config, spec_labels, stages):
+    def __init__(self, ci_config, spec_labels=None, stages=None):
         """Given the information from the ci section of the config
         and the staged jobs, set up meta data needed for generating Spack
         CI IR.
@@ -408,8 +417,9 @@ def __init__(self, ci_config, spec_labels, stages):
         }
 
         jobs = self.ir["jobs"]
-        for spec, dag_hash in _build_jobs(spec_labels, stages):
-            jobs[dag_hash] = self.__init_job(spec)
+        if spec_labels and stages:
+            for spec, dag_hash in _build_jobs(spec_labels, stages):
+                jobs[dag_hash] = self.__init_job(spec)
 
         for name in self.named_jobs:
             # Skip the special named jobs
@@ -705,14 +715,53 @@ def generate_gitlab_ci_yaml(
         files (spack.yaml, spack.lock), etc should be written.  GitLab
         requires this to be within the project directory.
""" + rev1, rev2 = get_change_revisions() + tty.debug(f"Got following revisions: rev1={rev1}, rev2={rev2}") + + # Get the joined "ci" config with all of the current scopes resolved + ci_config = cfg.get("ci") + spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None) + + changed = rev1 and rev2 + affected_pkgs = None + if spack_prune_untouched and changed: + affected_pkgs = compute_affected_packages(rev1, rev2) + tty.debug("affected pkgs:") + if affected_pkgs: + for p in affected_pkgs: + tty.debug(f" {p}") + else: + tty.debug(" no affected packages...") + + possible_builds = spack.package_base.possible_dependencies(*env.user_specs) + changed = any((spec in p for p in possible_builds.values()) for spec in affected_pkgs) + + if not changed: + spack_ci = SpackCI(ci_config) + spack_ci_ir = spack_ci.generate_ir() + + # No jobs should be generated. + noop_job = spack_ci_ir["jobs"]["noop"]["attributes"] + # If this job fails ignore the status and carry on + noop_job["retry"] = 0 + noop_job["allow_failure"] = True + + tty.msg("Skipping concretization, generating no-op job") + output_object = {"no-specs-to-rebuild": noop_job} + + # Ensure the child pipeline always runs + output_object["workflow"] = {"rules": [{"when": "always"}]} + + with open(output_file, "w") as f: + ruamel.yaml.YAML().dump(output_object, f) + + return + with spack.concretize.disable_compiler_existence_check(): with env.write_transaction(): env.concretize() env.write() - # Get the joined "ci" config with all of the current scopes resolved - ci_config = cfg.get("ci") - if not ci_config: raise SpackCIError("Environment does not have a `ci` configuration") @@ -737,20 +786,13 @@ def generate_gitlab_ci_yaml( dependent_depth = None prune_untouched_packages = False - spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None) if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true": # Requested to prune untouched packages, but assume we won't do that # unless we're actually in a git repo. - rev1, rev2 = get_change_revisions() - tty.debug(f"Got following revisions: rev1={rev1}, rev2={rev2}") - if rev1 and rev2: + if changed: # If the stack file itself did not change, proceed with pruning if not get_stack_changed(env.manifest_path, rev1, rev2): prune_untouched_packages = True - affected_pkgs = compute_affected_packages(rev1, rev2) - tty.debug("affected pkgs:") - for p in affected_pkgs: - tty.debug(f" {p}") affected_specs = get_spec_filter_list( env, affected_pkgs, dependent_traverse_depth=dependent_depth ) @@ -1098,11 +1140,6 @@ def main_script_replacements(cmd): # warn only if there was actually a CDash configuration. 
tty.warn("Unable to populate buildgroup without CDash credentials") - service_job_retries = { - "max": 2, - "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"], - } - if copy_only_pipeline: stage_names.append("copy") sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"]) @@ -1162,7 +1199,10 @@ def main_script_replacements(cmd): ) final_job["when"] = "always" - final_job["retry"] = service_job_retries + final_job["retry"] = { + "max": 2, + "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"], + } final_job["interruptible"] = True final_job["dependencies"] = [] diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 36aa992c639c9c..2f0e053265c2d1 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -1650,3 +1650,45 @@ def fake_dyn_mapping_urlopener(*args, **kwargs): assert job.get("variables", {}).get("MY_VAR") == "hello" assert "ignored_field" not in job assert "unallowed_field" not in job + + +def test_ci_generate_noop_no_concretize( + tmpdir, + working_env, + mutable_mock_env_path, + install_mockery, + mock_packages, + monkeypatch, + ci_base_environment, +): + # Write the enviroment file + filename = str(tmpdir.join("spack.yaml")) + with open(filename, "w") as f: + f.write( + """\ +spack: + specs: + - pkg-a + mirrors: + buildcache-destination: https://my.fake.mirror + ci: + type: gitlab +""" + ) + + def fake_compute_affected(r1=None, r2=None): + return [] + + monkeypatch.setattr(ci, "compute_affected_packages", fake_compute_affected) + monkeypatch.setenv("SPACK_PRUNE_UNTOUCHED", "TRUE") # enables pruning of untouched specs + + with tmpdir.as_cwd(): + env_cmd("create", "test", "./spack.yaml") + outputfile = str(tmpdir.join(".gitlab-ci.yml")) + + with ev.read("test"): + ci_cmd("generate", "--output-file", outputfile) + + with open(outputfile) as of: + pipeline_doc = syaml.load(of.read()) + assert "no-specs-to-rebuild" in pipeline_doc From e42a4a8bac1f7e79abdd3227a7a743a55044a270 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Fri, 1 Nov 2024 16:49:26 -0700 Subject: [PATCH 024/208] parse_specs: unify specs based on concretizer:unify (#44843) Currently, the `concretizer:unify:` config option only affects environments. With this PR, it now affects any group of specs given to a command using the `parse_specs(*, concretize=True)` interface. 
- [x] implementation in `parse_specs`
- [x] tests
- [x] ensure all commands that accept multiple specs and concretize use `parse_specs` interface

---------

Co-authored-by: Todd Gamblin 
Signed-off-by: Todd Gamblin 
---
 lib/spack/spack/cmd/__init__.py            |  43 +++++-
 lib/spack/spack/cmd/clean.py               |   3 +-
 lib/spack/spack/cmd/patch.py               |   3 +-
 lib/spack/spack/cmd/stage.py               |   2 +-
 lib/spack/spack/concretize.py              | 140 +++++++++++++++++++
 lib/spack/spack/environment/environment.py | 149 +++++----------------
 lib/spack/spack/solver/asp.py              |   2 +
 lib/spack/spack/test/cmd/install.py        |   2 +-
 lib/spack/spack/test/concretize.py         |  18 +++
 9 files changed, 237 insertions(+), 125 deletions(-)

diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index c481e931312988..031b29f9528c79 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -173,10 +173,29 @@ def parse_specs(
     arg_string = " ".join([quote_kvp(arg) for arg in args])
 
     specs = spack.parser.parse(arg_string)
-    for spec in specs:
-        if concretize:
-            spec.concretize(tests=tests)
-    return specs
+    if not concretize:
+        return specs
+
+    to_concretize = [(s, None) for s in specs]
+    return _concretize_spec_pairs(to_concretize, tests=tests)
+
+
+def _concretize_spec_pairs(to_concretize, tests=False):
+    """Helper method that concretizes abstract specs from a list of (abstract, concrete) pairs.
+
+    Any spec with a concrete spec associated with it will concretize to that spec. Any spec
+    with ``None`` for its concrete spec will be newly concretized. This method respects unification
+    rules from config."""
+    unify = spack.config.get("concretizer:unify", False)
+
+    concretize_method = spack.concretize.concretize_separately  # unify: false
+    if unify is True:
+        concretize_method = spack.concretize.concretize_together
+    elif unify == "when_possible":
+        concretize_method = spack.concretize.concretize_together_when_possible
+
+    concretized = concretize_method(*to_concretize, tests=tests)
+    return [concrete for _, concrete in concretized]
 
 
 def matching_spec_from_env(spec):
@@ -192,6 +211,22 @@ def matching_spec_from_env(spec):
     return spec.concretized()
 
 
+def matching_specs_from_env(specs):
+    """
+    Same as ``matching_spec_from_env`` but respects spec unification rules.
+
+    For each spec, if there is a matching spec in the environment it is used. If no
+    matching spec is found, this will return the given spec but concretized in the
+    context of the active environment and other given specs, with unification rules applied.
+    """
+    env = ev.active_environment()
+    spec_pairs = [(spec, env.matching_spec(spec) if env else None) for spec in specs]
+    additional_concrete_specs = (
+        [(concrete, concrete) for _, concrete in env.concretized_specs()] if env else []
+    )
+    return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]
+
+
 def disambiguate_spec(spec, env, local=False, installed=True, first=False):
     """Given a spec, figure out which installed package it refers to.
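
A minimal sketch of the (abstract, concrete) pair convention consumed by
`_concretize_spec_pairs` above; `kept_abstract`/`kept_concrete` are hypothetical
placeholders for a pair carried over from an earlier solve:

    import spack.cmd
    import spack.spec

    to_concretize = [
        (spack.spec.Spec("hdf5+mpi"), None),  # None: concretize this spec fresh
        (kept_abstract, kept_concrete),  # already solved: passed through unchanged
    ]
    concrete_specs = spack.cmd._concretize_spec_pairs(to_concretize)
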
diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index 0b8fb6d6bbf6f2..59d650fd12a7cc 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -105,7 +105,8 @@ def clean(parser, args): # Then do the cleaning falling through the cases if args.specs: specs = spack.cmd.parse_specs(args.specs, concretize=False) - specs = list(spack.cmd.matching_spec_from_env(x) for x in specs) + specs = spack.cmd.matching_specs_from_env(specs) + for spec in specs: msg = "Cleaning build stage [{0}]" tty.msg(msg.format(spec.short_spec)) diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py index 885ff2f746c352..dbd18f7948a1c4 100644 --- a/lib/spack/spack/cmd/patch.py +++ b/lib/spack/spack/cmd/patch.py @@ -33,8 +33,9 @@ def patch(parser, args): spack.config.set("config:checksum", False, scope="command_line") specs = spack.cmd.parse_specs(args.specs, concretize=False) + specs = spack.cmd.matching_specs_from_env(specs) for spec in specs: - _patch(spack.cmd.matching_spec_from_env(spec).package) + _patch(spec.package) def _patch_env(env: ev.Environment): diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py index af5fa412ea7e1a..20da92926f8104 100644 --- a/lib/spack/spack/cmd/stage.py +++ b/lib/spack/spack/cmd/stage.py @@ -47,8 +47,8 @@ def stage(parser, args): if len(specs) > 1 and custom_path: tty.die("`--path` requires a single spec, but multiple were provided") + specs = spack.cmd.matching_specs_from_env(specs) for spec in specs: - spec = spack.cmd.matching_spec_from_env(spec) pkg = spec.package if custom_path: diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 387c7f2de27efd..fabfdbb523a749 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -5,11 +5,17 @@ """ (DEPRECATED) Used to contain the code for the original concretizer """ +import sys +import time from contextlib import contextmanager from itertools import chain +from typing import Tuple + +import llnl.util.tty as tty import spack.config import spack.error +from spack.spec import Spec CHECK_COMPILER_EXISTENCE = True @@ -83,6 +89,140 @@ def concretize_specs_together(*abstract_specs, **kwargs): return [s.copy() for s in result.specs] +def concretize_together(*spec_list, **kwargs): + """Given a number of specs as input, tries to concretize them together. + + Args: + tests (bool or list or set): False to run no tests, True to test + all packages, or a list of package names to run tests for some + *spec_list: list of tuples to concretize. First entry is abstract spec, second entry is + already concrete spec or None if not yet concretized + + Returns: + List of tuples of abstract and concretized specs + """ + to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list] + abstract_specs = [abstract for abstract, _ in spec_list] + concrete_specs = concretize_specs_together(*to_concretize, **kwargs) + return list(zip(abstract_specs, concrete_specs)) + + +def concretize_together_when_possible(*spec_list, **kwargs): + """Given a number of specs as input, tries to concretize them together to the extent possible. + + See documentation for ``unify: when_possible`` concretization for the precise definition of + "to the extent possible". + + Args: + tests (bool or list or set): False to run no tests, True to test + all packages, or a list of package names to run tests for some + *spec_list: list of tuples to concretize. 
First entry is abstract spec, second entry is
+            already concrete spec or None if not yet concretized
+
+    Returns:
+        List of tuples of abstract and concretized specs
+    """
+    to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
+    old_concrete_to_abstract = {
+        concrete: abstract for (abstract, concrete) in spec_list if concrete
+    }
+
+    result_by_user_spec = {}
+    solver = spack.solver.asp.Solver()
+    allow_deprecated = spack.config.get("config:deprecated", False)
+    for result in solver.solve_in_rounds(
+        to_concretize, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
+    ):
+        result_by_user_spec.update(result.specs_by_input)
+
+    # If the "abstract" spec is a concrete spec from the previous concretization,
+    # translate it back to an abstract spec. Otherwise, keep the abstract spec
+    return [
+        (old_concrete_to_abstract.get(abstract, abstract), concrete)
+        for abstract, concrete in sorted(result_by_user_spec.items())
+    ]
+
+
+def concretize_separately(*spec_list, **kwargs):
+    """Given a number of specs as input, tries to concretize them separately.
+
+    Args:
+        tests (bool or list or set): False to run no tests, True to test
+            all packages, or a list of package names to run tests for some
+        *spec_list: list of tuples to concretize. First entry is abstract spec, second entry is
+            already concrete spec or None if not yet concretized
+
+    Returns:
+        List of tuples of abstract and concretized specs
+    """
+    tests = kwargs.get("tests", False)
+    to_concretize = [abstract for abstract, concrete in spec_list if not concrete]
+    args = [
+        (i, str(abstract), tests)
+        for i, abstract in enumerate(to_concretize)
+        if not abstract.concrete
+    ]
+    ret = [(i, abstract) for i, abstract in enumerate(to_concretize) if abstract.concrete]
+    # Ensure we don't try to bootstrap clingo in parallel
+    with spack.bootstrap.ensure_bootstrap_configuration():
+        spack.bootstrap.ensure_clingo_importable_or_raise()
+
+    # Ensure all the indexes have been built or updated, since
+    # otherwise the processes in the pool may timeout on waiting
+    # for a write lock. We do this indirectly by retrieving the
+    # provider index, which should in turn trigger the update of
+    # all the indexes if there's any need for that.
+ _ = spack.repo.PATH.provider_index + + # Ensure we have compilers in compilers.yaml to avoid that + # processes try to write the config file in parallel + _ = spack.compilers.all_compilers_config(spack.config.CONFIG) + + # Early return if there is nothing to do + if len(args) == 0: + # Still have to combine the things that were passed in as abstract with the things + # that were passed in as pairs + return [(abstract, concrete) for abstract, (_, concrete) in zip(to_concretize, ret)] + [ + (abstract, concrete) for abstract, concrete in spec_list if concrete + ] + + # Solve the environment in parallel on Linux + # TODO: support parallel concretization on macOS and Windows + num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True)) + + for j, (i, concrete, duration) in enumerate( + spack.util.parallel.imap_unordered( + spack.concretize._concretize_task, + args, + processes=num_procs, + debug=tty.is_debug(), + maxtaskperchild=1, + ) + ): + ret.append((i, concrete)) + percentage = (j + 1) / len(args) * 100 + tty.verbose( + f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} " + f"{to_concretize[i].colored_str}" + ) + sys.stdout.flush() + + # Add specs in original order + ret.sort(key=lambda x: x[0]) + + return [(abstract, concrete) for abstract, (_, concrete) in zip(to_concretize, ret)] + [ + (abstract, concrete) for abstract, concrete in spec_list if concrete + ] + + +def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]: + index, spec_str, tests = packed_arguments + with tty.SuppressOutput(msg_enabled=False): + start = time.time() + spec = Spec(spec_str).concretized(tests=tests) + return index, spec, time.time() - start + + class UnavailableCompilerVersionError(spack.error.SpackError): """Raised when there is no available compiler that satisfies a compiler spec.""" diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 81a2223c4995b8..b61332a0abc92c 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -11,12 +11,10 @@ import re import shutil import stat -import sys -import time import urllib.parse import urllib.request import warnings -from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union +from typing import Any, Dict, Iterable, List, Optional, Tuple, Union import llnl.util.filesystem as fs import llnl.util.tty as tty @@ -57,6 +55,8 @@ from spack.spec_list import SpecList from spack.util.path import substitute_path_variables +SpecPair = Tuple[spack.spec.Spec, spack.spec.Spec] + #: environment variable used to indicate the active environment spack_env_var = "SPACK_ENV" @@ -1510,7 +1510,7 @@ def deconcretize(self, spec: spack.spec.Spec, concrete: bool = True): def _get_specs_to_concretize( self, - ) -> Tuple[Set[spack.spec.Spec], Set[spack.spec.Spec], List[spack.spec.Spec]]: + ) -> Tuple[List[spack.spec.Spec], List[spack.spec.Spec], List[SpecPair]]: """Compute specs to concretize for unify:true and unify:when_possible. This includes new user specs and any already concretized specs. 
@@ -1520,19 +1520,17 @@ def _get_specs_to_concretize(
         """
         # Exit early if the set of concretized specs is the set of user specs
-        new_user_specs = set(self.user_specs) - set(self.concretized_user_specs)
-        kept_user_specs = set(self.user_specs) & set(self.concretized_user_specs)
-        kept_user_specs |= set(self.included_user_specs)
+        new_user_specs = list(set(self.user_specs) - set(self.concretized_user_specs))
+        kept_user_specs = list(set(self.user_specs) & set(self.concretized_user_specs))
+        kept_user_specs += self.included_user_specs
         if not new_user_specs:
             return new_user_specs, kept_user_specs, []
 
-        concrete_specs_to_keep = [
-            concrete
+        specs_to_concretize = [(s, None) for s in new_user_specs] + [
+            (abstract, concrete)
             for abstract, concrete in self.concretized_specs()
             if abstract in kept_user_specs
         ]
-
-        specs_to_concretize = list(new_user_specs) + concrete_specs_to_keep
         return new_user_specs, kept_user_specs, specs_to_concretize
 
     def _concretize_together_where_possible(
@@ -1546,39 +1544,26 @@ def _concretize_together_where_possible(
         if not new_user_specs:
             return []
 
-        old_concrete_to_abstract = {
-            concrete: abstract for (abstract, concrete) in self.concretized_specs()
-        }
-
         self.concretized_user_specs = []
         self.concretized_order = []
         self.specs_by_hash = {}
 
-        result_by_user_spec = {}
-        solver = spack.solver.asp.Solver()
-        allow_deprecated = spack.config.get("config:deprecated", False)
-        for result in solver.solve_in_rounds(
-            specs_to_concretize, tests=tests, allow_deprecated=allow_deprecated
-        ):
-            result_by_user_spec.update(result.specs_by_input)
-
-        result = []
-        for abstract, concrete in sorted(result_by_user_spec.items()):
-            # If the "abstract" spec is a concrete spec from the previous concretization
-            # translate it back to an abstract spec. Otherwise, keep the abstract spec
-            abstract = old_concrete_to_abstract.get(abstract, abstract)
-            if abstract in new_user_specs:
-                result.append((abstract, concrete))
-
-            # Only add to the environment if it's from this environment (not just included)
+        ret = []
+        result = spack.concretize.concretize_together_when_possible(
+            *specs_to_concretize, tests=tests
+        )
+        for abstract, concrete in result:
+            # Only add to the environment if it's from this environment (not just included)
+            if abstract in self.user_specs:
+                self._add_concrete_spec(abstract, concrete)
 
-        return result
+            # Return only the new specs
+            if abstract in new_user_specs:
+                ret.append((abstract, concrete))
 
-    def _concretize_together(
-        self, tests: bool = False
-    ) -> List[Tuple[spack.spec.Spec, spack.spec.Spec]]:
+        return ret
+
+    def _concretize_together(self, tests: bool = False) -> List[SpecPair]:
         """Concretization strategy that concretizes all the specs in the same DAG.
""" @@ -1592,7 +1577,7 @@ def _concretize_together( self.specs_by_hash = {} try: - concrete_specs: List[spack.spec.Spec] = spack.concretize.concretize_specs_together( + concretized_specs: List[SpecPair] = spack.concretize.concretize_together( *specs_to_concretize, tests=tests ) except spack.error.UnsatisfiableSpecError as e: @@ -1611,16 +1596,13 @@ def _concretize_together( ) raise - # set() | set() does not preserve ordering, even though sets are ordered - ordered_user_specs = list(new_user_specs) + list(kept_user_specs) - concretized_specs = [x for x in zip(ordered_user_specs, concrete_specs)] for abstract, concrete in concretized_specs: # Don't add if it's just included if abstract in self.user_specs: self._add_concrete_spec(abstract, concrete) - # zip truncates the longer list, which is exactly what we want here - return list(zip(new_user_specs, concrete_specs)) + # Return the portion of the return value that is new + return concretized_specs[: len(new_user_specs)] def _concretize_separately(self, tests=False): """Concretization strategy that concretizes separately one @@ -1642,71 +1624,16 @@ def _concretize_separately(self, tests=False): concrete = old_specs_by_hash[h] self._add_concrete_spec(s, concrete, new=False) - # Concretize any new user specs that we haven't concretized yet - args, root_specs, i = [], [], 0 - for uspec in self.user_specs: - if uspec not in old_concretized_user_specs: - root_specs.append(uspec) - args.append((i, str(uspec), tests)) - i += 1 - - # Ensure we don't try to bootstrap clingo in parallel - with spack.bootstrap.ensure_bootstrap_configuration(): - spack.bootstrap.ensure_clingo_importable_or_raise() - - # Ensure all the indexes have been built or updated, since - # otherwise the processes in the pool may timeout on waiting - # for a write lock. We do this indirectly by retrieving the - # provider index, which should in turn trigger the update of - # all the indexes if there's any need for that. 
- _ = spack.repo.PATH.provider_index - - # Ensure we have compilers in compilers.yaml to avoid that - # processes try to write the config file in parallel - _ = spack.compilers.all_compilers_config(spack.config.CONFIG) - - # Early return if there is nothing to do - if len(args) == 0: - return [] - - # Solve the environment in parallel on Linux - start = time.time() - num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True)) - - # TODO: support parallel concretization on macOS and Windows - msg = "Starting concretization" - if sys.platform not in ("darwin", "win32") and num_procs > 1: - msg += f" pool with {num_procs} processes" - tty.msg(msg) - - batch = [] - for j, (i, concrete, duration) in enumerate( - spack.util.parallel.imap_unordered( - _concretize_task, - args, - processes=num_procs, - debug=tty.is_debug(), - maxtaskperchild=1, - ) - ): - batch.append((i, concrete)) - percentage = (j + 1) / len(args) * 100 - tty.verbose( - f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} " - f"{root_specs[i].colored_str}" - ) - sys.stdout.flush() + to_concretize = [ + (root, None) for root in self.user_specs if root not in old_concretized_user_specs + ] + concretized_specs = spack.concretize.concretize_separately(*to_concretize, tests=tests) - # Add specs in original order - batch.sort(key=lambda x: x[0]) - by_hash = {} # for attaching information on test dependencies - for root, (_, concrete) in zip(root_specs, batch): - self._add_concrete_spec(root, concrete) + by_hash = {} + for abstract, concrete in concretized_specs: + self._add_concrete_spec(abstract, concrete) by_hash[concrete.dag_hash()] = concrete - finish = time.time() - tty.msg(f"Environment concretized in {finish - start:.2f} seconds") - # Unify the specs objects, so we get correct references to all parents self._read_lockfile_dict(self._to_lockfile_dict()) @@ -1726,11 +1653,7 @@ def _concretize_separately(self, tests=False): test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals ) - results = [ - (abstract, self.specs_by_hash[h]) - for abstract, h in zip(self.concretized_user_specs, self.concretized_order) - ] - return results + return concretized_specs @property def default_view(self): @@ -2537,14 +2460,6 @@ def display_specs(specs): print(tree_string) -def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]: - index, spec_str, tests = packed_arguments - with tty.SuppressOutput(msg_enabled=False): - start = time.time() - spec = Spec(spec_str).concretized(tests=tests) - return index, spec, time.time() - start - - def make_repo_path(root): """Make a RepoPath from the repo subdirectories in an environment.""" path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 97fbd03e8f1e8c..af2b8a70c3153f 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -515,6 +515,8 @@ def _compute_specs_from_answer_set(self): best = min(self.answers) opt, _, answer = best for input_spec in self.abstract_specs: + # The specs must be unified to get here, so it is safe to associate any satisfying spec + # with the input. 
Multiple inputs may be matched to the same concrete spec node = SpecBuilder.make_node(pkg=input_spec.name) if input_spec.virtual: providers = [ diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py index 13721b2a0d52e6..445f376b1b41e4 100644 --- a/lib/spack/spack/test/cmd/install.py +++ b/lib/spack/spack/test/cmd/install.py @@ -906,7 +906,7 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd): specfile = "./spec.json" with open(specfile, "w") as f: f.write(spec.to_json()) - + print(spec.to_json()) install("--log-file=cdash_reports", "--log-format=cdash", specfile) # Verify Configure.xml exists with expected contents. report_dir = tmpdir.join("cdash_reports") diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 553d8fd6426791..4d9940ea9bb815 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -14,6 +14,7 @@ import llnl.util.lang import spack.binary_distribution +import spack.cmd import spack.compiler import spack.compilers import spack.concretize @@ -3106,3 +3107,20 @@ def test_reuse_prefers_standard_over_git_versions( test_spec = spack.spec.Spec("git-ref-package@2").concretized() assert git_spec.dag_hash() != test_spec.dag_hash() assert standard_spec.dag_hash() == test_spec.dag_hash() + + +@pytest.mark.parametrize("unify", [True, "when_possible", False]) +def test_spec_unification(unify, mutable_config, mock_packages): + spack.config.set("concretizer:unify", unify) + a = "pkg-a" + a_restricted = "pkg-a^pkg-b foo=baz" + b = "pkg-b foo=none" + + unrestricted = spack.cmd.parse_specs([a, b], concretize=True) + a_concrete_unrestricted = [s for s in unrestricted if s.name == "pkg-a"][0] + b_concrete_unrestricted = [s for s in unrestricted if s.name == "pkg-b"][0] + assert (a_concrete_unrestricted["pkg-b"] == b_concrete_unrestricted) == (unify is not False) + + maybe_fails = pytest.raises if unify is True else llnl.util.lang.nullcontext + with maybe_fails(spack.solver.asp.UnsatisfiableSpecError): + _ = spack.cmd.parse_specs([a_restricted, b], concretize=True) From 19e3ab83cfee8fdbd2c19e23c1b93ef83e92fbc4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 23:55:18 +0000 Subject: [PATCH 025/208] build(deps): bump python-levenshtein in /lib/spack/docs (#47372) Bumps [python-levenshtein](https://github.com/rapidfuzz/python-Levenshtein) from 0.26.0 to 0.26.1. - [Release notes](https://github.com/rapidfuzz/python-Levenshtein/releases) - [Changelog](https://github.com/rapidfuzz/python-Levenshtein/blob/main/HISTORY.md) - [Commits](https://github.com/rapidfuzz/python-Levenshtein/compare/v0.26.0...v0.26.1) --- updated-dependencies: - dependency-name: python-levenshtein dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 530bb17522e00b..879fe33874579b 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -2,7 +2,7 @@ sphinx==8.1.3 sphinxcontrib-programoutput==0.17 sphinx_design==0.6.1 sphinx-rtd-theme==3.0.1 -python-levenshtein==0.26.0 +python-levenshtein==0.26.1 docutils==0.21.2 pygments==2.18.0 urllib3==2.2.3 From 133895e7852a03ffeac473d16949c92df90e65ee Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 2 Nov 2024 09:03:42 +0100 Subject: [PATCH 026/208] Rework the schema for reusing environments (#47364) Currently, the schema reads: from: - type: environment: path_or_name but this can't be extended easily to other types, e.g. to buildcaches, without duplicating the extension keys. Use instead: from: - type: environment path: path_or_name --- lib/spack/spack/schema/concretizer.py | 25 ++++++++----------------- lib/spack/spack/solver/asp.py | 4 ++-- lib/spack/spack/test/cmd/env.py | 6 +++--- 3 files changed, 13 insertions(+), 22 deletions(-) diff --git a/lib/spack/spack/schema/concretizer.py b/lib/spack/spack/schema/concretizer.py index b52b305ed9a12d..86e58de2580fe6 100644 --- a/lib/spack/spack/schema/concretizer.py +++ b/lib/spack/spack/schema/concretizer.py @@ -32,24 +32,15 @@ "type": "object", "properties": { "type": { - "oneOf": [ - { - "type": "string", - "enum": [ - "local", - "buildcache", - "environment", - "external", - ], - }, - { - "type": "object", - "properties": { - "environment": {"type": "string"} - }, - }, - ] + "type": "string", + "enum": [ + "local", + "buildcache", + "external", + "environment", + ], }, + "path": {"type": "string"}, "include": LIST_OF_SPECS, "exclude": LIST_OF_SPECS, }, diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index af2b8a70c3153f..ba50ebccd01736 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -4103,8 +4103,8 @@ def __init__(self, configuration: spack.config.Configuration) -> None: for source in reuse_yaml.get("from", default_sources): include = source.get("include", default_include) exclude = source.get("exclude", default_exclude) - if isinstance(source["type"], dict): - env_dir = ev.as_env_dir(source["type"].get("environment")) + if source["type"] == "environment" and "path" in source: + env_dir = ev.as_env_dir(source["path"]) active_env = ev.active_environment() if active_env and env_dir in active_env.included_concrete_envs: # If environment is included as a concrete environment, use the local copy diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index f82cee10d723c3..87941de137ec4a 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -1941,7 +1941,7 @@ def configure_reuse(reuse_mode, combined_env) -> Optional[ev.Environment]: elif reuse_mode == "from_environment": _config = {"concretizer": {"reuse": {"from": [{"type": "environment"}]}}} elif reuse_mode == "from_environment_test1": - _config = {"concretizer": {"reuse": {"from": [{"type": {"environment": "test1"}}]}}} + _config = {"concretizer": {"reuse": {"from": [{"type": "environment", "path": "test1"}]}}} elif reuse_mode == "from_environment_external_test": # Create a new environment called external_test that enables the "debug" # The default is "~debug" @@ 
-1957,12 +1957,12 @@ def configure_reuse(reuse_mode, combined_env) -> Optional[ev.Environment]: # mpich@3.0 but with include concrete the mpich@1.0 +debug version from the # "external_test" environment will be used. _config = { - "concretizer": {"reuse": {"from": [{"type": {"environment": "external_test"}}]}}, + "concretizer": {"reuse": {"from": [{"type": "environment", "path": "external_test"}]}}, "packages": {"mpich": {"require": ["+debug"]}}, } elif reuse_mode == "from_environment_raise": _config = { - "concretizer": {"reuse": {"from": [{"type": {"environment": "not-a-real-env"}}]}} + "concretizer": {"reuse": {"from": [{"type": "environment", "path": "not-a-real-env"}]}} } # Disable unification in these tests to avoid confusing reuse due to unification using an # include concrete spec vs reuse due to the reuse configuration From b8461f3d2dc72007c63fb24af5694cee8b5791bc Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Sat, 2 Nov 2024 01:36:05 -0700 Subject: [PATCH 027/208] Remove ignored config:install_missing_compilers from unit tests (#47357) --- lib/spack/spack/test/installer.py | 34 +++++++++++++++---------------- 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index 8df6a1a6eaa00d..38bc1609821bc6 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -644,13 +644,12 @@ def test_prepare_for_install_on_installed(install_mockery, monkeypatch): def test_installer_init_requests(install_mockery): """Test of installer initial requests.""" spec_name = "dependent-install" - with spack.config.override("config:install_missing_compilers", True): - installer = create_installer([spec_name], {}) + installer = create_installer([spec_name], {}) - # There is only one explicit request in this case - assert len(installer.build_requests) == 1 - request = installer.build_requests[0] - assert request.pkg.name == spec_name + # There is only one explicit request in this case + assert len(installer.build_requests) == 1 + request = installer.build_requests[0] + assert request.pkg.name == spec_name @pytest.mark.parametrize("transitive", [True, False]) @@ -743,21 +742,20 @@ def _missing(*args, **kwargs): # Set the configuration to ensure _requeue_with_build_spec_tasks actually # does something. 
-    with spack.config.override("config:install_missing_compilers", True):
-        installer = create_installer(["depb"], {})
-        installer._init_queue()
-        request = installer.build_requests[0]
-        task = create_build_task(request.pkg)
+    installer = create_installer(["depb"], {})
+    installer._init_queue()
+    request = installer.build_requests[0]
+    task = create_build_task(request.pkg)
 
-        # Drop one of the specs so its task is missing before _install_task
-        popped_task = installer._pop_task()
-        assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
+    # Drop one of the specs so its task is missing before _install_task
+    popped_task = installer._pop_task()
+    assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
 
-        monkeypatch.setattr(task, "execute", _missing)
-        installer._install_task(task, None)
+    monkeypatch.setattr(task, "execute", _missing)
+    installer._install_task(task, None)
 
-        # Ensure the dropped task/spec was added back by _install_task
-        assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
+    # Ensure the dropped task/spec was added back by _install_task
+    assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
 
 
 def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):

From 55918c31d2c1d601be5ad671471a62885c065f09 Mon Sep 17 00:00:00 2001
From: Paul Gessinger 
Date: Sat, 2 Nov 2024 11:13:37 +0100
Subject: [PATCH 028/208] root: require +opengl when +aqua is on (#47349)

According to https://github.com/root-project/root/issues/7160, if building with
`-Dcocoa=ON`, the build must also be configured with `-Dopengl=ON`, since
otherwise the build encounters missing includes.

This is/was a silent failure in ROOT CMake, but I believe it has been made an
explicit failure some time this year.
---
 var/spack/repos/builtin/packages/root/package.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py
index 31990569102be7..106bde97757840 100644
--- a/var/spack/repos/builtin/packages/root/package.py
+++ b/var/spack/repos/builtin/packages/root/package.py
@@ -429,6 +429,8 @@ class Root(CMakePackage):
     # Incompatible variants
     if sys.platform == "darwin":
         conflicts("+opengl", when="~x ~aqua", msg="root+opengl requires X or Aqua")
+        # https://github.com/root-project/root/issues/7160
+        conflicts("+aqua", when="~opengl", msg="+aqua requires OpenGL to be enabled")
     else:
         conflicts("+opengl", when="~x", msg="root+opengl requires X")
     conflicts("+math", when="~gsl", msg="root+math requires GSL")

From 632c0095691ffe43f7def4ebe2fddfdddf037945 Mon Sep 17 00:00:00 2001
From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com>
Date: Sat, 2 Nov 2024 16:18:13 -0700
Subject: [PATCH 029/208] e4s ci stacks: reduce package prefs (#47381)

---
 .../stacks/e4s-neoverse-v2/spack.yaml         | 15 -------------
 .../stacks/e4s-neoverse_v1/spack.yaml         | 15 -------------
 .../stacks/e4s-power/spack.yaml               |  6 ------
 .../cloud_pipelines/stacks/e4s/spack.yaml     | 21 ++++---------------
 4 files changed, 4 insertions(+), 53 deletions(-)

diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse-v2/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse-v2/spack.yaml
index 5d5ca5fbd65bed..1eb61ec5e93d23 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse-v2/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse-v2/spack.yaml
@@ -14,14 +14,10 @@ spack:
       variants: +mpi
     binutils:
       variants: +ld +gold +headers +libiberty ~nls
-    elfutils:
-      variants: 
~nls hdf5: variants: +fortran +hl +shared libfabric: variants: fabrics=sockets,tcp,udp,rxm - libunwind: - variants: +pic +xz openblas: variants: threads=openmp trilinos: @@ -29,25 +25,14 @@ spack: +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - xz: - variants: +pic mpi: require: mpich mpich: require: '~wrapperrpath ~hwloc %gcc target=neoverse_v2' tbb: require: intel-tbb - boost: - variants: +atomic +chrono +container +date_time +exception +filesystem +graph - +iostreams +locale +log +math +mpi +multithreaded +program_options +random - +regex +serialization +shared +signals +stacktrace +system +test +thread +timer - cxxstd=17 visibility=global - libffi: - require: "@3.4.4 %gcc target=neoverse_v2" vtk-m: require: "+examples %gcc target=neoverse_v2" - cuda: - version: [11.8.0] paraview: require: "+examples %gcc target=neoverse_v2" diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml index 948a57f8b9a007..24a488fbe921bf 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml @@ -14,14 +14,10 @@ spack: variants: +mpi binutils: variants: +ld +gold +headers +libiberty ~nls - elfutils: - variants: ~nls hdf5: variants: +fortran +hl +shared libfabric: variants: fabrics=sockets,tcp,udp,rxm - libunwind: - variants: +pic +xz openblas: variants: threads=openmp trilinos: @@ -29,27 +25,16 @@ spack: +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - xz: - variants: +pic mpi: require: mpich mpich: require: '~wrapperrpath ~hwloc %gcc target=neoverse_v1' tbb: require: intel-tbb - boost: - variants: +atomic +chrono +container +date_time +exception +filesystem +graph - +iostreams +locale +log +math +mpi +multithreaded +program_options +random - +regex +serialization +shared +signals +stacktrace +system +test +thread +timer - cxxstd=17 visibility=global - libffi: - require: "@3.4.4 %gcc target=neoverse_v1" vtk-m: require: "+examples %gcc target=neoverse_v1" paraview: require: "+examples %gcc target=neoverse_v1" - cuda: - version: [11.8.0] specs: # CPU diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index 0453392c1b04f1..a770b0a299a13d 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -15,14 +15,10 @@ spack: variants: +mpi cuda_arch=70 binutils: variants: +ld +gold +headers +libiberty ~nls - elfutils: - variants: ~nls hdf5: variants: +fortran +hl +shared libfabric: variants: fabrics=sockets,tcp,udp,rxm - libunwind: - variants: +pic +xz openblas: variants: threads=openmp trilinos: @@ -42,8 +38,6 @@ spack: require: "~tcmalloc %gcc@9.4.0 target=ppc64le" tbb: require: intel-tbb - libffi: - require: "@3.4.4 %gcc@9.4.0 target=ppc64le" vtk-m: require: "+examples %gcc@9.4.0 target=ppc64le" cuda: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 
dda9aefb9554e4..0b81e53d568d1d 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -14,8 +14,6 @@ spack: variants: +mpi binutils: variants: +ld +gold +headers +libiberty ~nls - elfutils: - variants: ~nls hdf5: variants: +fortran +hl +shared libfabric: @@ -29,30 +27,19 @@ spack: +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - xz: - variants: +pic mpi: require: mpich mpich: - require: '~wrapperrpath ~hwloc' + require: '~wrapperrpath ~hwloc target=x86_64_v3' tbb: require: intel-tbb - boost: - variants: +atomic +chrono +container +date_time +exception +filesystem +graph - +iostreams +locale +log +math +mpi +multithreaded +program_options +random - +regex +serialization +shared +signals +stacktrace +system +test +thread +timer - cxxstd=17 visibility=global - libffi: - require: "@3.4.4" vtk-m: - require: "+examples" + require: "+examples target=x86_64_v3" visit: - require: "~gui" - cuda: - version: [11.8.0] + require: "~gui target=x86_64_v3" paraview: # Don't build GUI support or GLX rendering for HPC/container deployments - require: "@5.11 +examples ~qt ^[virtuals=gl] osmesa" + require: "@5.11 +examples ~qt ^[virtuals=gl] osmesa target=x86_64_v3" specs: # CPU From ff9568fa2f589d8182d65ed9c50116edc9c2b3a4 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Sun, 3 Nov 2024 16:32:35 -0600 Subject: [PATCH 030/208] sherpa: add v3.0.1 (#47388) * sherpa: add v3.0.1 * sherpa: no depends_on py-setuptools --- var/spack/repos/builtin/packages/sherpa/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/sherpa/package.py b/var/spack/repos/builtin/packages/sherpa/package.py index f8d0ffa7933196..7d0010a7c2284a 100644 --- a/var/spack/repos/builtin/packages/sherpa/package.py +++ b/var/spack/repos/builtin/packages/sherpa/package.py @@ -22,6 +22,7 @@ class Sherpa(CMakePackage, AutotoolsPackage): license("GPL-3.0-only") + version("3.0.1", sha256="ff5f43e79a9a10919391242307a771eca0c57b0462c11bfb99ee4a0fe8c48c58") version("3.0.0", sha256="e460d8798b323c4ef663293a2c918b1463e9641b35703a54d70d25c852c67d36") version("2.2.15", sha256="0300fd719bf6a089b7dc5441f720e669ac1cb030045d87034a4733bee98e7bbc") version("2.2.14", sha256="f17d88d7f3bc4234a9db3872e8a3c1f3ef99e1e2dc881ada5ddf848715dc82da") From 2664303d7acf831f84566e7275d8d4a233ab5a46 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Sun, 3 Nov 2024 16:48:34 -0600 Subject: [PATCH 031/208] pythia8: add v8.312 (#47389) * pythia8: add v8.312 * pythia8: update homepage url --- var/spack/repos/builtin/packages/pythia8/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/pythia8/package.py b/var/spack/repos/builtin/packages/pythia8/package.py index b483dd036258ae..7bf7494b7db79a 100644 --- a/var/spack/repos/builtin/packages/pythia8/package.py +++ b/var/spack/repos/builtin/packages/pythia8/package.py @@ -12,7 +12,7 @@ class Pythia8(AutotoolsPackage): the evolution from a few-body hard process to a complex multiparticle final state.""" - homepage = "http://home.thep.lu.se/Pythia/" + homepage = "https://pythia.org/" url = "https://pythia.org/download/pythia83/pythia8306.tgz" list_url = "https://pythia.org/releases/" @@ -22,6 +22,7 @@ class Pythia8(AutotoolsPackage): 
license("GPL-2.0-only") + version("8.312", sha256="bad98e2967b687046c4568c9091d630a0c31b628745c021a994aba4d1d50f8ea") version("8.311", sha256="2782d5e429c1543c67375afe547fd4c4ca0720309deb008f7db78626dc7d1464") version("8.310", sha256="90c811abe7a3d2ffdbf9b4aeab51cf6e0a5a8befb4e3efa806f3d5b9c311e227") version("8.309", sha256="5bdafd9f2c4a1c47fd8a4e82fb9f0d8fcfba4de1003b8e14be4e0347436d6c33") From 395c911689d7a3ee426660f3fab334ccee04da1a Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 4 Nov 2024 07:35:16 +0100 Subject: [PATCH 032/208] Specs: propagated variants affect `==` equality (#47376) This PR changes the semantic of == for spec so that: hdf5++mpi == hdf5+mpi won't hold true anymore. It also changes the constrain semantic, so that a non-propagating variant always override a propagating variant. This means: (hdf5++mpi).constrain(hdf5+mpi) -> hdf5+mpi Before we had a very weird semantic, that was supposed to be tested by unit-tests: (libelf++debug).constrain(libelf+debug+foo) -> libelf++debug++foo This semantic has been dropped, as it was never really tested due to the == bug. --- lib/spack/spack/spec.py | 6 ++-- lib/spack/spack/test/spec_semantics.py | 46 +++++++++++++++++++------- lib/spack/spack/variant.py | 5 ++- 3 files changed, 42 insertions(+), 15 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 0ec0a3009d6987..ba3a0f9c379080 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -877,8 +877,9 @@ def constrain(self, other): # Next, if any flags in other propagate, we force them to propagate in our case shared = list(sorted(set(other[flag_type]) - extra_other)) for x, y in _shared_subset_pair_iterate(shared, sorted(self[flag_type])): - if x.propagate: - y.propagate = True + if y.propagate is True and x.propagate is False: + changed = True + y.propagate = False # TODO: what happens if flag groups with a partial (but not complete) # intersection specify different behaviors for flag propagation? 
@@ -933,6 +934,7 @@ def _cmp_iter(self): def flags(): for flag in v: yield flag + yield flag.propagate yield flags diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index a821c53f2fb934..1b12a8a80315c3 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -231,7 +231,7 @@ class TestSpecSemantics: ("mpich+foo", "mpich foo=True", "mpich+foo"), ("mpich++foo", "mpich foo=True", "mpich+foo"), ("mpich foo=true", "mpich+foo", "mpich+foo"), - ("mpich foo==true", "mpich++foo", "mpich+foo"), + ("mpich foo==true", "mpich++foo", "mpich++foo"), ("mpich~foo", "mpich foo=FALSE", "mpich~foo"), ("mpich~~foo", "mpich foo=FALSE", "mpich~foo"), ("mpich foo=False", "mpich~foo", "mpich~foo"), @@ -271,17 +271,17 @@ class TestSpecSemantics: ("mpich+foo", "mpich", "mpich+foo"), ("mpich~foo", "mpich", "mpich~foo"), ("mpich foo=1", "mpich", "mpich foo=1"), - ("mpich", "mpich++foo", "mpich+foo"), + ("mpich", "mpich++foo", "mpich++foo"), ("libelf+debug", "libelf+foo", "libelf+debug+foo"), ("libelf+debug", "libelf+debug+foo", "libelf+debug+foo"), ("libelf debug=2", "libelf foo=1", "libelf debug=2 foo=1"), ("libelf debug=2", "libelf debug=2 foo=1", "libelf debug=2 foo=1"), ("libelf+debug", "libelf~foo", "libelf+debug~foo"), ("libelf+debug", "libelf+debug~foo", "libelf+debug~foo"), - ("libelf++debug", "libelf+debug+foo", "libelf++debug++foo"), - ("libelf debug==2", "libelf foo=1", "libelf debug==2 foo==1"), - ("libelf debug==2", "libelf debug=2 foo=1", "libelf debug==2 foo==1"), - ("libelf++debug", "libelf++debug~foo", "libelf++debug~~foo"), + ("libelf++debug", "libelf+debug+foo", "libelf+debug+foo"), + ("libelf debug==2", "libelf foo=1", "libelf debug==2 foo=1"), + ("libelf debug==2", "libelf debug=2 foo=1", "libelf debug=2 foo=1"), + ("libelf++debug", "libelf++debug~foo", "libelf++debug~foo"), ("libelf foo=bar,baz", "libelf foo=*", "libelf foo=bar,baz"), ("libelf foo=*", "libelf foo=bar,baz", "libelf foo=bar,baz"), ( @@ -367,19 +367,24 @@ def test_abstract_specs_can_constrain_each_other(self, lhs, rhs, expected): 'mpich cflags="-O3 -g"', 'mpich cflags=="-O3"', 'mpich cflags="-O3 -g"', + 'mpich cflags="-O3 -g"', + [], + [], + ), + ( + 'mpich cflags=="-O3 -g"', + 'mpich cflags=="-O3"', + 'mpich cflags=="-O3 -g"', 'mpich cflags=="-O3 -g"', - [("cflags", "-O3")], - [("cflags", "-O3")], + [("cflags", "-O3"), ("cflags", "-g")], + [("cflags", "-O3"), ("cflags", "-g")], ), ], ) def test_constrain_compiler_flags( self, lhs, rhs, expected_lhs, expected_rhs, propagated_lhs, propagated_rhs ): - """Constraining is asymmetric for compiler flags. Also note that - Spec equality does not account for flag propagation, so the checks - here are manual. 
- """ + """Constraining is asymmetric for compiler flags.""" lhs, rhs, expected_lhs, expected_rhs = ( Spec(lhs), Spec(rhs), @@ -1904,3 +1909,20 @@ def test_old_format_strings_trigger_error(default_mock_concretization): s = Spec("pkg-a").concretized() with pytest.raises(SpecFormatStringError): s.format("${PACKAGE}-${VERSION}-${HASH}") + + +@pytest.mark.regression("47362") +@pytest.mark.parametrize( + "lhs,rhs", + [ + ("hdf5 +mpi", "hdf5++mpi"), + ("hdf5 cflags==-g", "hdf5 cflags=-g"), + ("hdf5 +mpi ++shared", "hdf5+mpi +shared"), + ("hdf5 +mpi cflags==-g", "hdf5++mpi cflag=-g"), + ], +) +def test_equality_discriminate_on_propagation(lhs, rhs): + """Tests that == can discriminate abstract specs based on their 'propagation' status""" + s, t = Spec(lhs), Spec(rhs) + assert s != t + assert len({s, t}) == 2 diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index a15c760a0a2347..3cc5ba2e0ba25d 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -251,7 +251,7 @@ def implicit_variant_conversion(method): def convert(self, other): # We don't care if types are different as long as I can convert other to type(self) try: - other = type(self)(other.name, other._original_value) + other = type(self)(other.name, other._original_value, propagate=other.propagate) except (error.SpecError, ValueError): return False return method(self, other) @@ -379,6 +379,7 @@ def _value_setter(self, value: ValueType) -> None: def _cmp_iter(self) -> Iterable: yield self.name yield from (str(v) for v in self.value_as_tuple) + yield self.propagate def copy(self) -> "AbstractVariant": """Returns an instance of a variant equivalent to self @@ -453,6 +454,7 @@ def constrain(self, other: "AbstractVariant") -> bool: values.remove("*") self._value_setter(",".join(str(v) for v in values)) + self.propagate = self.propagate and other.propagate return old_value != self.value def __contains__(self, item: Union[str, bool]) -> bool: @@ -557,6 +559,7 @@ def constrain(self, other: "AbstractVariant") -> bool: if self.value != other.value: raise UnsatisfiableVariantSpecError(other.value, self.value) + self.propagate = self.propagate and other.propagate return False def __contains__(self, item: ValueType) -> bool: From 0acd6ae7b2a4f265a12c1fffb62ecc52519a9211 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 4 Nov 2024 09:47:47 +0100 Subject: [PATCH 033/208] lua-luaposix: add missing libxcrypt dependency (#47395) --- var/spack/repos/builtin/packages/lua-luaposix/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/lua-luaposix/package.py b/var/spack/repos/builtin/packages/lua-luaposix/package.py index 04bc97d6e490b1..579b172ce33523 100644 --- a/var/spack/repos/builtin/packages/lua-luaposix/package.py +++ b/var/spack/repos/builtin/packages/lua-luaposix/package.py @@ -23,6 +23,7 @@ class LuaLuaposix(LuaPackage): version("33.4.0", sha256="e66262f5b7fe1c32c65f17a5ef5ffb31c4d1877019b4870a5d373e2ab6526a21") version("33.2.1", sha256="4fb34dfea67f4cf3194cdecc6614c9aea67edc3c4093d34137669ea869c358e1") - depends_on("c", type="build") # generated + depends_on("c", type="build") + depends_on("libxcrypt", when="platform=linux") depends_on("lua-bit32", when="^lua-lang@5.1") From 87329639f29182d35fb8a45b1aa5e1366cedbf47 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 02:50:41 -0600 Subject: [PATCH 034/208] elasticsearch, kibana, logstash: add v8.15.2 (#46873) --- .../builtin/packages/elasticsearch/package.py | 17 ++++++++++++---- 
.../repos/builtin/packages/kibana/package.py | 7 ++++--- .../builtin/packages/logstash/package.py | 20 +++++++++++++++---- 3 files changed, 33 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/elasticsearch/package.py b/var/spack/repos/builtin/packages/elasticsearch/package.py index b17ec0668613b4..200a8360539541 100644 --- a/var/spack/repos/builtin/packages/elasticsearch/package.py +++ b/var/spack/repos/builtin/packages/elasticsearch/package.py @@ -13,14 +13,23 @@ class Elasticsearch(Package): """ homepage = "https://www.elastic.co/" - url = "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.2.4.tar.gz" + url = "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-8.15.2-linux-x86_64.tar.gz" - version("6.4.0", sha256="e9786efb5cecd12adee2807c7640ba9a1ab3b484d2e87497bb8d0b6df0e24f01") - version("6.3.0", sha256="0464127140820d82b24bd2830232131ea85bcd49267a8bc7365e4fa391dee2a3") - version("6.2.4", sha256="91e6f1ea1e1dd39011e7a703d2751ca46ee374665b08b0bfe17e0c0c27000e8e") + version("8.15.2", sha256="0b6905ede457be9d1d73d0b6be1c3a7c7c6220829846b532f2604ad30ba7308f") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2018-3831 + version("6.4.0", sha256="e9786efb5cecd12adee2807c7640ba9a1ab3b484d2e87497bb8d0b6df0e24f01") + version("6.3.0", sha256="0464127140820d82b24bd2830232131ea85bcd49267a8bc7365e4fa391dee2a3") + version("6.2.4", sha256="91e6f1ea1e1dd39011e7a703d2751ca46ee374665b08b0bfe17e0c0c27000e8e") depends_on("java", type="run") + def url_for_version(self, version): + if self.spec.satisfies("@:6"): + return f"https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}.tar.gz" + else: + return f"https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}-linux-x86_64.tar.gz" + def install(self, spec, prefix): dirs = ["bin", "config", "lib", "modules", "plugins"] diff --git a/var/spack/repos/builtin/packages/kibana/package.py b/var/spack/repos/builtin/packages/kibana/package.py index bbf8146ec2fb01..0245929e6b2cbf 100644 --- a/var/spack/repos/builtin/packages/kibana/package.py +++ b/var/spack/repos/builtin/packages/kibana/package.py @@ -13,9 +13,10 @@ class Kibana(Package): homepage = "https://www.elastic.co/products/kibana" url = "https://artifacts.elastic.co/downloads/kibana/kibana-6.4.0-linux-x86_64.tar.gz" - version("6.4.0", sha256="df2056105a08c206a1adf9caed09a152a53429a0f1efc1ba3ccd616092d78aee") - - depends_on("cxx", type="build") # generated + version("8.15.2", sha256="b1f8082a4200867078170e92ad299e293ee514f5fdbb96b7a0d1de17a880d1eb") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2019-7609 + version("6.4.0", sha256="df2056105a08c206a1adf9caed09a152a53429a0f1efc1ba3ccd616092d78aee") depends_on("java", type="run") diff --git a/var/spack/repos/builtin/packages/logstash/package.py b/var/spack/repos/builtin/packages/logstash/package.py index 5f6318faa03bef..4ad690c764d89f 100644 --- a/var/spack/repos/builtin/packages/logstash/package.py +++ b/var/spack/repos/builtin/packages/logstash/package.py @@ -15,12 +15,24 @@ class Logstash(Package): """ homepage = "https://artifacts.elastic.co" - url = "https://artifacts.elastic.co/downloads/logstash/logstash-6.6.0.tar.gz" + url = "https://artifacts.elastic.co/downloads/logstash/logstash-8.15.2-linux-x86_64.tar.gz" - version("6.6.0", sha256="5a9a8b9942631e9d4c3dfb8d47075276e8c2cff343841145550cc0c1cfe7bba7") + version("8.15.2", 
sha256="fc75c8cad1016b07f7aeeeeb7ea23f4195ab1beee2ced282f11ff6d0e84f7e51") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2019-7612 + version("6.6.0", sha256="5a9a8b9942631e9d4c3dfb8d47075276e8c2cff343841145550cc0c1cfe7bba7") - depends_on("c", type="build") # generated - depends_on("cxx", type="build") # generated + depends_on("java@11:") + + def url_for_version(self, version): + if self.spec.satisfies("@:6"): + return f"https://artifacts.elastic.co/downloads/logstash/logstash-{version}.tar.gz" + else: + return f"https://artifacts.elastic.co/downloads/logstash/logstash-{version}-linux-x86_64.tar.gz" def install(self, spec, prefix): install_tree(".", prefix) + + def setup_run_environment(self, env): + # do not use the bundled jdk + env.set("LS_JAVA_HOME", self.spec["java"].home) From 86ebcabd46210f2db297e05645843dfbd5405ea0 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 02:55:33 -0600 Subject: [PATCH 035/208] cups: add v2.4.11 (#47390) --- var/spack/repos/builtin/packages/cups/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/cups/package.py b/var/spack/repos/builtin/packages/cups/package.py index b2c5b84ecfba5d..34c5fdd80bdd5a 100644 --- a/var/spack/repos/builtin/packages/cups/package.py +++ b/var/spack/repos/builtin/packages/cups/package.py @@ -20,9 +20,12 @@ class Cups(AutotoolsPackage): license("Apache-2.0", checked_by="wdconinc") + version("2.4.11", sha256="9a88fe1da3a29a917c3fc67ce6eb3178399d68e1a548c6d86c70d9b13651fd71") version("2.4.10", sha256="d75757c2bc0f7a28b02ee4d52ca9e4b1aa1ba2affe16b985854f5336940e5ad7") - version("2.3.3", sha256="261fd948bce8647b6d5cb2a1784f0c24cc52b5c4e827b71d726020bcc502f3ee") - version("2.2.3", sha256="66701fe15838f2c892052c913bde1ba106bbee2e0a953c955a62ecacce76885f") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2023-4504 + version("2.3.3", sha256="261fd948bce8647b6d5cb2a1784f0c24cc52b5c4e827b71d726020bcc502f3ee") + version("2.2.3", sha256="66701fe15838f2c892052c913bde1ba106bbee2e0a953c955a62ecacce76885f") depends_on("c", type="build") depends_on("cxx", type="build") From 8296aaf175bc9a6401d9eb9546c8c4e0471d82c2 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 02:57:40 -0600 Subject: [PATCH 036/208] minizip: add v1.3.1 (#47379) --- .../repos/builtin/packages/minizip/package.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/minizip/package.py b/var/spack/repos/builtin/packages/minizip/package.py index 55aa8c2cea1925..af3ac94d21cfe2 100644 --- a/var/spack/repos/builtin/packages/minizip/package.py +++ b/var/spack/repos/builtin/packages/minizip/package.py @@ -14,10 +14,14 @@ class Minizip(AutotoolsPackage): license("Zlib") - version("1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1") + version("1.3.1", sha256="9a93b2b7dfdac77ceba5a558a580e74667dd6fede4585b91eefb60f03b72df23") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2022-37434 + version( + "1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1" + ) - depends_on("c", type="build") # generated - depends_on("cxx", type="build") # generated + depends_on("c", type="build") configure_directory = "contrib/minizip" @@ -28,8 +32,9 @@ class Minizip(AutotoolsPackage): depends_on("zlib-api") # error: implicit declaration of function 'mkdir' is invalid in C99 - 
patch("implicit.patch", when="%apple-clang@12:") - patch("implicit.patch", when="%gcc@7.3.0:") + with when("@:1.2.11"): + patch("implicit.patch", when="%apple-clang@12:") + patch("implicit.patch", when="%gcc@7.3.0:") # statically link to libz.a # https://github.com/Homebrew/homebrew-core/blob/master/Formula/minizip.rb From d63f06e4b7e991703064cbd02748f16e3c7289f3 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Mon, 4 Nov 2024 03:58:40 -0500 Subject: [PATCH 037/208] pumi: add version 2.2.9 (#47380) --- var/spack/repos/builtin/packages/pumi/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/pumi/package.py b/var/spack/repos/builtin/packages/pumi/package.py index a2ce76b27ce621..76bb0a7618037b 100644 --- a/var/spack/repos/builtin/packages/pumi/package.py +++ b/var/spack/repos/builtin/packages/pumi/package.py @@ -30,6 +30,9 @@ class Pumi(CMakePackage): # scorec/core develop branch and we prefer not to expose spack users # to the added instability. version("master", submodules=True, branch="master") + version( + "2.2.9", submodules=True, commit="f87525cae7597322edfb2ccf1c7d4437402d9481" + ) # tag 2.2.9 version( "2.2.8", submodules=True, commit="736bb87ccd8db51fc499a1b91e53717a88841b1f" ) # tag 2.2.8 From f05033b0d2fdec1a3d91fb73e779862d65e6ac55 Mon Sep 17 00:00:00 2001 From: Christophe Prud'homme Date: Mon, 4 Nov 2024 10:00:26 +0100 Subject: [PATCH 038/208] cpr: add +pic and +shared variants (#47281) --- var/spack/repos/builtin/packages/cpr/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/cpr/package.py b/var/spack/repos/builtin/packages/cpr/package.py index 07a38e0015d498..0f6daf03e42f61 100644 --- a/var/spack/repos/builtin/packages/cpr/package.py +++ b/var/spack/repos/builtin/packages/cpr/package.py @@ -20,6 +20,9 @@ class Cpr(CMakePackage): version("1.10.4", sha256="88462d059cd3df22c4d39ae04483ed50dfd2c808b3effddb65ac3b9aa60b542d") version("1.9.2", sha256="3bfbffb22c51f322780d10d3ca8f79424190d7ac4b5ad6ad896de08dbd06bf31") + variant("pic", default=True, description="Position independent code") + variant("shared", default=True, description="Build shared library") + depends_on("cxx", type="build") depends_on("curl") @@ -32,4 +35,6 @@ def cmake_args(self): self.define("CPR_USE_SYSTEM_GTEST", True), self.define(f"CPR{_force}_USE_SYSTEM_CURL", True), self.define("CPR_ENABLE_SSL", True), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"), ] From 417c48b07a9d3de5af55c7f4ce5c21588199de3e Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 03:01:36 -0600 Subject: [PATCH 039/208] py-flask-cors: add v4.0.0 (#47374) --- var/spack/repos/builtin/packages/py-flask-cors/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-flask-cors/package.py b/var/spack/repos/builtin/packages/py-flask-cors/package.py index 6eac377c7b8dbf..4f708db2764b0b 100644 --- a/var/spack/repos/builtin/packages/py-flask-cors/package.py +++ b/var/spack/repos/builtin/packages/py-flask-cors/package.py @@ -16,6 +16,7 @@ class PyFlaskCors(PythonPackage): license("MIT") + version("4.0.0", sha256="f268522fcb2f73e2ecdde1ef45e2fd5c71cc48fe03cffb4b441c6d1b40684eb0") version("3.0.10", sha256="b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de") depends_on("py-setuptools", type="build") From 2fc056e27cb6e362b56946907c48feda61766319 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 
03:02:15 -0600
Subject: [PATCH 040/208] py-flask-compress: add v1.14 (#47373)

---
 var/spack/repos/builtin/packages/py-flask-compress/package.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/var/spack/repos/builtin/packages/py-flask-compress/package.py b/var/spack/repos/builtin/packages/py-flask-compress/package.py
index 2b037a3da45857..fa5395f9a36ba4 100644
--- a/var/spack/repos/builtin/packages/py-flask-compress/package.py
+++ b/var/spack/repos/builtin/packages/py-flask-compress/package.py
@@ -15,7 +15,11 @@ class PyFlaskCompress(PythonPackage):

     license("MIT")

+    version("1.14", sha256="e46528f37b91857012be38e24e65db1a248662c3dc32ee7808b5986bf1d123ee")
     version("1.4.0", sha256="468693f4ddd11ac6a41bca4eb5f94b071b763256d54136f77957cfee635badb3")

     depends_on("py-setuptools", type="build")
+    depends_on("py-setuptools@0.42:", type="build", when="@1.10:")
+    depends_on("py-setuptools-scm@3.4: +toml", type="build", when="@1.10:")
     depends_on("py-flask@0.9:", type=("build", "run"))
+    depends_on("py-brotli", type="run", when="@1.5:")

From fcdaccfeb694653632f2a0f783e37f97f24fb61e Mon Sep 17 00:00:00 2001
From: Weiqun Zhang
Date: Mon, 4 Nov 2024 01:06:49 -0800
Subject: [PATCH 041/208] amrex: add v24.11 (#47371)

---
 var/spack/repos/builtin/packages/amrex/package.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/var/spack/repos/builtin/packages/amrex/package.py b/var/spack/repos/builtin/packages/amrex/package.py
index 724b5130c19169..07c4d2ae66c8f5 100644
--- a/var/spack/repos/builtin/packages/amrex/package.py
+++ b/var/spack/repos/builtin/packages/amrex/package.py
@@ -26,6 +26,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
     license("BSD-3-Clause")

     version("develop", branch="development")
+    version("24.11", sha256="31cc37b39f15e02252875815f6066046fc56a479bf459362b9889b0d6a202df6")
     version("24.10", sha256="a2d15e417bd7c41963749338e884d939c80c5f2fcae3279fe3f1b463e3e4208a")
     version("24.09", sha256="a1435d16532d04a1facce9a9ae35d68a57f7cd21a5f22a6590bde3c265ea1449")
     version("24.08", sha256="e09623e715887a19a1f86ed6fdb8335022fd6c03f19372d8f13b55cdeeadf5de")

From 4c247e206c31490efc14127e1fa237d124e57f7b Mon Sep 17 00:00:00 2001
From: "Paul R. C. Kent"
Date: Mon, 4 Nov 2024 04:18:24 -0500
Subject: [PATCH 042/208] llvm: add v19.1.3 (#47325)

---
 var/spack/repos/builtin/packages/llvm/package.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py
index 98a27080b3f9af..208d73f0940533 100644
--- a/var/spack/repos/builtin/packages/llvm/package.py
+++ b/var/spack/repos/builtin/packages/llvm/package.py
@@ -56,6 +56,7 @@ class Llvm(CMakePackage, CudaPackage, LlvmDetection, CompilerPackage):
     license("Apache-2.0 WITH LLVM-exception")

     version("main", branch="main")
+    version("19.1.3", sha256="e5106e2bef341b3f5e41340e4b6c6a58259f4021ad801acf14e88f1a84567b05")
     version("19.1.2", sha256="622cb6c5e95a3bb7e9876c4696a65671f235bd836cfd0c096b272f6c2ada41e7")
     version("19.1.1", sha256="115dfd98a353d05bffdab3f80db22f159da48aca0124e8c416f437adcd54b77f")
     version("19.1.0", sha256="0a08341036ca99a106786f50f9c5cb3fbe458b3b74cab6089fd368d0edb2edfe")

From a93bd6cee49a77ba7fb45d41043d6b8ca6fe7d2e Mon Sep 17 00:00:00 2001
From: Larry Knox
Date: Mon, 4 Nov 2024 03:21:17 -0600
Subject: [PATCH 043/208] hdf5: add develop-2.0 (#47299)

Update the version for the HDF5 develop branch to develop-2.0, matching
the new version on that branch. Remove develop-1.16, since it has been
decided that the next release will be HDF5 2.0.0.
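For reference, develop versions in Spack track git branches rather than
release tarballs, so the renaming boils down to directives along the
lines of this sketch (the exact lines appear in the diff below):

```python
# branch-tracking versions: the version name is only a label,
# the named git branch is the actual source that gets fetched
version("develop-2.0", branch="develop")     # future 2.0.0 development
version("develop-1.14", branch="hdf5_1_14")  # 1.14 maintenance branch
```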
--- var/spack/repos/builtin/packages/hdf5/package.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index 01a9992f3c2934..12e99d9bf61dbd 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -38,8 +38,7 @@ class Hdf5(CMakePackage): # The 'develop' version is renamed so that we could uninstall (or patch) it # without affecting other develop version. - version("develop-1.17", branch="develop") - version("develop-1.16", branch="hdf5_1_16") + version("develop-2.0", branch="develop") version("develop-1.14", branch="hdf5_1_14") version("develop-1.12", branch="hdf5_1_12") version("develop-1.10", branch="hdf5_1_10") From 9a94ea7dfeda457b4712be71a645140c7488d869 Mon Sep 17 00:00:00 2001 From: Brian Spilner Date: Mon, 4 Nov 2024 10:26:28 +0100 Subject: [PATCH 044/208] icon: add a maintainer (#47323) --- var/spack/repos/builtin/packages/icon/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/icon/package.py b/var/spack/repos/builtin/packages/icon/package.py index 229e3942866839..e75d8d6b05d028 100644 --- a/var/spack/repos/builtin/packages/icon/package.py +++ b/var/spack/repos/builtin/packages/icon/package.py @@ -16,7 +16,7 @@ class Icon(AutotoolsPackage): homepage = "https://www.icon-model.org" url = "https://gitlab.dkrz.de/icon/icon-model/-/archive/icon-2024.01-public/icon-model-icon-2024.01-public.tar.gz" - maintainers("skosukhin") + maintainers("skosukhin", "Try2Code") license("BSD-3-Clause", checked_by="skosukhin") From 18936771ffad57749d012a6c55e6d9c560d40069 Mon Sep 17 00:00:00 2001 From: Andrey Prokopenko Date: Mon, 4 Nov 2024 04:29:32 -0500 Subject: [PATCH 045/208] arborx: remove Trilinos dependency for @1.6: (#47305) --- .../repos/builtin/packages/arborx/package.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/var/spack/repos/builtin/packages/arborx/package.py b/var/spack/repos/builtin/packages/arborx/package.py index b5fc91a175d542..da7b9e857c6b9a 100644 --- a/var/spack/repos/builtin/packages/arborx/package.py +++ b/var/spack/repos/builtin/packages/arborx/package.py @@ -63,7 +63,7 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage): for backend in kokkos_backends: deflt, descr = kokkos_backends[backend] variant(backend.lower(), default=deflt, description=descr) - variant("trilinos", default=False, description="use Kokkos from Trilinos") + variant("trilinos", default=False, when="@:1.5", description="use Kokkos from Trilinos") depends_on("cmake@3.12:", type="build") depends_on("cmake@3.16:", type="build", when="@1.0:") @@ -77,8 +77,8 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage): depends_on("kokkos@3.6.00:", when="@1.3~trilinos") depends_on("kokkos@3.7.01:", when="@1.4:1.4.1~trilinos") depends_on("kokkos@4.0.00:", when="@1.5~trilinos") - depends_on("kokkos@4.1.00:", when="@1.6~trilinos") - depends_on("kokkos@4.2.00:", when="@1.7:~trilinos") + depends_on("kokkos@4.1.00:", when="@1.6") + depends_on("kokkos@4.2.00:", when="@1.7:") for backend in kokkos_backends: depends_on("kokkos+%s" % backend.lower(), when="~trilinos+%s" % backend.lower()) @@ -96,8 +96,9 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage): conflicts("^kokkos", when="+trilinos") depends_on("kokkos+cuda_lambda", when="~trilinos+cuda") - # Trilinos/Kokkos + # Trilinos with internal Kokkos # Notes: + # - starting with Trilinos 14.4, 
Trilinos' spack package uses external Kokkos # - current version of Trilinos package does not allow disabling Serial # - current version of Trilinos package does not allow enabling CUDA depends_on("trilinos+kokkos", when="+trilinos") @@ -106,18 +107,16 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage): depends_on("trilinos@13.4.0:", when="@1.3+trilinos") depends_on("trilinos@14.0.0:", when="@1.4:1.4.1+trilinos") depends_on("trilinos@14.2.0:", when="@1.5+trilinos") - depends_on("trilinos@14.4.0:", when="@1.6+trilinos") - depends_on("trilinos@15.1.0:", when="@1.7:+trilinos") patch("trilinos14.0-kokkos-major-version.patch", when="@1.4+trilinos ^trilinos@14.0.0") conflicts("~serial", when="+trilinos") def cmake_args(self): spec = self.spec - if "~trilinos" in spec: - kokkos_spec = spec["kokkos"] - else: + if "+trilinos" in spec: kokkos_spec = spec["trilinos"] + else: + kokkos_spec = spec["kokkos"] options = [ f"-DKokkos_ROOT={kokkos_spec.prefix}", From b1fd6dbb6d8c73891bc463ca6599e2bf96f84011 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 03:32:34 -0600 Subject: [PATCH 046/208] libxml2: add v2.11.9, v2.12.9, v2.13.4 (#47297) Co-authored-by: wdconinc --- .../repos/builtin/packages/libxml2/package.py | 47 ++++++++++++++----- 1 file changed, 34 insertions(+), 13 deletions(-) diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py index c799095ab659fc..048debd6589a8d 100644 --- a/var/spack/repos/builtin/packages/libxml2/package.py +++ b/var/spack/repos/builtin/packages/libxml2/package.py @@ -30,19 +30,40 @@ def url_for_version(self, version): license("MIT") - version("2.10.3", sha256="5d2cc3d78bec3dbe212a9d7fa629ada25a7da928af432c93060ff5c17ee28a9c") - version("2.10.2", sha256="d240abe6da9c65cb1900dd9bf3a3501ccf88b3c2a1cb98317d03f272dda5b265") - version("2.10.1", sha256="21a9e13cc7c4717a6c36268d0924f92c3f67a1ece6b7ff9d588958a6db9fb9d8") - version("2.9.14", sha256="60d74a257d1ccec0475e749cba2f21559e48139efba6ff28224357c7c798dfee") - version("2.9.13", sha256="276130602d12fe484ecc03447ee5e759d0465558fbc9d6bd144e3745306ebf0e") - version("2.9.12", sha256="c8d6681e38c56f172892c85ddc0852e1fd4b53b4209e7f4ebf17f7e2eae71d92") - version("2.9.11", sha256="886f696d5d5b45d780b2880645edf9e0c62a4fd6841b853e824ada4e02b4d331") - version("2.9.10", sha256="aafee193ffb8fe0c82d4afef6ef91972cbaf5feea100edc2f262750611b4be1f") - version("2.9.9", sha256="94fb70890143e3c6549f265cee93ec064c80a84c42ad0f23e85ee1fd6540a871") - version("2.9.8", sha256="0b74e51595654f958148759cfef0993114ddccccbb6f31aee018f3558e8e2732") - version("2.9.4", sha256="ffb911191e509b966deb55de705387f14156e1a56b21824357cdf0053233633c") - version("2.9.2", sha256="5178c30b151d044aefb1b08bf54c3003a0ac55c59c866763997529d60770d5bc") - version("2.7.8", sha256="cda23bc9ebd26474ca8f3d67e7d1c4a1f1e7106364b690d822e009fdc3c417ec") + version("2.13.4", sha256="65d042e1c8010243e617efb02afda20b85c2160acdbfbcb5b26b80cec6515650") + version("2.12.9", sha256="59912db536ab56a3996489ea0299768c7bcffe57169f0235e7f962a91f483590") + version("2.11.9", sha256="780157a1efdb57188ec474dca87acaee67a3a839c2525b2214d318228451809f") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2024-25062 + version( + "2.10.3", sha256="5d2cc3d78bec3dbe212a9d7fa629ada25a7da928af432c93060ff5c17ee28a9c" + ) + version( + "2.10.2", sha256="d240abe6da9c65cb1900dd9bf3a3501ccf88b3c2a1cb98317d03f272dda5b265" + ) + version( + "2.10.1", 
sha256="21a9e13cc7c4717a6c36268d0924f92c3f67a1ece6b7ff9d588958a6db9fb9d8" + ) + version( + "2.9.14", sha256="60d74a257d1ccec0475e749cba2f21559e48139efba6ff28224357c7c798dfee" + ) + version( + "2.9.13", sha256="276130602d12fe484ecc03447ee5e759d0465558fbc9d6bd144e3745306ebf0e" + ) + version( + "2.9.12", sha256="c8d6681e38c56f172892c85ddc0852e1fd4b53b4209e7f4ebf17f7e2eae71d92" + ) + version( + "2.9.11", sha256="886f696d5d5b45d780b2880645edf9e0c62a4fd6841b853e824ada4e02b4d331" + ) + version( + "2.9.10", sha256="aafee193ffb8fe0c82d4afef6ef91972cbaf5feea100edc2f262750611b4be1f" + ) + version("2.9.9", sha256="94fb70890143e3c6549f265cee93ec064c80a84c42ad0f23e85ee1fd6540a871") + version("2.9.8", sha256="0b74e51595654f958148759cfef0993114ddccccbb6f31aee018f3558e8e2732") + version("2.9.4", sha256="ffb911191e509b966deb55de705387f14156e1a56b21824357cdf0053233633c") + version("2.9.2", sha256="5178c30b151d044aefb1b08bf54c3003a0ac55c59c866763997529d60770d5bc") + version("2.7.8", sha256="cda23bc9ebd26474ca8f3d67e7d1c4a1f1e7106364b690d822e009fdc3c417ec") depends_on("c", type="build") # generated From 2782ae6d7eb8cbb5164b61afaf4a0d7e0547a2c2 Mon Sep 17 00:00:00 2001 From: Martin Lang <67915889+lang-m@users.noreply.github.com> Date: Mon, 4 Nov 2024 10:34:22 +0100 Subject: [PATCH 047/208] libpspio: new version 0.4.1 (#47287) --- var/spack/repos/builtin/packages/libpspio/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/libpspio/package.py b/var/spack/repos/builtin/packages/libpspio/package.py index b462c5c58c21c1..130cebd07c21cb 100644 --- a/var/spack/repos/builtin/packages/libpspio/package.py +++ b/var/spack/repos/builtin/packages/libpspio/package.py @@ -16,6 +16,7 @@ class Libpspio(AutotoolsPackage): license("MPL-2.0") + version("0.4.1", sha256="e4f87f6d8821042db3a88dad60ae07278e36ad2571e28f5d30f02d8b164b4daa") version("0.3.0", sha256="4dc092457e481e5cd703eeecd87e6f17749941fe274043550c8a2557a649afc5") depends_on("c", type="build") # generated From a86f164835f38b1f525d90dc813ef60d5ab58160 Mon Sep 17 00:00:00 2001 From: Martin Lang <67915889+lang-m@users.noreply.github.com> Date: Mon, 4 Nov 2024 10:35:11 +0100 Subject: [PATCH 048/208] nlopt: new version 2.8.0 (#47289) --- var/spack/repos/builtin/packages/nlopt/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/nlopt/package.py b/var/spack/repos/builtin/packages/nlopt/package.py index fd9ed9b18f4877..132fff424c70ed 100644 --- a/var/spack/repos/builtin/packages/nlopt/package.py +++ b/var/spack/repos/builtin/packages/nlopt/package.py @@ -22,6 +22,7 @@ class Nlopt(CMakePackage): version("master", branch="master") + version("2.8.0", sha256="e02a4956a69d323775d79fdaec7ba7a23ed912c7d45e439bc933d991ea3193fd") version("2.7.1", sha256="db88232fa5cef0ff6e39943fc63ab6074208831dc0031cf1545f6ecd31ae2a1a") version("2.7.0", sha256="b881cc2a5face5139f1c5a30caf26b7d3cb43d69d5e423c9d78392f99844499f") version("2.6.2", sha256="cfa5981736dd60d0109c534984c4e13c615314d3584cf1c392a155bfe1a3b17e") From cf3576a9bbcef2d6c0621e3f409742f0d987f126 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 4 Nov 2024 10:38:16 +0100 Subject: [PATCH 049/208] suite-sparse: fix missing rpaths for dependencies (#47394) --- var/spack/repos/builtin/packages/suite-sparse/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/suite-sparse/package.py b/var/spack/repos/builtin/packages/suite-sparse/package.py index 8b72df6f2dbeb3..a6c8fbaed82b91 100644 --- 
a/var/spack/repos/builtin/packages/suite-sparse/package.py +++ b/var/spack/repos/builtin/packages/suite-sparse/package.py @@ -249,6 +249,7 @@ def install(self, spec, prefix): # Mongoose directory finds libsuitesparseconfig.so in system # directories like /usr/lib. cmake_args = [ + "-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON", f"-DCMAKE_INSTALL_PREFIX={prefix}", f"-DCMAKE_LIBRARY_PATH={prefix.lib}", f"-DBLAS_ROOT={spec['blas'].prefix}", From 2148292bdba60f223e4238fab240490a5303fa81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?C=C3=A9dric=20Chevalier?= Date: Mon, 4 Nov 2024 10:39:16 +0100 Subject: [PATCH 050/208] kokkos and kokkos-kernels: use new urls for v4.4 and above (#47330) --- .../packages/kokkos-kernels/package.py | 129 ++++++++++++---- .../repos/builtin/packages/kokkos/package.py | 140 ++++++++++++++---- 2 files changed, 219 insertions(+), 50 deletions(-) diff --git a/var/spack/repos/builtin/packages/kokkos-kernels/package.py b/var/spack/repos/builtin/packages/kokkos-kernels/package.py index f8dc26ad8969e3..af6abf4c11aca1 100644 --- a/var/spack/repos/builtin/packages/kokkos-kernels/package.py +++ b/var/spack/repos/builtin/packages/kokkos-kernels/package.py @@ -11,7 +11,7 @@ class KokkosKernels(CMakePackage, CudaPackage): homepage = "https://github.com/kokkos/kokkos-kernels" git = "https://github.com/kokkos/kokkos-kernels.git" - url = "https://github.com/kokkos/kokkos-kernels/archive/4.0.00.tar.gz" + url = "https://github.com/kokkos/kokkos-kernels/releases/download/4.4.01/kokkos-kernels-4.4.01.tar.gz" tags = ["e4s"] @@ -21,32 +21,111 @@ class KokkosKernels(CMakePackage, CudaPackage): license("Apache-2.0 WITH LLVM-exception") - # generate checksum for each release tarball with the following command - # openssl sha256 kokkos-kernels-x.y.z.tar.gz version("develop", branch="develop") version("master", branch="master") - version("4.4.01", sha256="9f741449f5ace5a7d8a5a81194ff2108e5525d16f08fcd9bb6c9bb4853d7720d") - version("4.4.00", sha256="6559871c091eb5bcff53bae5a0f04f2298971d1aa1b2c135bd5a2dae3f9376a2") - version("4.3.01", sha256="749553a6ea715ba1e56fa0b13b42866bb9880dba7a94e343eadf40d08c68fab8") - version("4.3.00", sha256="03c3226ee97dbca4fa56fe69bc4eefa0673e23c37f2741943d9362424a63950e") - version("4.2.01", sha256="058052b3a40f5d4e447b7ded5c480f1b0d4aa78373b0bc7e43804d0447c34ca8") - version("4.2.00", sha256="c65df9a101dbbef2d8fd43c60c9ea85f2046bb3535fa1ad16e7c661ddd60401e") - version("4.1.00", sha256="d6a4108444ea226e43bf6a9c0dfc557f223a72b1142bf81aa78dd60e16ac2d56") - version("4.0.01", sha256="3f493fcb0244b26858ceb911be64092fbf7785616ad62c81abde0ea1ce86688a") - version("4.0.00", sha256="750079d0be1282d18ecd280e130ca303044ac399f1e5864488284b92f5ce0a86") - version("3.7.01", sha256="b2060f5894bdaf7f7d4793b90444fac260460cfa80595afcbcb955518864b446") - version("3.7.00", sha256="51bc6db3995392065656848e2b152cfd1c3a95a951ab18a3934278113d59f32b") - version("3.6.01", sha256="f000b156c8c0b80e85d38587907c11d9479aaf362408b812effeda5e22b24d0d") - version("3.6.00", sha256="2753643fd643b9eed9f7d370e0ff5fa957211d08a91aa75398e31cbc9e5eb0a5") - version("3.5.00", sha256="a03a41a047d95f9f07cd1e1d30692afdb75b5c705ef524e19c1d02fe60ccf8d1") - version("3.4.01", sha256="f504aa4afbffb58fa7c4430d0fdb8fd5690a268823fa15eb0b7d58dab9d351e6") - version("3.4.00", sha256="07ba11869e686cb0d47272d1ef494ccfbcdef3f93ff1c8b64ab9e136a53a227a") - version("3.3.01", sha256="0f21fe6b5a8b6ae7738290e293aa990719aefe88b32f84617436bfd6074a8f77") - version("3.3.00", sha256="8d7f78815301afb90ddba7914dce5b718cea792ac0c7350d2f8d00bd2ef1cece") 
- version("3.2.01", sha256="c486e5cac19e354a517498c362838619435734d64b44f44ce909b0531c21d95c") - version("3.2.00", sha256="8ac20ee28ae7813ce1bda461918800ad57fdbac2af86ef5d1ba74e83e10956de") - version("3.1.00", sha256="27fea241ae92f41bd5b070b1a590ba3a56a06aca750207a98bea2f64a4a40c89") - version("3.0.00", sha256="e4b832aed3f8e785de24298f312af71217a26067aea2de51531e8c1e597ef0e6") + version("4.4.01", sha256="4a32bc8330e0113856bdf181df94cc4f9902e3cebb5dc7cea5948f30df03bfa1") + version("4.4.00", sha256="66d5c3f728a8c7689159c97006996164ea00fd39702476220e3dbf2a05c49e8f") + + version( + "4.3.01", + sha256="749553a6ea715ba1e56fa0b13b42866bb9880dba7a94e343eadf40d08c68fab8", + url="https://github.com/kokkos/kokkos-kernels/archive/4.3.01.tar.gz", + ) + version( + "4.3.00", + sha256="03c3226ee97dbca4fa56fe69bc4eefa0673e23c37f2741943d9362424a63950e", + url="https://github.com/kokkos/kokkos-kernels/archive/4.3.00.tar.gz", + ) + version( + "4.2.01", + sha256="058052b3a40f5d4e447b7ded5c480f1b0d4aa78373b0bc7e43804d0447c34ca8", + url="https://github.com/kokkos/kokkos-kernels/archive/4.2.01.tar.gz", + ) + version( + "4.2.00", + sha256="c65df9a101dbbef2d8fd43c60c9ea85f2046bb3535fa1ad16e7c661ddd60401e", + url="https://github.com/kokkos/kokkos-kernels/archive/4.2.00.tar.gz", + ) + version( + "4.1.00", + sha256="d6a4108444ea226e43bf6a9c0dfc557f223a72b1142bf81aa78dd60e16ac2d56", + url="https://github.com/kokkos/kokkos-kernels/archive/4.1.00.tar.gz", + ) + version( + "4.0.01", + sha256="3f493fcb0244b26858ceb911be64092fbf7785616ad62c81abde0ea1ce86688a", + url="https://github.com/kokkos/kokkos-kernels/archive/4.0.01.tar.gz", + ) + version( + "4.0.00", + sha256="750079d0be1282d18ecd280e130ca303044ac399f1e5864488284b92f5ce0a86", + url="https://github.com/kokkos/kokkos-kernels/archive/4.0.00.tar.gz", + ) + version( + "3.7.01", + sha256="b2060f5894bdaf7f7d4793b90444fac260460cfa80595afcbcb955518864b446", + url="https://github.com/kokkos/kokkos-kernels/archive/3.7.01.tar.gz", + ) + version( + "3.7.00", + sha256="51bc6db3995392065656848e2b152cfd1c3a95a951ab18a3934278113d59f32b", + url="https://github.com/kokkos/kokkos-kernels/archive/3.7.00.tar.gz", + ) + version( + "3.6.01", + sha256="f000b156c8c0b80e85d38587907c11d9479aaf362408b812effeda5e22b24d0d", + url="https://github.com/kokkos/kokkos-kernels/archive/3.6.01.tar.gz", + ) + version( + "3.6.00", + sha256="2753643fd643b9eed9f7d370e0ff5fa957211d08a91aa75398e31cbc9e5eb0a5", + url="https://github.com/kokkos/kokkos-kernels/archive/3.6.00.tar.gz", + ) + version( + "3.5.00", + sha256="a03a41a047d95f9f07cd1e1d30692afdb75b5c705ef524e19c1d02fe60ccf8d1", + url="https://github.com/kokkos/kokkos-kernels/archive/3.5.00.tar.gz", + ) + version( + "3.4.01", + sha256="f504aa4afbffb58fa7c4430d0fdb8fd5690a268823fa15eb0b7d58dab9d351e6", + url="https://github.com/kokkos/kokkos-kernels/archive/3.4.01.tar.gz", + ) + version( + "3.4.00", + sha256="07ba11869e686cb0d47272d1ef494ccfbcdef3f93ff1c8b64ab9e136a53a227a", + url="https://github.com/kokkos/kokkos-kernels/archive/3.4.00.tar.gz", + ) + version( + "3.3.01", + sha256="0f21fe6b5a8b6ae7738290e293aa990719aefe88b32f84617436bfd6074a8f77", + url="https://github.com/kokkos/kokkos-kernels/archive/3.3.01.tar.gz", + ) + version( + "3.3.00", + sha256="8d7f78815301afb90ddba7914dce5b718cea792ac0c7350d2f8d00bd2ef1cece", + url="https://github.com/kokkos/kokkos-kernels/archive/3.3.00.tar.gz", + ) + version( + "3.2.01", + sha256="c486e5cac19e354a517498c362838619435734d64b44f44ce909b0531c21d95c", + 
url="https://github.com/kokkos/kokkos-kernels/archive/3.2.01.tar.gz", + ) + version( + "3.2.00", + sha256="8ac20ee28ae7813ce1bda461918800ad57fdbac2af86ef5d1ba74e83e10956de", + url="https://github.com/kokkos/kokkos-kernels/archive/3.2.00.tar.gz", + ) + version( + "3.1.00", + sha256="27fea241ae92f41bd5b070b1a590ba3a56a06aca750207a98bea2f64a4a40c89", + url="https://github.com/kokkos/kokkos-kernels/archive/3.1.00.tar.gz", + ) + version( + "3.0.00", + sha256="e4b832aed3f8e785de24298f312af71217a26067aea2de51531e8c1e597ef0e6", + url="https://github.com/kokkos/kokkos-kernels/archive/3.0.00.tar.gz", + ) depends_on("cxx", type="build") # generated diff --git a/var/spack/repos/builtin/packages/kokkos/package.py b/var/spack/repos/builtin/packages/kokkos/package.py index 65e6b0adb8ba11..7eeac574d9d3e9 100644 --- a/var/spack/repos/builtin/packages/kokkos/package.py +++ b/var/spack/repos/builtin/packages/kokkos/package.py @@ -15,7 +15,7 @@ class Kokkos(CMakePackage, CudaPackage, ROCmPackage): homepage = "https://github.com/kokkos/kokkos" git = "https://github.com/kokkos/kokkos.git" - url = "https://github.com/kokkos/kokkos/archive/3.6.00.tar.gz" + url = "https://github.com/kokkos/kokkos/releases/download/4.4.01/kokkos-4.4.01.tar.gz" tags = ["e4s"] @@ -27,30 +27,120 @@ class Kokkos(CMakePackage, CudaPackage, ROCmPackage): version("master", branch="master") version("develop", branch="develop") - version("4.4.01", sha256="3f7096d17eaaa4004c7497ac082bf1ae3ff47b5104149e54af021a89414c3682") - version("4.4.00", sha256="c638980cb62c34969b8c85b73e68327a2cb64f763dd33e5241f5fd437170205a") - version("4.3.01", sha256="5998b7c732664d6b5e219ccc445cd3077f0e3968b4be480c29cd194b4f45ec70") - version("4.3.00", sha256="53cf30d3b44dade51d48efefdaee7a6cf109a091b702a443a2eda63992e5fe0d") - version("4.2.01", sha256="cbabbabba021d00923fb357d2e1b905dda3838bd03c885a6752062fe03c67964") - version("4.2.00", sha256="ac08765848a0a6ac584a0a46cd12803f66dd2a2c2db99bb17c06ffc589bf5be8") - version("4.1.00", sha256="cf725ea34ba766fdaf29c884cfe2daacfdc6dc2d6af84042d1c78d0f16866275") - version("4.0.01", sha256="bb942de8afdd519fd6d5d3974706bfc22b6585a62dd565c12e53bdb82cd154f0") - version("4.0.00", sha256="1829a423883d4b44223c7c3a53d3c51671145aad57d7d23e6a1a4bebf710dcf6") - version("3.7.02", sha256="5024979f06bc8da2fb696252a66297f3e0e67098595a0cc7345312b3b4aa0f54") - version("3.7.01", sha256="0481b24893d1bcc808ec68af1d56ef09b82a1138a1226d6be27c3b3c3da65ceb") - version("3.7.00", sha256="62e3f9f51c798998f6493ed36463f66e49723966286ef70a9dcba329b8443040") - version("3.6.01", sha256="1b80a70c5d641da9fefbbb652e857d7c7a76a0ebad1f477c253853e209deb8db") - version("3.6.00", sha256="53b11fffb53c5d48da5418893ac7bc814ca2fde9c86074bdfeaa967598c918f4") - version("3.5.00", sha256="748f06aed63b1e77e3653cd2f896ef0d2c64cb2e2d896d9e5a57fec3ff0244ff") - version("3.4.01", sha256="146d5e233228e75ef59ca497e8f5872d9b272cb93e8e9cdfe05ad34a23f483d1") - version("3.4.00", sha256="2e4438f9e4767442d8a55e65d000cc9cde92277d415ab4913a96cd3ad901d317") - version("3.3.01", sha256="4919b00bb7b6eb80f6c335a32f98ebe262229d82e72d3bae6dd91aaf3d234c37") - version("3.3.00", sha256="170b9deaa1943185e928f8fcb812cd4593a07ed7d220607467e8f0419e147295") - version("3.2.01", sha256="9e27a3d8f81559845e190d60f277d84d6f558412a3df3301d9545e91373bcaf1") - version("3.2.00", sha256="05e1b4dd1ef383ca56fe577913e1ff31614764e65de6d6f2a163b2bddb60b3e9") - version("3.1.01", sha256="ff5024ebe8570887d00246e2793667e0d796b08c77a8227fe271127d36eec9dd") - version("3.1.00", 
sha256="b935c9b780e7330bcb80809992caa2b66fd387e3a1c261c955d622dae857d878") - version("3.0.00", sha256="c00613d0194a4fbd0726719bbed8b0404ed06275f310189b3493f5739042a92b") + + version("4.4.01", sha256="3413f0cb39912128d91424ebd92e8832009e7eeaf6fa8da58e99b0d37860d972") + version("4.4.00", sha256="0b46372f38c48aa088411ac1b7c173a5c90f0fdb69ab40271827688fc134f58b") + + version( + "4.3.01", + sha256="5998b7c732664d6b5e219ccc445cd3077f0e3968b4be480c29cd194b4f45ec70", + url="https://github.com/kokkos/kokkos/archive/4.3.01.tar.gz", + ) + version( + "4.3.00", + sha256="53cf30d3b44dade51d48efefdaee7a6cf109a091b702a443a2eda63992e5fe0d", + url="https://github.com/kokkos/kokkos/archive/4.3.00.tar.gz", + ) + version( + "4.2.01", + sha256="cbabbabba021d00923fb357d2e1b905dda3838bd03c885a6752062fe03c67964", + url="https://github.com/kokkos/kokkos/archive/4.2.01.tar.gz", + ) + version( + "4.2.00", + sha256="ac08765848a0a6ac584a0a46cd12803f66dd2a2c2db99bb17c06ffc589bf5be8", + url="https://github.com/kokkos/kokkos/archive/4.2.00.tar.gz", + ) + version( + "4.1.00", + sha256="cf725ea34ba766fdaf29c884cfe2daacfdc6dc2d6af84042d1c78d0f16866275", + url="https://github.com/kokkos/kokkos/archive/4.1.00.tar.gz", + ) + version( + "4.0.01", + sha256="bb942de8afdd519fd6d5d3974706bfc22b6585a62dd565c12e53bdb82cd154f0", + url="https://github.com/kokkos/kokkos/archive/4.0.01.tar.gz", + ) + version( + "4.0.00", + sha256="1829a423883d4b44223c7c3a53d3c51671145aad57d7d23e6a1a4bebf710dcf6", + url="https://github.com/kokkos/kokkos/archive/4.0.00.tar.gz", + ) + version( + "3.7.02", + sha256="5024979f06bc8da2fb696252a66297f3e0e67098595a0cc7345312b3b4aa0f54", + url="https://github.com/kokkos/kokkos/archive/3.7.02.tar.gz", + ) + version( + "3.7.01", + sha256="0481b24893d1bcc808ec68af1d56ef09b82a1138a1226d6be27c3b3c3da65ceb", + url="https://github.com/kokkos/kokkos/archive/3.7.01.tar.gz", + ) + version( + "3.7.00", + sha256="62e3f9f51c798998f6493ed36463f66e49723966286ef70a9dcba329b8443040", + url="https://github.com/kokkos/kokkos/archive/3.7.00.tar.gz", + ) + version( + "3.6.01", + sha256="1b80a70c5d641da9fefbbb652e857d7c7a76a0ebad1f477c253853e209deb8db", + url="https://github.com/kokkos/kokkos/archive/3.6.01.tar.gz", + ) + version( + "3.6.00", + sha256="53b11fffb53c5d48da5418893ac7bc814ca2fde9c86074bdfeaa967598c918f4", + url="https://github.com/kokkos/kokkos/archive/3.6.00.tar.gz", + ) + version( + "3.5.00", + sha256="748f06aed63b1e77e3653cd2f896ef0d2c64cb2e2d896d9e5a57fec3ff0244ff", + url="https://github.com/kokkos/kokkos/archive/3.5.00.tar.gz", + ) + version( + "3.4.01", + sha256="146d5e233228e75ef59ca497e8f5872d9b272cb93e8e9cdfe05ad34a23f483d1", + url="https://github.com/kokkos/kokkos/archive/3.4.01.tar.gz", + ) + version( + "3.4.00", + sha256="2e4438f9e4767442d8a55e65d000cc9cde92277d415ab4913a96cd3ad901d317", + url="https://github.com/kokkos/kokkos/archive/3.4.00.tar.gz", + ) + version( + "3.3.01", + sha256="4919b00bb7b6eb80f6c335a32f98ebe262229d82e72d3bae6dd91aaf3d234c37", + url="https://github.com/kokkos/kokkos/archive/3.3.01.tar.gz", + ) + version( + "3.3.00", + sha256="170b9deaa1943185e928f8fcb812cd4593a07ed7d220607467e8f0419e147295", + url="https://github.com/kokkos/kokkos/archive/3.3.00.tar.gz", + ) + version( + "3.2.01", + sha256="9e27a3d8f81559845e190d60f277d84d6f558412a3df3301d9545e91373bcaf1", + url="https://github.com/kokkos/kokkos/archive/3.2.01.tar.gz", + ) + version( + "3.2.00", + sha256="05e1b4dd1ef383ca56fe577913e1ff31614764e65de6d6f2a163b2bddb60b3e9", + url="https://github.com/kokkos/kokkos/archive/3.2.00.tar.gz", + 
) + version( + "3.1.01", + sha256="ff5024ebe8570887d00246e2793667e0d796b08c77a8227fe271127d36eec9dd", + url="https://github.com/kokkos/kokkos/archive/3.1.01.tar.gz", + ) + version( + "3.1.00", + sha256="b935c9b780e7330bcb80809992caa2b66fd387e3a1c261c955d622dae857d878", + url="https://github.com/kokkos/kokkos/archive/3.1.00.tar.gz", + ) + version( + "3.0.00", + sha256="c00613d0194a4fbd0726719bbed8b0404ed06275f310189b3493f5739042a92b", + url="https://github.com/kokkos/kokkos/archive/3.0.00.tar.gz", + ) depends_on("cxx", type="build") # Kokkos requires a C++ compiler From 754011643c83aefda92b43602456ca31c7703121 Mon Sep 17 00:00:00 2001 From: afzpatel <122491982+afzpatel@users.noreply.github.com> Date: Mon, 4 Nov 2024 04:41:07 -0500 Subject: [PATCH 051/208] rocal and rocm-openmp-extras: fix build failures (#47314) --- var/spack/repos/builtin/packages/mivisionx/package.py | 4 ++-- var/spack/repos/builtin/packages/rocal/package.py | 3 ++- .../repos/builtin/packages/rocm-openmp-extras/package.py | 9 ++++++--- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/mivisionx/package.py b/var/spack/repos/builtin/packages/mivisionx/package.py index f3e565f99f40a9..332d9d4ca97d4b 100644 --- a/var/spack/repos/builtin/packages/mivisionx/package.py +++ b/var/spack/repos/builtin/packages/mivisionx/package.py @@ -15,7 +15,7 @@ class Mivisionx(CMakePackage): git = "https://github.com/GPUOpen-ProfessionalCompute-Libraries/MIVisionX.git" url = "https://github.com/GPUOpen-ProfessionalCompute-Libraries/MIVisionX/archive/rocm-6.1.2.tar.gz" - maintainers("srekolam", "renjithravindrankannath") + maintainers("srekolam", "renjithravindrankannath", "afzpatel") tags = ["rocm"] def url_for_version(self, version): @@ -211,7 +211,7 @@ def patch(self): when="@5.3:", ) depends_on("openssl") - depends_on("libjpeg-turbo@2.0.6+partial_decoder", type="build") + depends_on("libjpeg-turbo@2.0.6+partial_decoder", type="build", when="@:6.2.0") depends_on("rpp@1.2.0", when="@5.5:5.6") depends_on("lmdb", when="@5.5:") depends_on("py-setuptools", when="@5.6:") diff --git a/var/spack/repos/builtin/packages/rocal/package.py b/var/spack/repos/builtin/packages/rocal/package.py index 84e45834935154..c9196dd562f61c 100644 --- a/var/spack/repos/builtin/packages/rocal/package.py +++ b/var/spack/repos/builtin/packages/rocal/package.py @@ -20,7 +20,8 @@ class Rocal(CMakePackage): version("6.2.1", sha256="77d3e63e02afaee6f1ee1d877d88b48c6ea66a0afca96a1313d0f1c4f8e86b2a") version("6.2.0", sha256="c7c265375a40d4478a628258378726c252caac424f974456d488fce43890e157") - depends_on("libjpeg-turbo@2.0.6+partial_decoder") + depends_on("libjpeg-turbo@2.0.6+partial_decoder", when="@6.2.0") + depends_on("libjpeg-turbo@3.0.2:", when="@6.2.1:") depends_on("rapidjson") depends_on("ffmpeg@4.4:") diff --git a/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py b/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py index 3586bef851b6e8..471b662d4b207e 100644 --- a/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py +++ b/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py @@ -155,7 +155,7 @@ class RocmOpenmpExtras(Package): license("Apache-2.0") - maintainers("srekolam", "renjithravindrankannath", "estewart08") + maintainers("srekolam", "renjithravindrankannath", "estewart08", "afzpatel") version("6.2.1", sha256=versions_dict["6.2.1"]["aomp"]) version("6.2.0", sha256=versions_dict["6.2.0"]["aomp"]) version("6.1.2", sha256=versions_dict["6.1.2"]["aomp"]) @@ -189,6 +189,7 @@ class 
RocmOpenmpExtras(Package):

     license("Apache-2.0")

-    maintainers("srekolam", "renjithravindrankannath", "estewart08")
+    maintainers("srekolam", "renjithravindrankannath", "estewart08", "afzpatel")
     version("6.2.1", sha256=versions_dict["6.2.1"]["aomp"])
     version("6.2.0", sha256=versions_dict["6.2.0"]["aomp"])
     version("6.1.2", sha256=versions_dict["6.1.2"]["aomp"])
@@ -189,6 +189,7 @@ class RocmOpenmpExtras(Package):
     depends_on("libffi", type=("build", "link"))
     depends_on("libdrm", when="@5.7:6.0")
     depends_on("numactl", when="@5.7:6.0")
+    depends_on("zlib", when="@6.2:")

     for ver in [
         "5.5.0",
@@ -489,6 +490,7 @@ def install(self, spec, prefix):
         ffi_inc = spec["libffi"].prefix.include
         if self.spec.satisfies("@6.2:"):
             ncurses_lib_dir = self.spec["ncurses"].prefix.lib
+            zlib_lib_dir = self.spec["zlib"].prefix.lib

         # flang1 and flang2 symlink needed for build of flang-runtime
         # libdevice symlink to rocm-openmp-extras for runtime
@@ -638,11 +640,12 @@ def install(self, spec, prefix):
             flang_legacy_flags.append("-D_GLIBCXX_USE_CXX11_ABI=0")
         if self.spec.satisfies("@6.2:"):
             flang_legacy_flags.append("-L{0}".format(ncurses_lib_dir))
+            flang_legacy_flags.append("-L{0}".format(zlib_lib_dir))
         components["flang-legacy-llvm"] += [
-            "-DCMAKE_CXX_FLAGS={0}".format(",".join(flang_legacy_flags))
+            "-DCMAKE_CXX_FLAGS={0}".format(" ".join(flang_legacy_flags))
         ]
         components["flang-legacy"] += [
-            "-DCMAKE_CXX_FLAGS={0}".format(",".join(flang_legacy_flags))
+            "-DCMAKE_CXX_FLAGS={0}".format(" ".join(flang_legacy_flags))
         ]

         components["flang"] = [

From 1ee344c75c75b07769bb0b73024a9c59537fa333 Mon Sep 17 00:00:00 2001
From: Martin Lang <67915889+lang-m@users.noreply.github.com>
Date: Mon, 4 Nov 2024 10:47:54 +0100
Subject: [PATCH 052/208] bigdft-futile: fix compilation for @1.9.5~mpi
 (#47292)

When compiled without MPI support, a fake mpi header is autogenerated
during configure/build. In version 1.9.5 this header is missing one
symbol; the problem has since been fixed upstream. A similar problem
also occurs in 1.9.4. Unfortunately, the patch does not apply to 1.9.4,
and I do not know whether further fixes would be required there.
Therefore, only the newest version, 1.9.5, is patched.
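The fix uses Spack's conditional patch idiom: fetch the upstream commit
as a diff, pin it with a checksum, and restrict it with a `when`
constraint to the one version/variant combination it is known to fix. A
minimal sketch of the pattern, with placeholder URL and checksum (the
real values are in the diff below):

```python
# apply an upstream fix only where it is known to work;
# the <...> values are placeholders, not real inputs
patch(
    "https://gitlab.com/l_sim/bigdft-suite/-/commit/<commit>.diff",
    sha256="<checksum of the downloaded diff>",
    when="@1.9.5~mpi",  # only unpatched 1.9.5 without MPI needs it
)
```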
--- var/spack/repos/builtin/packages/bigdft-futile/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/var/spack/repos/builtin/packages/bigdft-futile/package.py b/var/spack/repos/builtin/packages/bigdft-futile/package.py index 94a66f100dd528..99bf739a1d01b7 100644 --- a/var/spack/repos/builtin/packages/bigdft-futile/package.py +++ b/var/spack/repos/builtin/packages/bigdft-futile/package.py @@ -49,6 +49,14 @@ class BigdftFutile(AutotoolsPackage, CudaPackage): configure_directory = "futile" + # missing MPI_BOTTOM in fake mpif.h (generated when compiling without MPI support) + # similar issue (maybe others) also in 1.9.4 but this patch does not work for 1.9.4 + patch( + "https://gitlab.com/l_sim/bigdft-suite/-/commit/ec7419011fa9fd815de77bfca8642973091fb64b.diff", + sha256="66c493e37fe7f7f9800ae7f49bb0172a5b2372a2ce0ee4c3bcb7ff5c59a3925c", + when="@1.9.5~mpi", + ) + def configure_args(self): spec = self.spec prefix = self.prefix From 5b93466340ae94e98c97f5353f108a0e234a1bf9 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 4 Nov 2024 10:48:35 +0100 Subject: [PATCH 053/208] libssh2: fix crypto (#47393) --- .../repos/builtin/packages/libssh2/package.py | 31 +++++++++++++------ 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/var/spack/repos/builtin/packages/libssh2/package.py b/var/spack/repos/builtin/packages/libssh2/package.py index 23dc57ce2deaff..754d593c4b1305 100644 --- a/var/spack/repos/builtin/packages/libssh2/package.py +++ b/var/spack/repos/builtin/packages/libssh2/package.py @@ -14,6 +14,7 @@ class Libssh2(AutotoolsPackage, CMakePackage): license("BSD-3-Clause") + version("1.11.1", sha256="d9ec76cbe34db98eec3539fe2c899d26b0c837cb3eb466a56b0f109cabf658f7") version("1.11.0", sha256="3736161e41e2693324deb38c26cfdc3efe6209d634ba4258db1cecff6a5ad461") version("1.10.0", sha256="2d64e90f3ded394b91d3a2e774ca203a4179f69aebee03003e5a6fa621e41d51") version("1.9.0", sha256="d5fb8bd563305fd1074dda90bd053fb2d29fc4bce048d182f96eaa466dfadafd") @@ -23,8 +24,7 @@ class Libssh2(AutotoolsPackage, CMakePackage): "1.4.3", sha256="eac6f85f9df9db2e6386906a6227eb2cd7b3245739561cad7d6dc1d5d021b96d" ) # CentOS7 - depends_on("c", type="build") # generated - depends_on("cxx", type="build") # generated + depends_on("c", type="build") build_system("autotools", "cmake", default="autotools") @@ -53,7 +53,7 @@ class Libssh2(AutotoolsPackage, CMakePackage): # and fails to prepend the -L flags, which is causing issues in libgit2, as # it tries to locate e.g. libssl in the dirs of the pc file's -L flags, and # cannot find the lib. 
- patch("pr-1114.patch", when="@1.7:") + patch("pr-1114.patch", when="@1.7:1.11.0") class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): @@ -77,14 +77,27 @@ def cmake_args(self): class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): def configure_args(self): - args = ["--disable-tests", "--disable-docker-tests", "--disable-examples-build"] - args += self.enable_or_disable("shared") + args = [ + "--disable-tests", + "--disable-docker-tests", + "--disable-examples-build", + "--without-libgcrypt", + "--without-wincng", + *self.enable_or_disable("shared"), + ] crypto = self.spec.variants["crypto"].value - if crypto == "openssl": - args.append(f"--with-libssl-prefix={self.spec['openssl'].prefix}") - elif crypto == "mbedtls": - args.append(f"--with-libmbedcrypto-prefix={self.spec['mbedtls'].prefix}") + if self.spec.satisfies("@1.9:"): + # single flag for all crypto backends + args.append(f"--with-crypto={crypto}") + else: + # one flag per crypto backend + if crypto == "openssl": + args.append(f"--with-libssl-prefix={self.spec['openssl'].prefix}") + args.append("--without-mbedtls") + elif crypto == "mbedtls": + args.append(f"--with-libmbedcrypto-prefix={self.spec['mbedtls'].prefix}") + args.append("--without-openssl") return args From 2c1d74db9b5b32aa5e510acd0e03a6a1a344995f Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 4 Nov 2024 11:20:33 +0100 Subject: [PATCH 054/208] krb5: disable missing keyutils dependency (#47397) --- var/spack/repos/builtin/packages/krb5/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/krb5/package.py b/var/spack/repos/builtin/packages/krb5/package.py index 0485a999005c8b..c4ee935801bbda 100644 --- a/var/spack/repos/builtin/packages/krb5/package.py +++ b/var/spack/repos/builtin/packages/krb5/package.py @@ -103,7 +103,7 @@ def patch(self): def configure_args(self): spec = self.spec - args = ["--without-system-verto"] + args = ["--without-system-verto", "--without-keyutils"] if spec.satisfies("~shared"): args.append("--enable-static") From c9ed91758d308297b4267a8dd178b88881b20415 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 4 Nov 2024 11:42:34 +0100 Subject: [PATCH 055/208] tcsh: add missing libxcrypt dependency (#47398) --- var/spack/repos/builtin/packages/tcsh/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/tcsh/package.py b/var/spack/repos/builtin/packages/tcsh/package.py index 2b8a1d9a6c8de4..24215b5d9ff204 100644 --- a/var/spack/repos/builtin/packages/tcsh/package.py +++ b/var/spack/repos/builtin/packages/tcsh/package.py @@ -108,6 +108,7 @@ class Tcsh(AutotoolsPackage): ) depends_on("ncurses+termlib") + depends_on("libxcrypt", when="platform=linux") @run_after("install") def link_csh(self): From 10f7014addebfe1a104f558f9cf1901fd1702452 Mon Sep 17 00:00:00 2001 From: Teague Sterling Date: Mon, 4 Nov 2024 03:37:21 -0800 Subject: [PATCH 056/208] vep-cache: new package (#44523) * py-uvloop: add v3.8.14, v3.9.15, v3.10.3 and dependencies * rollback * vep: add v110,v111,v112 * vep-cache: add v110,v111,v112 * Cleanup * Reorganizigng Signed-off-by: Teague Sterling * Update package.py * Update package.py * [@spackbot] updating style on behalf of teaguesterling * Update package.py * Update package.py * Update package.py * [@spackbot] updating style on behalf of teaguesterling * Update package.py * [@spackbot] updating style on behalf of teaguesterling * Fix scoping and syntax issues Signed-off-by: Teague Sterling * fix styles 
Signed-off-by: Teague Sterling * fix variants * fixing up variant issues and cleaning up resource code Signed-off-by: Teague Sterling * fixing unused imports Signed-off-by: Teague Sterling * Apply suggestions from code review Co-authored-by: Arne Becker <101113822+EbiArnie@users.noreply.github.com> * fixing vep dependencies Signed-off-by: Teague Sterling * Fixing resources Signed-off-by: Teague Sterling * Fixing issue where resources are not downloaded Signed-off-by: Teague Sterling * vep-cache fixing downloads Signed-off-by: Teague Sterling * defaulting to using VEP installer Signed-off-by: Teague Sterling * Removing resource-based cache installation and simplifying package. Resources without checksums doesn't work (anymore?) and calculating them with be difficult Signed-off-by: Teague Sterling --------- Signed-off-by: Teague Sterling Co-authored-by: Arne Becker <101113822+EbiArnie@users.noreply.github.com> --- .../builtin/packages/vep-cache/package.py | 151 ++++++++++++++++++ 1 file changed, 151 insertions(+) create mode 100644 var/spack/repos/builtin/packages/vep-cache/package.py diff --git a/var/spack/repos/builtin/packages/vep-cache/package.py b/var/spack/repos/builtin/packages/vep-cache/package.py new file mode 100644 index 00000000000000..6e84426a52a12e --- /dev/null +++ b/var/spack/repos/builtin/packages/vep-cache/package.py @@ -0,0 +1,151 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class VepCache(Package): + """Separate installation and management for the Ensembl Variant Effect Predictor (vep)""" + + homepage = "https://useast.ensembl.org/info/docs/tools/vep/index.html" + maintainers("teaguesterling") + # This is a dummy value to get spack to resolve resources, which are not downloaded + # when has_code = False + has_code = False + + license("Apache-2.0", checked_by="teaguesterling") + + vep_versions = ["112", "111", "110"] + depends_on("vep", type="build") + for major in vep_versions: + version(major) + depends_on(f"vep@{major}", type="build", when=f"@{major}+match_vep_version") + + vep_assembly_sources = ["ensembl", "refseq", "merged"] + + # This is an incomplete list + vep_species = { + "bos_taurus": ["UMD3.1"], + "danio_rerio": ["GRCz11"], + "homo_sapiens": ["GRCh38", "GRCh37"], + "mus_musculus": ["GRCm38"], + "rattus_norvegicus": ["Rnor_6.0"], + } + + variant("match_vep_version", default=True, description="Match cache and software version") + variant("env", default=True, description="Setup VEP environment variables for this cache") + + # Cache configuration options + variant("fasta", default=True, description="Add FASTA files to the cache") + variant("indexed", default=True, description="Use indexed cache") + + variant( + "assembly_source", + values=vep_assembly_sources, + default="ensembl", + description="What reference genome source", + ) + variant( + "species", + values=vep_species.keys(), + default="homo_sapiens", + description="Which species to download the cache for (only one at a time)", + ) + variant( + "assembly", + values=["latest"] + + [ + conditional(*assemblies, when=f"species={species}") + for species, assemblies in vep_species.items() + ], + default="latest", + multi=False, + description="Which assembly of genome to use (only needed for homo sapiens)", + ) + + def cache_from_spec(self, spec): + variants = spec.variants + indexed = spec.satisfies("+indexed") + 
cache_type = variants["assembly_source"].value + species = variants["species"].value + assembly = variants["assembly"].value + assembly = self.vep_species[species][0] if assembly == "latest" else assembly + return indexed, cache_type, species, assembly + + def vep_cache_config(self, base): + spec = self.spec + cache_version = spec.version.up_to(1) + indexed, cache_type, species, assembly = self.cache_from_spec(spec) + user_root = join_path(base, "share", "vep") + root = user_root # Should this be VEP install dir? + + suffix = "" if cache_type == "ensembl" else f"_{cache_type}" + species_cache = f"{species}{suffix}" + + if species == "homo_sapiens": + cache_dir = join_path(species, f"{cache_version}_{assembly}") + else: + cache_dir = join_path(species, f"{cache_version}") + + return { + "root": root, + "user_root": user_root, + "version": f"{cache_version}", + "type": f"{cache_type}", + "species": species, + "cache_species": species_cache, + "assembly": f"{assembly}", + "indexed": indexed, + "dir": cache_dir, + "full_path": join_path(root, cache_dir), + } + + def setup_run_environment(self, env): + if self.spec.satisfies("+env"): + cache = self.vep_cache_config(self.home) + env.set("VEP_OFFLINE", "1") + env.set("VEP_CACHE", "1") + env.set("VEP_DIR", cache["user_root"]) + env.set("VEP_SPECIES", cache["species"]) + env.set("VEP_CACHE_VERSION", cache["version"]) + if cache["assembly"] is not None: + env.set("VEP_ASSEMBLY", cache["assembly"]) + if cache["type"] == "refseq": + env.set("VEP_REFSEQ", "1") + if cache["type"] == "merged": + env.set("VEP_MERGED", "1") + if self.spec.satisfies("+fasta"): + pass + + def cache_installer_args(self): + cache = self.vep_cache_config(self.prefix) + args = [ + "--CACHEDIR", + cache["full_path"], + "--CACHE_VERSION", + cache["version"], + "--SPECIES", + cache["cache_species"], + ] + if cache["species"] == "homo_sapiens": + args += ["--ASSEMBLY", cache["assembly"]] + + return args + + def installer_args(self): + auto = "cf" if self.spec.satisfies("+fasta") else "c" + args = ["--AUTO", auto, "--NO_UPDATE", "--NO_TEST"] + args += self.cache_installer_args() + return args + + def install_with_installer(self): + vep = self.spec["vep"].package + installer = which(vep.vep_installer_path) + installer(*self.installer_args()) + + def install(self, spec, prefix): + cache = self.vep_cache_config(self.prefix) + mkdirp(cache["full_path"]) + self.install_with_installer() From 8d0856d1cc3f8d097b8f4a7e5a18ad662b8434c5 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 4 Nov 2024 13:52:05 +0100 Subject: [PATCH 057/208] packaging_guide.rst: explain forward and backward compat before the less common cases (#47402) The idea is to go from most to least used: backward compat -> forward compat -> pinning on major or major.minor version -> pinning specific, concrete versions. 
Further, the following

```python
# backward compatibility with Python
depends_on("python@3.8:")
depends_on("python@3.9:", when="@1.2:")
depends_on("python@3.10:", when="@1.4:")

# forward compatibility with Python
depends_on("python@:3.12", when="@:1.10")
depends_on("python@:3.13", when="@:1.12")
depends_on("python@:3.14")
```

is better than disjoint `when` ranges, which cause repetition of the rules on
dependencies and require frequent editing of existing lines after new releases:

```python
depends_on("python@3.8:3.12", when="@:1.1")
depends_on("python@3.9:3.12", when="@1.2:1.3")
depends_on("python@3.10:3.12", when="@1.4:1.10")
depends_on("python@3.10:3.13", when="@1.11:1.12")
depends_on("python@3.10:3.14", when="@1.13:")
```
---
 lib/spack/docs/packaging_guide.rst | 91 +++++++++++++++++++++---------
 1 file changed, 63 insertions(+), 28 deletions(-)

diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index a736ff8c793005..d9a37175b6c72d 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -2503,15 +2503,14 @@ with. For example, suppose that in the ``libdwarf`` package you write:
 
    depends_on("libelf@0.8")
 
-Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
-You can also specify a requirement for a particular variant or for
-specific compiler flags:
+Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which
+includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version
+restrictions, you can also specify variants if this package requires
+optional features of the dependency.
 
 .. code-block:: python
 
-   depends_on("libelf@0.8+debug")
-   depends_on("libelf debug=True")
-   depends_on("libelf cppflags='-fPIC'")
+   depends_on("libelf@0.8 +parser +pic")
 
 Both users *and* package authors can use the same spec syntax to refer
 to different package configurations. Users use the spec syntax on the
@@ -2519,46 +2518,82 @@ command line to find installed packages or to install packages with
 particular constraints, and package authors can use specs to describe
 relationships between packages.
 
-^^^^^^^^^^^^^^
-Version ranges
-^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Specifying backward and forward compatibility
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Packages are often compatible with a range of versions of their
+dependencies. This is typically referred to as backward and forward
+compatibility. Spack allows you to specify this in the ``depends_on``
+directive using version ranges.
 
-Although some packages require a specific version for their dependencies,
-most can be built with a range of versions. For example, if you are
-writing a package for a legacy Python module that only works with Python
-2.4 through 2.6, this would look like:
+**Backwards compatibility** means that the package requires at least a
+certain version of its dependency:
 
 .. code-block:: python
 
-   depends_on("python@2.4:2.6")
+   depends_on("python@3.10:")
 
-Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
-greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
-you want to specify that a package works with any version of Python 3 (or
-higher), this would look like:
+In this case, the package requires Python 3.10 or newer.
+
+Commonly, packages drop support for older versions of a dependency as
+they release new versions. In Spack you can conveniently add every
+backward compatibility rule as a separate line:
+
+.. code-block:: python
 
-   depends_on("python@3:")
+   # backward compatibility with Python
+   depends_on("python@3.8:")
+   depends_on("python@3.9:", when="@1.2:")
+   depends_on("python@3.10:", when="@1.4:")
+
+This means that in general we need Python 3.8 or newer; from version
+1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we
+need Python 3.10 or newer. Notice that it's fine to have overlapping
+ranges in the ``when`` clauses.
 
-Here we leave out the upper bound. If you want to say that a package
-requires Python 2, you can similarly leave out the lower bound:
+**Forward compatibility** means that the package requires at most a
+certain version of its dependency. Forward compatibility rules are
+necessary when there are breaking changes in the dependency that the
+package cannot handle. In Spack we often add forward compatibility
+bounds only at the time a new, breaking version of a dependency is
+released. As with backward compatibility, it is typical to see a list
+of forward compatibility bounds in a package file as separate lines:
 
 .. code-block:: python
 
-   depends_on("python@:2")
+   # forward compatibility with Python
+   depends_on("python@:3.12", when="@:1.10")
+   depends_on("python@:3.13", when="@:1.12")
+
+Notice how the ``:`` now appears before the version number both in the
+dependency and in the ``when`` clause. This tells Spack that in general
+we need Python 3.13 or older up to version ``1.12.x``, and up to version
+``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
+with Python 3.13 was added in version 1.11, while version 1.13 added forward
+compatibility with Python 3.14.
+
+Notice that a version range ``@:3.12`` includes *any* patch version
+number ``3.12.x``, which is often useful when specifying forward compatibility
+bounds.
+
+So far we have seen open-ended version ranges, which is by far the most
+common use case. It is also possible to specify both a lower and an upper bound
+on the version of a dependency, like this:
+
+.. code-block:: python
 
-Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
-``@:3`` means "up to and including any 3.x version".
+   depends_on("python@3.10:3.12")
 
-You can also simply write
+There is a short syntax to specify that a package is compatible with, say, any
+``3.x`` version:
 
 .. code-block:: python
 
-   depends_on("python@2.7")
+   depends_on("python@3")
 
-to tell Spack that the package needs Python 2.7.x. This is equivalent to
-``@2.7:2.7``.
+The above is equivalent to ``depends_on("python@3:3")``, which means at least
+Python version 3 and at most any version ``3.x.y``.
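Putting the two rule sets above together: Spack intersects every matching ``depends_on`` constraint, so the backward and forward bounds combine into a single effective range per package version. A sketch, reusing the illustrative version numbers from the commit message above:

```python
# backward compatibility with Python
depends_on("python@3.8:")
depends_on("python@3.9:", when="@1.2:")
depends_on("python@3.10:", when="@1.4:")

# forward compatibility with Python
depends_on("python@:3.12", when="@:1.10")
depends_on("python@:3.13", when="@:1.12")

# Effective Python requirement per (hypothetical) package version:
#   @1.0  -> python@3.8:3.12
#   @1.3  -> python@3.9:3.12
#   @1.5  -> python@3.10:3.12
#   @1.11 -> python@3.10:3.13
#   @1.13 -> python@3.10:  (no forward bound has been needed yet)
```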
In very rare cases, you may need to specify an exact version, for example if you need to distinguish between ``3.2`` and ``3.2.1``: From b95936f752f55ae4e1a70d4301feab2406d9dea8 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 07:20:09 -0600 Subject: [PATCH 058/208] zabbix: add v5.0.44, v6.0.34, v7.0.4 (fix CVEs) (#47001) * zabbix: add v5.0.44, v6.0.34, v7.0.4 (fix CVEs) * [@spackbot] updating style on behalf of wdconinc * zabbix: use f-string * zabbix: fix f-string quoting * zabbix: use mysql-client * @wdconic, this fixes the mysql client virtual for me --------- Co-authored-by: wdconinc Co-authored-by: Bernhard Kaindl --- .../repos/builtin/packages/mysql/package.py | 9 ++++ .../repos/builtin/packages/zabbix/package.py | 46 ++++++++++++++----- 2 files changed, 44 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/mysql/package.py b/var/spack/repos/builtin/packages/mysql/package.py index 42288aaab74b95..1e7cb834d36351 100644 --- a/var/spack/repos/builtin/packages/mysql/package.py +++ b/var/spack/repos/builtin/packages/mysql/package.py @@ -226,3 +226,12 @@ def setup_build_environment(self, env): if "python" in self.spec and self.spec.satisfies("@:7"): self._fix_dtrace_shebang(env) + + @run_before("install") + def fixup_mysqlconfig(self): + if not self.spec.satisfies("platform=windows"): + # mysql uses spack libz but exports -lzlib to its dependencies. Fix that: + with working_dir(self.build_directory): + for config in ("scripts/mysql_config", "scripts/mysqlclient.pc"): + if os.path.exists(config): + filter_file(" -lzlib ", " -lz ", config) diff --git a/var/spack/repos/builtin/packages/zabbix/package.py b/var/spack/repos/builtin/packages/zabbix/package.py index aeb8bfd1d11037..a84097c1c7ee46 100644 --- a/var/spack/repos/builtin/packages/zabbix/package.py +++ b/var/spack/repos/builtin/packages/zabbix/package.py @@ -11,35 +11,59 @@ class Zabbix(AutotoolsPackage): such as networks, servers, VMs, applications and the cloud.""" homepage = "https://www.zabbix.com" - url = "https://github.com/zabbix/zabbix/archive/5.0.3.tar.gz" + url = "https://github.com/zabbix/zabbix/archive/refs/tags/5.0.3.tar.gz" - license("GPL-2.0-or-later") + license("AGPL-3.0-only", when="@7:", checked_by="wdconinc") + license("GPL-2.0-or-later", when="@:6", checked_by="wdconinc") - version("5.0.3", sha256="d579c5fa4e9065e8041396ace24d7132521ef5054ce30dfd9d151cbb7f0694ec") - version("4.0.24", sha256="c7e4962d745277d67797d90e124555ce27d198822a7e65c55d86aee45d3e93fc") - version("4.0.23", sha256="652143614f52411cad47db64e93bf3ba1cd547d6ca9591296223b5f0528b3b61") + version("7.0.4", sha256="73aa6b47bd4078587589b30f09671fb30c7743f5b57e81ea8e9bd5a7c5f221c7") + version("6.0.34", sha256="e60558911230d27ffad98850e414b46e318c9d41591a6ff65a255c0810cfcb8b") + version("5.0.44", sha256="f8ee86fd21f0f57e7fad68387271b995c1e5cc402d517cd7df5d5221fd6129fd") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2023-32724 + version("5.0.3", sha256="d579c5fa4e9065e8041396ace24d7132521ef5054ce30dfd9d151cbb7f0694ec") + # https://nvd.nist.gov/vuln/detail/CVE-2019-17382 + version( + "4.0.24", sha256="c7e4962d745277d67797d90e124555ce27d198822a7e65c55d86aee45d3e93fc" + ) + version( + "4.0.23", sha256="652143614f52411cad47db64e93bf3ba1cd547d6ca9591296223b5f0528b3b61" + ) - depends_on("c", type="build") # generated - depends_on("cxx", type="build") # generated + depends_on("c", type="build") + depends_on("cxx", type="build") depends_on("autoconf", type="build") + 
depends_on("autoconf-archive", type="build") depends_on("automake", type="build") depends_on("libtool", type="build") depends_on("m4", type="build") - depends_on("mysql") + depends_on("pkgconfig", type="build") + depends_on("mysql-client") + # Older versions of mysql use openssl-1.x, causing build issues: + depends_on("mysql@8.0.35:", when="^[virtuals=mysql-client] mysql") depends_on("libevent") depends_on("pcre") depends_on("go") + def autoreconf(self, spec, prefix): + Executable("./bootstrap.sh")() + def configure_args(self): + mysql_prefix = self.spec["mysql-client"].prefix + if self.spec.satisfies("^[virtuals=mysql-client] mysql"): + mysql_config = mysql_prefix.bin.mysql_config + else: + mysql_config = mysql_prefix.bin.mariadb_config + args = [ "--enable-server", "--enable-proxy", "--enable-agent", "--enable-agent2", - "--with-mysql", - "--with-libevent=%s" % self.spec["libevent"].prefix, - "--with-libpcre=%s" % self.spec["pcre"].prefix, + f"--with-mysql={mysql_config}", + f"--with-libevent={self.spec['libevent'].prefix}", + f"--with-libpcre={self.spec['pcre'].prefix}", ] return args From e952f6be8e80f2d84ccb254ed826b1957f78dd0f Mon Sep 17 00:00:00 2001 From: Stephen Nicholas Swatman Date: Mon, 4 Nov 2024 14:48:08 +0100 Subject: [PATCH 059/208] acts dependencies: new versions as of 2024/11/01 (#47366) * acts dependencies: new versions as of 2024/11/01 Includes new versions of ACTS itself, Detray, and Vecmem. * Bind TBB --- var/spack/repos/builtin/packages/acts/package.py | 4 +++- var/spack/repos/builtin/packages/detray/package.py | 1 + var/spack/repos/builtin/packages/vecmem/package.py | 1 + 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py index 7845ada663b639..4063fb491b6a75 100644 --- a/var/spack/repos/builtin/packages/acts/package.py +++ b/var/spack/repos/builtin/packages/acts/package.py @@ -41,6 +41,7 @@ class Acts(CMakePackage, CudaPackage): # Supported Acts versions version("main", branch="main") version("master", branch="main", deprecated=True) # For compatibility + version("37.3.0", commit="b3e856d4dadcda7d1a88a9b846ce5a7acd8410c4", submodules=True) version("37.2.0", commit="821144dc40d35b44aee0d7857a0bd1c99e4a3932", submodules=True) version("37.1.0", commit="fa6ad4d52e0bd09cf8c78507fcbb18e9ac2c87a3", submodules=True) version("37.0.1", commit="998b9c9dd42d5160c2540f8fa820505869bfdb79", submodules=True) @@ -337,7 +338,7 @@ class Acts(CMakePackage, CudaPackage): "tbb", default=True, description="Build the examples with Threading Building Blocks library", - when="@19.8:19,20.1: +examples", + when="@19.8:19,20.1:37.2 +examples", ) variant("analysis", default=False, description="Build analysis applications in the examples") @@ -382,6 +383,7 @@ class Acts(CMakePackage, CudaPackage): depends_on("hepmc3 @3.2.1:", when="+hepmc3") depends_on("heppdt", when="+hepmc3 @:4.0") depends_on("intel-tbb @2020.1:", when="+examples +tbb") + depends_on("intel-tbb @2020.1:", when="+examples @37.3:") depends_on("mlpack@3.1.1:", when="+mlpack") depends_on("nlohmann-json @3.9.1:", when="@0.14: +json") depends_on("nlohmann-json @3.10.5:", when="@37: +json") diff --git a/var/spack/repos/builtin/packages/detray/package.py b/var/spack/repos/builtin/packages/detray/package.py index 1d08d4134fa57c..ab94c97a835f33 100644 --- a/var/spack/repos/builtin/packages/detray/package.py +++ b/var/spack/repos/builtin/packages/detray/package.py @@ -20,6 +20,7 @@ class Detray(CMakePackage): license("MPL-2.0", 
checked_by="stephenswat")
 
+    version("0.81.0", sha256="821313a7e3ea90fcf5c92153d28bba1f85844e03d7c6b6b98d0b3407adb86357")
     version("0.80.0", sha256="a12f3e333778ddd20a568b5c8df5b2375f9a4d74caf921822c1864b07b3f8ab7")
     version("0.79.0", sha256="3b9f18cb003e59795a0e4b1414069ac8558b975714626449293a71bc4398a380")
     version("0.78.0", sha256="ca3a348f4e12ed690c3106197e107b9c393b6902224b2543b00382050864bcf3")
diff --git a/var/spack/repos/builtin/packages/vecmem/package.py b/var/spack/repos/builtin/packages/vecmem/package.py
index 3372b8e2692410..c647e396759059 100644
--- a/var/spack/repos/builtin/packages/vecmem/package.py
+++ b/var/spack/repos/builtin/packages/vecmem/package.py
@@ -17,6 +17,7 @@ class Vecmem(CMakePackage, CudaPackage):
 
     license("MPL-2.0-no-copyleft-exception")
 
+    version("1.11.0", sha256="8f4ef9b50da45ea245291e2a4fef86025245150df4a4654ecb708a20adec5c42")
     version("1.10.0", sha256="1fbdc599a65ad7b2cd1176844c7578da38911bc747fbe51a71e00d20e6105330")
     version("1.9.0", sha256="c1ddc43ff0d742306cbee71afd80efd348b6b0b1ba9e4210ca7f8b607f03bd70")
     version("1.8.0", sha256="d04f1bfcd08837f85c794a69da9f248e163985214a302c22381037feb5b3a7a9")

From d44bdc40c93fdc6350aa02fc03a61ccc3f36d2d2 Mon Sep 17 00:00:00 2001
From: Harmen Stoppels
Date: Mon, 4 Nov 2024 16:12:16 +0100
Subject: [PATCH 060/208] boost: require +icu when +locale (#47396)

---
 var/spack/repos/builtin/packages/boost/package.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py
index 75a6d543e4b6b1..0dbb60ae9b8fcb 100644
--- a/var/spack/repos/builtin/packages/boost/package.py
+++ b/var/spack/repos/builtin/packages/boost/package.py
@@ -246,6 +246,7 @@ def libs(self):
     depends_on("icu4c cxxstd=14", when="+icu cxxstd=14")
     depends_on("icu4c cxxstd=17", when="+icu cxxstd=17")
     conflicts("cxxstd=98", when="+icu")  # Requires c++11 at least
+    conflicts("+locale ~icu")  # Boost.Locale "strongly recommends" icu, so enforce it
 
     depends_on("python", when="+python")
     # https://github.com/boostorg/python/commit/cbd2d9f033c61d29d0a1df14951f4ec91e7d05cd

From 2214fc855dc1be677693e0e88f18996a5f5b5fa8 Mon Sep 17 00:00:00 2001
From: Stephen Nicholas Swatman
Date: Mon, 4 Nov 2024 16:38:12 +0100
Subject: [PATCH 061/208] geant4-data: symlink only specific data dirs (#47367)

Currently, the `geant4-data` spec creates symlinks to all of its
dependencies, and it does so by globbing their `share/` directories. This
works very well for the way Spack installs these, but it doesn't work for
anybody wanting to use e.g. the Geant4 data on CVMFS. See pull request
#47298.

This commit changes the way the `geant4-data` spec works. It no longer
blindly globs directories and makes symlinks, but it asks its dependencies
specifically for the name of their data directory. This should allow us to
use the CVMFS installations as externals through Spack.
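In outline, the protocol the diffs below implement looks like the following
condensed sketch (abbreviated; `G4abla` stands in for each of the fifteen
`g4*` data packages, and only the relevant methods are shown):

```python
import os

from spack.package import *


class G4abla(Package):
    # Each data package now reports the name of its dataset directory itself,
    # so consumers no longer have to glob share/data.
    @property
    def g4datasetname(self):
        return f"G4ABLA{self.version}"


class Geant4Data(Package):
    def install(self, spec, prefix):
        with working_dir(self.datadir, create=True):
            for s in spec.dependencies():
                if not s.name.startswith("g4"):
                    continue
                # Symlink exactly share/data/<g4datasetname> from the dependency
                d = f"{s.prefix.share}/data/{s.package.g4datasetname}"
                os.symlink(d, os.path.basename(d))
```

Because the data directory is named explicitly rather than discovered by
globbing, an external (e.g. CVMFS) installation only has to present the same
layout for the symlinks to resolve.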
--- var/spack/repos/builtin/packages/g4abla/package.py | 9 +++++++-- var/spack/repos/builtin/packages/g4emlow/package.py | 9 +++++++-- .../repos/builtin/packages/g4ensdfstate/package.py | 9 +++++++-- var/spack/repos/builtin/packages/g4incl/package.py | 9 +++++++-- var/spack/repos/builtin/packages/g4ndl/package.py | 9 +++++++-- .../repos/builtin/packages/g4neutronxs/package.py | 9 +++++++-- .../repos/builtin/packages/g4nudexlib/package.py | 9 +++++++-- .../repos/builtin/packages/g4particlexs/package.py | 9 +++++++-- .../builtin/packages/g4photonevaporation/package.py | 11 +++++++---- var/spack/repos/builtin/packages/g4pii/package.py | 9 +++++++-- .../builtin/packages/g4radioactivedecay/package.py | 11 +++++++---- .../repos/builtin/packages/g4realsurface/package.py | 9 +++++++-- .../repos/builtin/packages/g4saiddata/package.py | 9 +++++++-- var/spack/repos/builtin/packages/g4tendl/package.py | 9 +++++++-- var/spack/repos/builtin/packages/g4urrpt/package.py | 9 +++++++-- .../repos/builtin/packages/geant4-data/package.py | 11 ++++++++--- 16 files changed, 113 insertions(+), 37 deletions(-) diff --git a/var/spack/repos/builtin/packages/g4abla/package.py b/var/spack/repos/builtin/packages/g4abla/package.py index 710d8de011829c..c36cb6f14845d0 100644 --- a/var/spack/repos/builtin/packages/g4abla/package.py +++ b/var/spack/repos/builtin/packages/g4abla/package.py @@ -24,13 +24,18 @@ class G4abla(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4ABLA{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4ABLA{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4ABLADATA", install_path) def url_for_version(self, version): """Handle version string.""" return "http://geant4-data.web.cern.ch/geant4-data/datasets/G4ABLA.%s.tar.gz" % version + + @property + def g4datasetname(self): + spec = self.spec + return "G4ABLA{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4emlow/package.py b/var/spack/repos/builtin/packages/g4emlow/package.py index 87c25b950b6121..458f6da76bf99b 100644 --- a/var/spack/repos/builtin/packages/g4emlow/package.py +++ b/var/spack/repos/builtin/packages/g4emlow/package.py @@ -35,13 +35,18 @@ class G4emlow(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4EMLOW{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4EMLOW{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4LEDATA", install_path) def url_for_version(self, version): """Handle version string.""" return "https://geant4-data.web.cern.ch/geant4-data/datasets/G4EMLOW.%s.tar.gz" % version + + @property + def g4datasetname(self): + spec = self.spec + return "G4EMLOW{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4ensdfstate/package.py b/var/spack/repos/builtin/packages/g4ensdfstate/package.py index 59fe04f45deebf..6cb3904756d39d 100644 --- 
a/var/spack/repos/builtin/packages/g4ensdfstate/package.py +++ b/var/spack/repos/builtin/packages/g4ensdfstate/package.py @@ -25,11 +25,11 @@ class G4ensdfstate(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4ENSDFSTATE{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4ENSDFSTATE{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4ENSDFSTATEDATA", install_path) def url_for_version(self, version): @@ -37,3 +37,8 @@ def url_for_version(self, version): return ( "http://geant4-data.web.cern.ch/geant4-data/datasets/G4ENSDFSTATE.%s.tar.gz" % version ) + + @property + def g4datasetname(self): + spec = self.spec + return "G4ENSDFSTATE{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4incl/package.py b/var/spack/repos/builtin/packages/g4incl/package.py index 479ce2dda269d1..056910d71408de 100644 --- a/var/spack/repos/builtin/packages/g4incl/package.py +++ b/var/spack/repos/builtin/packages/g4incl/package.py @@ -25,13 +25,18 @@ class G4incl(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4INCL{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4INCL{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4INCLDATA", install_path) def url_for_version(self, version): """Handle version string.""" return "http://geant4-data.web.cern.ch/geant4-data/datasets/G4INCL.%s.tar.gz" % version + + @property + def g4datasetname(self): + spec = self.spec + return "G4INCL{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4ndl/package.py b/var/spack/repos/builtin/packages/g4ndl/package.py index a8c8f688985555..fa94da01b307d0 100644 --- a/var/spack/repos/builtin/packages/g4ndl/package.py +++ b/var/spack/repos/builtin/packages/g4ndl/package.py @@ -25,13 +25,18 @@ class G4ndl(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4NDL{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4NDL{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4NEUTRONHPDATA", install_path) def url_for_version(self, version): """Handle version string.""" return "http://geant4-data.web.cern.ch/geant4-data/datasets/G4NDL.%s.tar.gz" % version + + @property + def g4datasetname(self): + spec = self.spec + return "G4NDL{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4neutronxs/package.py b/var/spack/repos/builtin/packages/g4neutronxs/package.py index 39f6915346ba04..595d86c71e0cc6 100644 --- a/var/spack/repos/builtin/packages/g4neutronxs/package.py +++ b/var/spack/repos/builtin/packages/g4neutronxs/package.py @@ -24,11 +24,11 
@@ class G4neutronxs(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4NEUTRONXS{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4NEUTRONXS{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4NEUTRONXSDATA", install_path) def url_for_version(self, version): @@ -36,3 +36,8 @@ def url_for_version(self, version): return ( "http://geant4-data.web.cern.ch/geant4-data/datasets/G4NEUTRONXS.%s.tar.gz" % version ) + + @property + def g4datasetname(self): + spec = self.spec + return "G4NEUTRONXS{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4nudexlib/package.py b/var/spack/repos/builtin/packages/g4nudexlib/package.py index 073aebcc7f8fc4..2e02321fe4291f 100644 --- a/var/spack/repos/builtin/packages/g4nudexlib/package.py +++ b/var/spack/repos/builtin/packages/g4nudexlib/package.py @@ -23,13 +23,18 @@ class G4nudexlib(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4NUDEXLIB{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4NUDEXLIB{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4NUDEXLIBDATA", install_path) def url_for_version(self, version): """Handle version string.""" return "http://geant4-data.web.cern.ch/geant4-data/datasets/G4NUDEXLIB.%s.tar.gz" % version + + @property + def g4datasetname(self): + spec = self.spec + return "G4NUDEXLIB{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4particlexs/package.py b/var/spack/repos/builtin/packages/g4particlexs/package.py index fb86575b466cf1..209831dfa658f4 100644 --- a/var/spack/repos/builtin/packages/g4particlexs/package.py +++ b/var/spack/repos/builtin/packages/g4particlexs/package.py @@ -28,11 +28,11 @@ class G4particlexs(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4PARTICLEXS{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4PARTICLEXS{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4PARTICLEXSDATA", install_path) def url_for_version(self, version): @@ -40,3 +40,8 @@ def url_for_version(self, version): return ( "http://geant4-data.web.cern.ch/geant4-data/datasets/G4PARTICLEXS.%s.tar.gz" % version ) + + @property + def g4datasetname(self): + spec = self.spec + return "G4PARTICLEXS{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4photonevaporation/package.py b/var/spack/repos/builtin/packages/g4photonevaporation/package.py index 1c847dec3e8eb9..099ded6a5f7044 100644 --- a/var/spack/repos/builtin/packages/g4photonevaporation/package.py +++ 
b/var/spack/repos/builtin/packages/g4photonevaporation/package.py @@ -27,13 +27,11 @@ class G4photonevaporation(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "PhotonEvaporation{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path( - self.prefix.share, "data", "PhotonEvaporation{0}".format(self.version) - ) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4LEVELGAMMADATA", install_path) def url_for_version(self, version): @@ -42,3 +40,8 @@ def url_for_version(self, version): "http://geant4-data.web.cern.ch/geant4-data/datasets/G4PhotonEvaporation.%s.tar.gz" % version ) + + @property + def g4datasetname(self): + spec = self.spec + return "PhotonEvaporation{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4pii/package.py b/var/spack/repos/builtin/packages/g4pii/package.py index a1aa5590bc1a42..c9cf78e5cb095e 100644 --- a/var/spack/repos/builtin/packages/g4pii/package.py +++ b/var/spack/repos/builtin/packages/g4pii/package.py @@ -22,13 +22,18 @@ class G4pii(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4PII{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4PII{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4PIIDATA", install_path) def url_for_version(self, version): """Handle version string.""" return "https://geant4-data.web.cern.ch/geant4-data/datasets/G4PII.1.3.tar.gz" % version + + @property + def g4datasetname(self): + spec = self.spec + return "G4PII{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4radioactivedecay/package.py b/var/spack/repos/builtin/packages/g4radioactivedecay/package.py index 69f5681b64996c..33a75291ffa24d 100644 --- a/var/spack/repos/builtin/packages/g4radioactivedecay/package.py +++ b/var/spack/repos/builtin/packages/g4radioactivedecay/package.py @@ -27,13 +27,11 @@ class G4radioactivedecay(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "RadioactiveDecay{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path( - self.prefix.share, "data", "RadioactiveDecay{0}".format(self.version) - ) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4RADIOACTIVEDATA", install_path) def url_for_version(self, version): @@ -42,3 +40,8 @@ def url_for_version(self, version): "http://geant4-data.web.cern.ch/geant4-data/datasets/G4RadioactiveDecay.%s.tar.gz" % version ) + + @property + def g4datasetname(self): + spec = self.spec + return "RadioactiveDecay{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4realsurface/package.py b/var/spack/repos/builtin/packages/g4realsurface/package.py index 28f335bba029c3..1d924943647307 100644 --- 
a/var/spack/repos/builtin/packages/g4realsurface/package.py +++ b/var/spack/repos/builtin/packages/g4realsurface/package.py @@ -25,11 +25,11 @@ class G4realsurface(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "RealSurface{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "RealSurface{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4REALSURFACEDATA", install_path) def url_for_version(self, version): @@ -39,3 +39,8 @@ def url_for_version(self, version): "G4" if version > Version("1.0") else "", version ) ) + + @property + def g4datasetname(self): + spec = self.spec + return "RealSurface{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4saiddata/package.py b/var/spack/repos/builtin/packages/g4saiddata/package.py index 371811381b0967..b4da3185807f31 100644 --- a/var/spack/repos/builtin/packages/g4saiddata/package.py +++ b/var/spack/repos/builtin/packages/g4saiddata/package.py @@ -23,13 +23,18 @@ class G4saiddata(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4SAIDDATA{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4SAIDDATA{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4SAIDXSDATA", install_path) def url_for_version(self, version): """Handle version string.""" return "http://geant4-data.web.cern.ch/geant4-data/datasets/G4SAIDDATA.%s.tar.gz" % version + + @property + def g4datasetname(self): + spec = self.spec + return "G4SAIDDATA{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4tendl/package.py b/var/spack/repos/builtin/packages/g4tendl/package.py index 92c12423c27d5a..1a2e9095115c5a 100644 --- a/var/spack/repos/builtin/packages/g4tendl/package.py +++ b/var/spack/repos/builtin/packages/g4tendl/package.py @@ -24,13 +24,18 @@ class G4tendl(Package): def install(self, spec, prefix): mkdirp(join_path(prefix.share, "data")) - install_path = join_path(prefix.share, "data", "G4TENDL{0}".format(self.version)) + install_path = join_path(prefix.share, "data", self.g4datasetname) install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - install_path = join_path(self.prefix.share, "data", "G4TENDL{0}".format(self.version)) + install_path = join_path(self.prefix.share, "data", self.g4datasetname) env.set("G4PARTICLEHPDATA", install_path) def url_for_version(self, version): """Handle version string.""" return "http://geant4-data.web.cern.ch/geant4-data/datasets/G4TENDL.%s.tar.gz" % version + + @property + def g4datasetname(self): + spec = self.spec + return "G4TENDL{0}".format(spec.version) diff --git a/var/spack/repos/builtin/packages/g4urrpt/package.py b/var/spack/repos/builtin/packages/g4urrpt/package.py index e9ff5df2840c3b..a04e6d5d1addc7 100644 --- a/var/spack/repos/builtin/packages/g4urrpt/package.py +++ b/var/spack/repos/builtin/packages/g4urrpt/package.py @@ -23,13 +23,18 @@ 
class G4urrpt(Package):
     def install(self, spec, prefix):
         mkdirp(join_path(prefix.share, "data"))
-        install_path = join_path(prefix.share, "data", "G4URRPT{0}".format(self.version))
+        install_path = join_path(prefix.share, "data", self.g4datasetname)
         install_tree(self.stage.source_path, install_path)
 
     def setup_dependent_run_environment(self, env, dependent_spec):
-        install_path = join_path(self.prefix.share, "data", "G4URRPT{0}".format(self.version))
+        install_path = join_path(self.prefix.share, "data", self.g4datasetname)
         env.set("G4URRPTDATA", install_path)
 
     def url_for_version(self, version):
         """Handle version string."""
         return "http://geant4-data.web.cern.ch/geant4-data/datasets/G4URRPT.%s.tar.gz" % version
+
+    @property
+    def g4datasetname(self):
+        spec = self.spec
+        return "G4URRPT{0}".format(spec.version)
diff --git a/var/spack/repos/builtin/packages/geant4-data/package.py b/var/spack/repos/builtin/packages/geant4-data/package.py
index 201d2e8d88f79a..3b934da3922bcb 100644
--- a/var/spack/repos/builtin/packages/geant4-data/package.py
+++ b/var/spack/repos/builtin/packages/geant4-data/package.py
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-import glob
 import os
 
 from spack.package import *
@@ -204,5 +203,11 @@ def datadir(self):
     def install(self, spec, prefix):
         with working_dir(self.datadir, create=True):
             for s in spec.dependencies():
-                for d in glob.glob("{0}/data/*".format(s.prefix.share)):
-                    os.symlink(d, os.path.basename(d))
+                if not s.name.startswith("g4"):
+                    continue
+
+                if not hasattr(s.package, "g4datasetname"):
+                    raise InstallError(f"Dependency `{s.name}` does not expose `g4datasetname`")
+
+                d = "{0}/data/{1}".format(s.prefix.share, s.package.g4datasetname)
+                os.symlink(d, os.path.basename(d))

From 8c3068809fec10bc193a0ea56063b068a363254b Mon Sep 17 00:00:00 2001
From: Harmen Stoppels
Date: Mon, 4 Nov 2024 17:32:47 +0100
Subject: [PATCH 062/208] papi: add forward compat bound for cuda (#47409)

---
 var/spack/repos/builtin/packages/papi/package.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/var/spack/repos/builtin/packages/papi/package.py b/var/spack/repos/builtin/packages/papi/package.py
index 7b22451ae2b357..4ce382af5f1b2f 100644
--- a/var/spack/repos/builtin/packages/papi/package.py
+++ b/var/spack/repos/builtin/packages/papi/package.py
@@ -85,6 +85,8 @@ class Papi(AutotoolsPackage, ROCmPackage):
     conflicts("%gcc@8:", when="@5.3.0", msg="Requires GCC version less than 8.0")
     conflicts("+sde", when="@:5", msg="Software defined events (SDE) added in 6.0.0")
     conflicts("^cuda", when="@:5", msg="CUDA support for versions < 6.0.0 not implemented")
+    # https://github.com/icl-utk-edu/papi/pull/205
+    conflicts("^cuda@12.4:", when="@:7.1")
 
     conflicts("%cce", when="@7.1:", msg="-ffree-form flag not recognized")
     conflicts("@=6.0.0", when="+static_tools", msg="Static tools cannot build on version 6.0.0")

From 23ac56edfb411f7b4cfadf843663dbb8827dab8c Mon Sep 17 00:00:00 2001
From: John Gouwar
Date: Mon, 4 Nov 2024 12:48:18 -0500
Subject: [PATCH 063/208] Times spec building and timing to public concretizer
 API (#47310)

This PR has two small contributions:
- It adds another phase to the timer for concretization, "construct_specs",
  to actually see the time the concretizer spends interpreting the `clingo`
  output to build the Python object for a concretized spec.
- It adds the method `Solver.solve_with_stats` to expose the timers that
  were already in the concretizer to the public solver API.
`Solver.solve` just becomes a special case of `Solver.solve_with_stats` that
throws away the timing output (which is what it was already doing). These
changes will make it easier to benchmark concretizer performance and provide
a more complete picture of the time spent in the concretizer by including the
time spent interpreting clingo output.
---
 lib/spack/spack/solver/asp.py | 19 +++++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)

diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index ba50ebccd01736..cb4799a45f37bf 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -889,6 +889,7 @@ def on_model(model):
         result.satisfiable = solve_result.satisfiable
 
         if result.satisfiable:
+            timer.start("construct_specs")
             # get the best model
             builder = SpecBuilder(specs, hash_lookup=setup.reusable_and_possible)
             min_cost, best_model = min(models)
@@ -913,7 +914,8 @@ def on_model(model):
 
             # record the possible dependencies in the solve
             result.possible_dependencies = setup.pkgs
-
+            timer.stop("construct_specs")
+            timer.stop()
         elif cores:
             result.control = self.control
             result.cores.extend(cores)
@@ -4191,7 +4193,7 @@ def _check_input_and_extract_concrete_specs(specs):
             spack.spec.Spec.ensure_valid_variants(s)
         return reusable
 
-    def solve(
+    def solve_with_stats(
         self,
         specs,
         out=None,
@@ -4202,6 +4204,8 @@ def solve(
         allow_deprecated=False,
     ):
         """
+        Concretize a set of specs and track the timing and statistics for the solve
+
         Arguments:
           specs (list): List of ``Spec`` objects to solve for.
           out: Optionally write the generate ASP program to a file-like object.
@@ -4213,15 +4217,22 @@ def solve(
           setup_only (bool): if True, stop after setup and don't solve (default False).
          allow_deprecated (bool): allow deprecated version in the solve
        """
-        # Check upfront that the variants are admissible
        specs = [s.lookup_hash() for s in specs]
        reusable_specs = self._check_input_and_extract_concrete_specs(specs)
        reusable_specs.extend(self.selector.reusable_specs(specs))
        setup = SpackSolverSetup(tests=tests)
        output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
-        result, _, _ = self.driver.solve(
+        return self.driver.solve(
            setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
        )
+
+    def solve(self, specs, **kwargs):
+        """
+        Convenience function for concretizing a set of specs and ignoring timing
+        and statistics. Uses the same kwargs as solve_with_stats.
+        """
+        # Check upfront that the variants are admissible
+        result, _, _ = self.solve_with_stats(specs, **kwargs)
        return result
 
    def solve_in_rounds(

From 575a006ca332846615f0afb21357b15b64476a9e Mon Sep 17 00:00:00 2001
From: Todd Gamblin
Date: Sun, 22 Sep 2024 22:44:39 -0700
Subject: [PATCH 064/208] `cc`: simplify ordered list handling

`cc` divides most paths up into system paths, spack managed paths, and
other paths. This gets really repetitive and makes the code hard to read.
Simplify the script by adding some functions to do most of the redundant
work for us.

Signed-off-by: Todd Gamblin
---
 lib/spack/env/cc | 179 +++++++++++++++++------------------------------
 1 file changed, 65 insertions(+), 114 deletions(-)

diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index ccfc14bb89dc53..44f8b9316afe4f 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -238,6 +238,36 @@ esac
 }
 "
 
+# path_list functions. Path_lists have 3 parts: spack_store_<list>, <list> and system_<list>,
+# which are used to prioritize paths when assembling the final command line.
+
+# init_path_lists LISTNAME
+# Set <LISTNAME>, spack_store_<LISTNAME>, and system_<LISTNAME> to "".
+init_path_lists() {
+    eval "spack_store_$1=\"\""
+    eval "$1=\"\""
+    eval "system_$1=\"\""
+}
+
+# assign_path_lists LISTNAME1 LISTNAME2
+# Copy contents of LISTNAME2 into LISTNAME1, for each path_list prefix.
+assign_path_lists() {
+    eval "spack_store_$1=\"\${spack_store_$2}\""
+    eval "$1=\"\${$2}\""
+    eval "system_$1=\"\${system_$2}\""
+}
+
+# append_path_lists LISTNAME ELT
+# Append the provided ELT to the appropriate list, based on the result of path_order().
+append_path_lists() {
+    path_order "$2"
+    case $? in
+        0) eval "append spack_store_$1 \"\$2\"" ;;
+        1) eval "append $1 \"\$2\"" ;;
+        2) eval "append system_$1 \"\$2\"" ;;
+    esac
+}
+
 # Check if optional parameters are defined
 # If we aren't asking for debug flags, don't add them
 if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
@@ -470,12 +500,7 @@ input_command="$*"
 parse_Wl() {
     while [ $# -ne 0 ]; do
         if [ "$wl_expect_rpath" = yes ]; then
-            path_order "$1"
-            case $? in
-                0) append return_spack_store_rpath_dirs_list "$1" ;;
-                1) append return_rpath_dirs_list "$1" ;;
-                2) append return_system_rpath_dirs_list "$1" ;;
-            esac
+            append_path_lists return_rpath_dirs_list "$1"
             wl_expect_rpath=no
         else
             case "$1" in
@@ -484,24 +509,14 @@ parse_Wl() {
                     if [ -z "$arg" ]; then
                         shift; continue
                     fi
-                    path_order "$arg"
-                    case $? in
-                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
-                        1) append return_rpath_dirs_list "$arg" ;;
-                        2) append return_system_rpath_dirs_list "$arg" ;;
-                    esac
+                    append_path_lists return_rpath_dirs_list "$arg"
                     ;;
                 --rpath=*)
                     arg="${1#--rpath=}"
                     if [ -z "$arg" ]; then
                         shift; continue
                     fi
-                    path_order "$arg"
-                    case $? in
-                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
-                        1) append return_rpath_dirs_list "$arg" ;;
-                        2) append return_system_rpath_dirs_list "$arg" ;;
-                    esac
+                    append_path_lists return_rpath_dirs_list "$arg"
                     ;;
                 -rpath|--rpath)
                     wl_expect_rpath=yes
@@ -509,8 +524,7 @@ parse_Wl() {
             "$dtags_to_strip")
                 ;;
             -Wl)
-                # Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
-                # it.
+                # Nested -Wl,-Wl means we're in NAG compiler territory. We don't support it.
                 return 1
                 ;;
             *)
@@ -529,21 +543,10 @@ categorize_arguments() {
     return_other_args_list=""
     return_isystem_was_used=""
 
-    return_isystem_spack_store_include_dirs_list=""
-    return_isystem_system_include_dirs_list=""
-    return_isystem_include_dirs_list=""
-
-    return_spack_store_include_dirs_list=""
-    return_system_include_dirs_list=""
-    return_include_dirs_list=""
-
-    return_spack_store_lib_dirs_list=""
-    return_system_lib_dirs_list=""
-    return_lib_dirs_list=""
-
-    return_spack_store_rpath_dirs_list=""
-    return_system_rpath_dirs_list=""
-    return_rpath_dirs_list=""
+    init_path_lists return_isystem_include_dirs_list
+    init_path_lists return_include_dirs_list
+    init_path_lists return_lib_dirs_list
+    init_path_lists return_rpath_dirs_list
 
     # Global state for keeping track of -Wl,-rpath -Wl,/path
     wl_expect_rpath=no
@@ -609,32 +612,17 @@ categorize_arguments() {
             arg="${1#-isystem}"
             return_isystem_was_used=true
             if [ -z "$arg" ]; then shift; arg="$1"; fi
-            path_order "$arg"
-            case $? in
-                0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
-                1) append return_isystem_include_dirs_list "$arg" ;;
-                2) append return_isystem_system_include_dirs_list "$arg" ;;
-            esac
+            append_path_lists return_isystem_include_dirs_list "$arg"
            ;;
        -I*)
            arg="${1#-I}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            path_order "$arg"
-            case $? in
-                0) append return_spack_store_include_dirs_list "$arg" ;;
-                1) append return_include_dirs_list "$arg" ;;
-                2) append return_system_include_dirs_list "$arg" ;;
-            esac
+            append_path_lists return_include_dirs_list "$arg"
            ;;
        -L*)
            arg="${1#-L}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            path_order "$arg"
-            case $?
in - 0) append return_spack_store_include_dirs_list "$arg" ;; - 1) append return_include_dirs_list "$arg" ;; - 2) append return_system_include_dirs_list "$arg" ;; - esac + append_path_lists return_include_dirs_list "$arg" ;; -L*) arg="${1#-L}" if [ -z "$arg" ]; then shift; arg="$1"; fi - path_order "$arg" - case $? in - 0) append return_spack_store_lib_dirs_list "$arg" ;; - 1) append return_lib_dirs_list "$arg" ;; - 2) append return_system_lib_dirs_list "$arg" ;; - esac + append_path_lists return_lib_dirs_list "$arg" ;; -l*) # -loopopt=0 is generated erroneously in autoconf <= 2.69, @@ -667,32 +655,17 @@ categorize_arguments() { break elif [ "$xlinker_expect_rpath" = yes ]; then # Register the path of -Xlinker -rpath -Xlinker - path_order "$1" - case $? in - 0) append return_spack_store_rpath_dirs_list "$1" ;; - 1) append return_rpath_dirs_list "$1" ;; - 2) append return_system_rpath_dirs_list "$1" ;; - esac + append_path_lists return_rpath_dirs_list "$1" xlinker_expect_rpath=no else case "$1" in -rpath=*) arg="${1#-rpath=}" - path_order "$arg" - case $? in - 0) append return_spack_store_rpath_dirs_list "$arg" ;; - 1) append return_rpath_dirs_list "$arg" ;; - 2) append return_system_rpath_dirs_list "$arg" ;; - esac + append_path_lists return_rpath_dirs_list "$arg" ;; --rpath=*) arg="${1#--rpath=}" - path_order "$arg" - case $? in - 0) append return_spack_store_rpath_dirs_list "$arg" ;; - 1) append return_rpath_dirs_list "$arg" ;; - 2) append return_system_rpath_dirs_list "$arg" ;; - esac + append_path_lists return_rpath_dirs_list "$arg" ;; -rpath|--rpath) xlinker_expect_rpath=yes @@ -731,21 +704,10 @@ categorize_arguments() { categorize_arguments "$@" -spack_store_include_dirs_list="$return_spack_store_include_dirs_list" -system_include_dirs_list="$return_system_include_dirs_list" -include_dirs_list="$return_include_dirs_list" - -spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list" -system_lib_dirs_list="$return_system_lib_dirs_list" -lib_dirs_list="$return_lib_dirs_list" - -spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list" -system_rpath_dirs_list="$return_system_rpath_dirs_list" -rpath_dirs_list="$return_rpath_dirs_list" - -isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list" -isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list" -isystem_include_dirs_list="$return_isystem_include_dirs_list" +assign_path_lists isystem_include_dirs_list return_isystem_include_dirs_list +assign_path_lists include_dirs_list return_include_dirs_list +assign_path_lists lib_dirs_list return_lib_dirs_list +assign_path_lists rpath_dirs_list return_rpath_dirs_list isystem_was_used="$return_isystem_was_used" other_args_list="$return_other_args_list" @@ -821,21 +783,10 @@ IFS="$lsep" categorize_arguments $spack_flags_list unset IFS -spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list" -spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list" -spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list" - -spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list" -spack_flags_system_include_dirs_list="$return_system_include_dirs_list" -spack_flags_include_dirs_list="$return_include_dirs_list" - -spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list" -spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list" -spack_flags_lib_dirs_list="$return_lib_dirs_list" - 
-spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list" -spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list" -spack_flags_rpath_dirs_list="$return_rpath_dirs_list" +assign_path_lists spack_flags_isystem_include_dirs_list return_isystem_include_dirs_list +assign_path_lists spack_flags_include_dirs_list return_include_dirs_list +assign_path_lists spack_flags_lib_dirs_list return_lib_dirs_list +assign_path_lists spack_flags_rpath_dirs_list return_rpath_dirs_list spack_flags_isystem_was_used="$return_isystem_was_used" spack_flags_other_args_list="$return_other_args_list" @@ -894,7 +845,7 @@ esac case "$mode" in cpp|cc|as|ccld) if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then - extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS + extend spack_store_isystem_include_dirs_list SPACK_STORE_INCLUDE_DIRS extend isystem_include_dirs_list SPACK_INCLUDE_DIRS else extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS @@ -910,32 +861,32 @@ args_list="$flags_list" # Include search paths partitioned by (in store, non-sytem, system) # NOTE: adding ${lsep} to the prefix here turns every added element into two -extend args_list spack_flags_spack_store_include_dirs_list -I +extend args_list spack_store_spack_flags_include_dirs_list -I extend args_list spack_store_include_dirs_list -I extend args_list spack_flags_include_dirs_list -I extend args_list include_dirs_list -I -extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}" -extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}" +extend args_list spack_store_spack_flags_isystem_include_dirs_list "-isystem${lsep}" +extend args_list spack_store_isystem_include_dirs_list "-isystem${lsep}" extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}" extend args_list isystem_include_dirs_list "-isystem${lsep}" -extend args_list spack_flags_system_include_dirs_list -I +extend args_list system_spack_flags_include_dirs_list -I extend args_list system_include_dirs_list -I -extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}" -extend args_list isystem_system_include_dirs_list "-isystem${lsep}" +extend args_list system_spack_flags_isystem_include_dirs_list "-isystem${lsep}" +extend args_list system_isystem_include_dirs_list "-isystem${lsep}" # Library search paths partitioned by (in store, non-sytem, system) -extend args_list spack_flags_spack_store_lib_dirs_list "-L" +extend args_list spack_store_spack_flags_lib_dirs_list "-L" extend args_list spack_store_lib_dirs_list "-L" extend args_list spack_flags_lib_dirs_list "-L" extend args_list lib_dirs_list "-L" -extend args_list spack_flags_system_lib_dirs_list "-L" +extend args_list system_spack_flags_lib_dirs_list "-L" extend args_list system_lib_dirs_list "-L" # RPATHs arguments @@ -944,26 +895,26 @@ case "$mode" in if [ -n "$dtags_to_add" ] ; then append args_list "$linker_arg$dtags_to_add" fi - extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath" + extend args_list spack_store_spack_flags_rpath_dirs_list "$rpath" extend args_list spack_store_rpath_dirs_list "$rpath" extend args_list spack_flags_rpath_dirs_list "$rpath" extend args_list rpath_dirs_list "$rpath" - extend args_list spack_flags_system_rpath_dirs_list "$rpath" + extend args_list system_spack_flags_rpath_dirs_list "$rpath" extend args_list system_rpath_dirs_list "$rpath" ;; ld) if [ -n "$dtags_to_add" ] ; then append args_list "$dtags_to_add" fi - extend 
args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}" + extend args_list spack_store_spack_flags_rpath_dirs_list "-rpath${lsep}" extend args_list spack_store_rpath_dirs_list "-rpath${lsep}" extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}" extend args_list rpath_dirs_list "-rpath${lsep}" - extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}" + extend args_list system_spack_flags_rpath_dirs_list "-rpath${lsep}" extend args_list system_rpath_dirs_list "-rpath${lsep}" ;; esac From 5cc07522abef9fa169cfa341e431c030dbbff7fe Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 27 Oct 2024 21:54:08 -0700 Subject: [PATCH 065/208] cc: parse RPATHs when in `ld` mode In the pure `ld` case, we weren't actually parsing `RPATH` arguments separately as we do for `ccld`. Fix this by adding *another* nested case statement for raw `RPATH` parsing. There are now 3 places where we deal with `-rpath` and friends, but I don't see a great way to unify them, as `-Wl,`, `-Xlinker`, and raw `-rpath` arguments are all ever so slightly different. Also, this Fixes ordering of assertions to make `pytest` diffs more intelligible. The meaning of `+` and `-` in diffs changed in `pytest` 6.0 and the "preferred" order for assertions became `assert actual == expected` instead of the other way around. Signed-off-by: Todd Gamblin --- lib/spack/env/cc | 67 ++++++++++++++++++++++++++------------ lib/spack/spack/test/cc.py | 39 +++++++++++++++++++++- 2 files changed, 85 insertions(+), 21 deletions(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 44f8b9316afe4f..88969d3f3097f7 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -101,10 +101,9 @@ setsep() { esac } -# prepend LISTNAME ELEMENT [SEP] +# prepend LISTNAME ELEMENT # -# Prepend ELEMENT to the list stored in the variable LISTNAME, -# assuming the list is separated by SEP. +# Prepend ELEMENT to the list stored in the variable LISTNAME. # Handles empty lists and single-element lists. prepend() { varname="$1" @@ -682,7 +681,36 @@ categorize_arguments() { "$dtags_to_strip") ;; *) - append return_other_args_list "$1" + # if mode is not ld, we can just add to other args + if [ "$mode" != "ld" ]; then + append return_other_args_list "$1" + shift + continue + fi + + # if we're in linker mode, we need to parse raw RPATH args + case "$1" in + -rpath=*) + arg="${1#-rpath=}" + append_path_lists return_rpath_dirs_list "$arg" + ;; + --rpath=*) + arg="${1#--rpath=}" + append_path_lists return_rpath_dirs_list "$arg" + ;; + -rpath|--rpath) + if [ $# -eq 1 ]; then + # -rpath without value: let the linker raise an error. 
+ append return_other_args_list "$1" + break + fi + shift + append_path_lists return_rpath_dirs_list "$1" + ;; + *) + append return_other_args_list "$1" + ;; + esac ;; esac shift @@ -890,35 +918,34 @@ extend args_list system_spack_flags_lib_dirs_list "-L" extend args_list system_lib_dirs_list "-L" # RPATHs arguments +rpath_prefix="" case "$mode" in ccld) if [ -n "$dtags_to_add" ] ; then append args_list "$linker_arg$dtags_to_add" fi - extend args_list spack_store_spack_flags_rpath_dirs_list "$rpath" - extend args_list spack_store_rpath_dirs_list "$rpath" - - extend args_list spack_flags_rpath_dirs_list "$rpath" - extend args_list rpath_dirs_list "$rpath" - - extend args_list system_spack_flags_rpath_dirs_list "$rpath" - extend args_list system_rpath_dirs_list "$rpath" + rpath_prefix="$rpath" ;; ld) if [ -n "$dtags_to_add" ] ; then append args_list "$dtags_to_add" fi - extend args_list spack_store_spack_flags_rpath_dirs_list "-rpath${lsep}" - extend args_list spack_store_rpath_dirs_list "-rpath${lsep}" - - extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}" - extend args_list rpath_dirs_list "-rpath${lsep}" - - extend args_list system_spack_flags_rpath_dirs_list "-rpath${lsep}" - extend args_list system_rpath_dirs_list "-rpath${lsep}" + rpath_prefix="-rpath${lsep}" ;; esac +# if mode is ccld or ld, extend RPATH lists with the prefix determined above +if [ -n "$rpath_prefix" ]; then + extend args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix" + extend args_list spack_store_rpath_dirs_list "$rpath_prefix" + + extend args_list spack_flags_rpath_dirs_list "$rpath_prefix" + extend args_list rpath_dirs_list "$rpath_prefix" + + extend args_list system_spack_flags_rpath_dirs_list "$rpath_prefix" + extend args_list system_rpath_dirs_list "$rpath_prefix" +fi + # Other arguments from the input command extend args_list other_args_list extend args_list spack_flags_other_args_list diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index 4a394680f480b8..f65bef9b0161d4 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -199,7 +199,7 @@ def check_args(cc, args, expected): """ with set_env(SPACK_TEST_COMMAND="dump-args"): cc_modified_args = cc(*args, output=str).strip().split("\n") - assert expected == cc_modified_args + assert cc_modified_args == expected def check_args_contents(cc, args, must_contain, must_not_contain): @@ -272,6 +272,43 @@ def test_ld_mode(wrapper_environment): assert dump_mode(ld, ["foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath,foo"]) == "ld" +def test_ld_unterminated_rpath(wrapper_environment): + check_args( + ld, + ["foo.o", "bar.o", "baz.o", "-o", "foo", "-rpath"], + ["ld", "--disable-new-dtags", "foo.o", "bar.o", "baz.o", "-o", "foo", "-rpath"], + ) + + +def test_xlinker_unterminated_rpath(wrapper_environment): + check_args( + cc, + ["foo.o", "bar.o", "baz.o", "-o", "foo", "-Xlinker", "-rpath"], + [real_cc] + + target_args + + [ + "-Wl,--disable-new-dtags", + "foo.o", + "bar.o", + "baz.o", + "-o", + "foo", + "-Xlinker", + "-rpath", + ], + ) + + +def test_wl_unterminated_rpath(wrapper_environment): + check_args( + cc, + ["foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath"], + [real_cc] + + target_args + + ["-Wl,--disable-new-dtags", "foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath"], + ) + + def test_ld_flags(wrapper_environment, wrapper_flags): check_args( ld, From 38c8069ab42f44aa9f4779968937fc6842dc2109 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Mon, 4 Nov 2024 11:31:57 -0800 Subject: [PATCH 
066/208] filesystem.py: add `max_depth` argument to `find` (#41945) * `find(..., max_depth=...)` can be used to control how many directories at most to descend into below the starting point * `find` now enters every unique (symlinked) directory once at the lowest depth * `find` is now repeatable: it traverses the directory tree in a deterministic order --- lib/spack/llnl/util/filesystem.py | 175 +++++++++++++----- lib/spack/spack/test/llnl/util/file_list.py | 32 +--- lib/spack/spack/test/llnl/util/filesystem.py | 158 +++++++++++++++- .../packages/attributes-foo/package.py | 4 +- 4 files changed, 292 insertions(+), 77 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 00bb270151908c..b63b6e94b39a2e 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -1673,16 +1673,20 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) return FindFirstFile(root, *files, bfs_depth=bfs_depth).find() -def find(root, files, recursive=True): +def find(root, files, recursive=True, max_depth: Optional[int] = None): """Search for ``files`` starting from the ``root`` directory. Like GNU/BSD find but written entirely in Python. + Specifically this behaves like `find -type f`: it only returns + results that are files. When searching recursively, this behaves + as `find` with the `-L` option (follows symlinks). + Examples: .. code-block:: console - $ find /usr -name python + $ find -L /usr -name python is equivalent to: @@ -1712,6 +1716,8 @@ def find(root, files, recursive=True): files (str or collections.abc.Sequence): Library name(s) to search for recursive (bool): if False search only root folder, if True descends top-down from the root. Defaults to True. + max_depth (int): if set, don't search below this depth. Cannot be set + if recursive is False Returns: list: The files that have been found @@ -1719,59 +1725,135 @@ def find(root, files, recursive=True): if isinstance(files, str): files = [files] - if recursive: - tty.debug(f"Find (recursive): {root} {str(files)}") - result = _find_recursive(root, files) - else: - tty.debug(f"Find (not recursive): {root} {str(files)}") - result = _find_non_recursive(root, files) + # If recursive is false, max_depth can only be None or 0 + if max_depth and not recursive: + raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False") + + if not recursive: + max_depth = 0 + elif max_depth is None: + max_depth = sys.maxsize + + tty.debug(f"Find (max depth = {max_depth}): {root} {str(files)}") + result = find_max_depth(root, files, max_depth) tty.debug(f"Find complete: {root} {str(files)}") return result -@system_path_filter -def _find_recursive(root, search_files): - # The variable here is **on purpose** a defaultdict. The idea is that - # we want to poke the filesystem as little as possible, but still maintain - # stability in the order of the answer. Thus we are recording each library - # found in a key, and reconstructing the stable order later. - found_files = collections.defaultdict(list) +@system_path_filter(arg_slice=slice(1)) +def find_max_depth(root, globs, max_depth: Optional[int] = None): + """Given a set of non-recursive glob file patterns, finds all + files matching those patterns up to a maximum specified depth. 
- # Make the path absolute to have os.walk also return an absolute path - root = os.path.abspath(root) - for path, _, list_files in os.walk(root): - for search_file in search_files: - matches = glob.glob(os.path.join(path, search_file)) - matches = [os.path.join(path, x) for x in matches] - found_files[search_file].extend(matches) + If a directory has a name which matches an input pattern, it will + not be included in the results. - answer = [] - for search_file in search_files: - answer.extend(found_files[search_file]) + If ``max_depth`` is specified, does not search below that depth. - return answer + If ``globs`` is a list, files matching earlier entries are placed + in the return value before files matching later entries. + """ + # If root doesn't exist, then we say we found nothing. If it + # exists but is not a dir, we assume the user would want to + # know; likewise if it exists but we do not have permission to + # access it. + try: + stat_root = os.stat(root) + except OSError as e: + if e.errno == errno.ENOENT: + return [] + else: + raise + if not stat.S_ISDIR(stat_root.st_mode): + raise ValueError(f"{root} is not a directory") + if max_depth is None: + max_depth = sys.maxsize -@system_path_filter -def _find_non_recursive(root, search_files): - # The variable here is **on purpose** a defaultdict as os.list_dir - # can return files in any order (does not preserve stability) - found_files = collections.defaultdict(list) + if isinstance(globs, str): + globs = [globs] + # Apply normcase to regular expressions and to the filenames: + # this respects case-sensitivity semantics of different OSes + # (e.g. file search is typically case-insensitive on Windows) + regexes = [re.compile(fnmatch.translate(os.path.normcase(x))) for x in globs] - # Make the path absolute to have absolute path returned + # Note later calls to os.scandir etc. return abspaths if the + # input is absolute, see https://docs.python.org/3/library/os.html#os.DirEntry.path root = os.path.abspath(root) - for search_file in search_files: - matches = glob.glob(os.path.join(root, search_file)) - matches = [os.path.join(root, x) for x in matches] - found_files[search_file].extend(matches) + found_files = collections.defaultdict(list) - answer = [] - for search_file in search_files: - answer.extend(found_files[search_file]) + def _dir_id(stat_info): + # Note: on windows, st_ino is the file index and st_dev + # is the volume serial number. See + # https://github.com/python/cpython/blob/3.9/Python/fileutils.c + return (stat_info.st_ino, stat_info.st_dev) + + def _log_file_access_issue(e): + errno_name = errno.errorcode.get(e.errno, "UNKNOWN") + tty.debug(f"find must skip {dir_entry.path}: {errno_name} {str(e)}") + + visited_dirs = set([_dir_id(stat_root)]) + + # Each queue item stores the depth and path + # This achieves a consistent traversal order by iterating through + # each directory in alphabetical order. + # This also traverses in BFS order to ensure finding the shortest + # path to any file (or one of the shortest paths, if there are + # several - the one returned will be consistent given the prior + # point). 
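+    # FIFO discipline: new entries are appended on the left and popped from
+    # the right, which yields the breadth-first order described above.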
+ dir_queue = collections.deque([(0, root)]) + while dir_queue: + depth, next_dir = dir_queue.pop() + try: + dir_iter = os.scandir(next_dir) + except OSError: + # Most commonly, this would be a permissions issue, for + # example if we are scanning an external directory like /usr + continue - return answer + with dir_iter: + ordered_entries = sorted(dir_iter, key=lambda x: x.name) + for dir_entry in ordered_entries: + try: + it_is_a_dir = dir_entry.is_dir(follow_symlinks=True) + except OSError as e: + # Possible permission issue, or a symlink that cannot + # be resolved (ELOOP). + _log_file_access_issue(e) + continue + + if it_is_a_dir and (depth < max_depth): + try: + # The stat should be performed in a try/except block. + # We repeat that here vs. moving to the above block + # because we only want to call `stat` if we haven't + # exceeded our max_depth + if sys.platform == "win32": + # Note: st_ino/st_dev on DirEntry.stat are not set on + # Windows, so we have to call os.stat + stat_info = os.stat(dir_entry.path, follow_symlinks=True) + else: + stat_info = dir_entry.stat(follow_symlinks=True) + except OSError as e: + _log_file_access_issue(e) + continue + + dir_id = _dir_id(stat_info) + if dir_id not in visited_dirs: + dir_queue.appendleft((depth + 1, dir_entry.path)) + visited_dirs.add(dir_id) + else: + fname = os.path.basename(dir_entry.path) + for pattern in regexes: + if pattern.match(os.path.normcase(fname)): + found_files[pattern].append(os.path.join(next_dir, fname)) + + # TODO: for fully-recursive searches, we can print a warning after + # after having searched everything up to some fixed depth + + return list(itertools.chain(*[found_files[x] for x in regexes])) # Utilities for libraries and headers @@ -2210,7 +2292,9 @@ def find_system_libraries(libraries, shared=True): return libraries_found -def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): +def find_libraries( + libraries, root, shared=True, recursive=False, runtime=True, max_depth: Optional[int] = None +): """Returns an iterable of full paths to libraries found in a root dir. Accepts any glob characters accepted by fnmatch: @@ -2231,6 +2315,8 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): otherwise for static. Defaults to True. recursive (bool): if False search only root folder, if True descends top-down from the root. Defaults to False. + max_depth (int): if set, don't search below this depth. Cannot be set + if recursive is False runtime (bool): Windows only option, no-op elsewhere. If true, search for runtime shared libs (.DLL), otherwise, search for .Lib files. If shared is false, this has no meaning. @@ -2239,6 +2325,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): Returns: LibraryList: The libraries that have been found """ + if isinstance(libraries, str): libraries = [libraries] elif not isinstance(libraries, collections.abc.Sequence): @@ -2271,8 +2358,10 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes] if not recursive: + if max_depth: + raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False") # If not recursive, look for the libraries directly in root - return LibraryList(find(root, libraries, False)) + return LibraryList(find(root, libraries, recursive=False)) # To speedup the search for external packages configured e.g. 
in /usr, # perform first non-recursive search in root/lib then in root/lib64 and @@ -2290,7 +2379,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): if found_libs: break else: - found_libs = find(root, libraries, True) + found_libs = find(root, libraries, recursive=True, max_depth=max_depth) return LibraryList(found_libs) diff --git a/lib/spack/spack/test/llnl/util/file_list.py b/lib/spack/spack/test/llnl/util/file_list.py index 75ba3ae89d9aca..e2ff5a82109510 100644 --- a/lib/spack/spack/test/llnl/util/file_list.py +++ b/lib/spack/spack/test/llnl/util/file_list.py @@ -9,7 +9,7 @@ import pytest -from llnl.util.filesystem import HeaderList, LibraryList, find, find_headers, find_libraries +from llnl.util.filesystem import HeaderList, LibraryList, find_headers, find_libraries import spack.paths @@ -324,33 +324,3 @@ def test_searching_order(search_fn, search_list, root, kwargs): # List should be empty here assert len(rlist) == 0 - - -@pytest.mark.parametrize( - "root,search_list,kwargs,expected", - [ - ( - search_dir, - "*/*bar.tx?", - {"recursive": False}, - [ - os.path.join(search_dir, os.path.join("a", "foobar.txt")), - os.path.join(search_dir, os.path.join("b", "bar.txp")), - os.path.join(search_dir, os.path.join("c", "bar.txt")), - ], - ), - ( - search_dir, - "*/*bar.tx?", - {"recursive": True}, - [ - os.path.join(search_dir, os.path.join("a", "foobar.txt")), - os.path.join(search_dir, os.path.join("b", "bar.txp")), - os.path.join(search_dir, os.path.join("c", "bar.txt")), - ], - ), - ], -) -def test_find_with_globbing(root, search_list, kwargs, expected): - matches = find(root, search_list, **kwargs) - assert sorted(matches) == sorted(expected) diff --git a/lib/spack/spack/test/llnl/util/filesystem.py b/lib/spack/spack/test/llnl/util/filesystem.py index a0c98747698b20..01379be94c0614 100644 --- a/lib/spack/spack/test/llnl/util/filesystem.py +++ b/lib/spack/spack/test/llnl/util/filesystem.py @@ -6,6 +6,7 @@ """Tests for ``llnl/util/filesystem.py``""" import filecmp import os +import pathlib import shutil import stat import sys @@ -14,7 +15,8 @@ import pytest import llnl.util.filesystem as fs -from llnl.util.symlink import islink, readlink, symlink +import llnl.util.symlink +from llnl.util.symlink import _windows_can_symlink, islink, readlink, symlink import spack.paths @@ -1035,3 +1037,157 @@ def test_windows_sfn(tmpdir): assert "d\\LONGER~1" in fs.windows_sfn(d) assert "d\\LONGER~2" in fs.windows_sfn(e) shutil.rmtree(tmpdir.join("d")) + + +@pytest.fixture +def dir_structure_with_things_to_find(tmpdir): + """ + / + dir_one/ + file_one + dir_two/ + dir_three/ + dir_four/ + file_two + file_three + file_four + """ + dir_one = tmpdir.join("dir_one").ensure(dir=True) + tmpdir.join("dir_two").ensure(dir=True) + dir_three = tmpdir.join("dir_three").ensure(dir=True) + dir_four = dir_three.join("dir_four").ensure(dir=True) + + locations = {} + locations["file_one"] = str(dir_one.join("file_one").ensure()) + locations["file_two"] = str(dir_four.join("file_two").ensure()) + locations["file_three"] = str(dir_three.join("file_three").ensure()) + locations["file_four"] = str(tmpdir.join("file_four").ensure()) + + return str(tmpdir), locations + + +def test_find_max_depth(dir_structure_with_things_to_find): + root, locations = dir_structure_with_things_to_find + + # Make sure the paths we use to verify are absolute + assert os.path.isabs(locations["file_one"]) + + assert set(fs.find_max_depth(root, "file_*", 0)) == {locations["file_four"]} + assert 
set(fs.find_max_depth(root, "file_*", 1)) == { + locations["file_one"], + locations["file_three"], + locations["file_four"], + } + assert set(fs.find_max_depth(root, "file_two", 2)) == {locations["file_two"]} + assert not set(fs.find_max_depth(root, "file_two", 1)) + assert set(fs.find_max_depth(root, "file_two")) == {locations["file_two"]} + assert set(fs.find_max_depth(root, "file_*")) == set(locations.values()) + + +def test_find_max_depth_relative(dir_structure_with_things_to_find): + """find_max_depth should return absolute paths even if + the provided path is relative. + """ + root, locations = dir_structure_with_things_to_find + with fs.working_dir(root): + assert set(fs.find_max_depth(".", "file_*", 0)) == {locations["file_four"]} + assert set(fs.find_max_depth(".", "file_two", 2)) == {locations["file_two"]} + + +@pytest.mark.parametrize("recursive,max_depth", [(False, -1), (False, 1)]) +def test_max_depth_and_recursive_errors(tmpdir, recursive, max_depth): + root = str(tmpdir) + error_str = "cannot be set if recursive is False" + with pytest.raises(ValueError, match=error_str): + fs.find(root, ["some_file"], recursive=recursive, max_depth=max_depth) + + with pytest.raises(ValueError, match=error_str): + fs.find_libraries(["some_lib"], root, recursive=recursive, max_depth=max_depth) + + +def dir_structure_with_things_to_find_links(tmpdir, use_junctions=False): + """ + "lx-dy" means "level x, directory y" + "lx-fy" means "level x, file y" + "lx-sy" means "level x, symlink y" + + / + l1-d1/ + l2-d1/ + l3-s1 -> l1-d2 # points to directory above l2-d1 + l3-d2/ + l4-f1 + l3-s3 -> l1-d1 # cyclic link + l3-d4/ + l4-f2 + l1-d2/ + l2-f1 + l2-d2/ + l3-f3 + l2-s3 -> l2-d2 + l1-s3 -> l3-d4 # a link that "skips" a directory level + l1-s4 -> l2-s3 # a link to a link to a dir + """ + if sys.platform == "win32" and (not use_junctions) and (not _windows_can_symlink()): + pytest.skip("This Windows instance is not configured with symlink support") + + l1_d1 = tmpdir.join("l1-d1").ensure(dir=True) + l2_d1 = l1_d1.join("l2-d1").ensure(dir=True) + l3_d2 = l2_d1.join("l3-d2").ensure(dir=True) + l3_d4 = l2_d1.join("l3-d4").ensure(dir=True) + l1_d2 = tmpdir.join("l1-d2").ensure(dir=True) + l2_d2 = l1_d2.join("l1-d2").ensure(dir=True) + + if use_junctions: + link_fn = llnl.util.symlink._windows_create_junction + else: + link_fn = os.symlink + + link_fn(l1_d2, pathlib.Path(l2_d1) / "l3-s1") + link_fn(l1_d1, pathlib.Path(l2_d1) / "l3-s3") + link_fn(l3_d4, pathlib.Path(tmpdir) / "l1-s3") + l2_s3 = pathlib.Path(l1_d2) / "l2-s3" + link_fn(l2_d2, l2_s3) + link_fn(l2_s3, pathlib.Path(tmpdir) / "l1-s4") + + locations = {} + locations["l4-f1"] = str(l3_d2.join("l4-f1").ensure()) + locations["l4-f2-full"] = str(l3_d4.join("l4-f2").ensure()) + locations["l4-f2-link"] = str(pathlib.Path(tmpdir) / "l1-s3" / "l4-f2") + locations["l2-f1"] = str(l1_d2.join("l2-f1").ensure()) + locations["l2-f1-link"] = str(pathlib.Path(tmpdir) / "l1-d1" / "l2-d1" / "l3-s1" / "l2-f1") + locations["l3-f3-full"] = str(l2_d2.join("l3-f3").ensure()) + locations["l3-f3-link-l1"] = str(pathlib.Path(tmpdir) / "l1-s4" / "l3-f3") + + return str(tmpdir), locations + + +def _check_find_links(root, locations): + root = pathlib.Path(root) + assert set(fs.find_max_depth(root, "l4-f1")) == {locations["l4-f1"]} + assert set(fs.find_max_depth(root / "l1-s3", "l4-f2", 0)) == {locations["l4-f2-link"]} + assert set(fs.find_max_depth(root / "l1-d1", "l2-f1")) == {locations["l2-f1-link"]} + # File is accessible via symlink and subdir, the link path will be + # 
searched first, and the directory will not be searched again when + # it is encountered the second time (via not-link) in the traversal + assert set(fs.find_max_depth(root, "l4-f2")) == {locations["l4-f2-link"]} + # File is accessible only via the dir, so the full file path should + # be reported + assert set(fs.find_max_depth(root / "l1-d1", "l4-f2")) == {locations["l4-f2-full"]} + # Check following links to links + assert set(fs.find_max_depth(root, "l3-f3")) == {locations["l3-f3-link-l1"]} + + +@pytest.mark.parametrize( + "use_junctions", + [ + False, + pytest.param( + True, + marks=pytest.mark.skipif(sys.platform != "win32", reason="Only Windows has junctions"), + ), + ], +) +def test_find_max_depth_symlinks(tmpdir, use_junctions): + root, locations = dir_structure_with_things_to_find_links(tmpdir, use_junctions=use_junctions) + _check_find_links(root, locations) diff --git a/var/spack/repos/builtin.mock/packages/attributes-foo/package.py b/var/spack/repos/builtin.mock/packages/attributes-foo/package.py index 31c88f4b08564a..b882fc9b6595b6 100644 --- a/var/spack/repos/builtin.mock/packages/attributes-foo/package.py +++ b/var/spack/repos/builtin.mock/packages/attributes-foo/package.py @@ -44,7 +44,7 @@ def libs(self): # Header provided by the bar virutal package @property def bar_headers(self): - return find_headers("bar/bar", root=self.home.include, recursive=False) + return find_headers("bar", root=self.home.include, recursive=True) # Libary provided by the bar virtual package @property @@ -59,7 +59,7 @@ def baz_home(self): # Header provided by the baz virtual package @property def baz_headers(self): - return find_headers("baz/baz", root=self.baz_home.include, recursive=False) + return find_headers("baz", root=self.baz_home.include, recursive=True) # Library provided by the baz virtual package @property From 6924c530e2a619801efcc5b5c29b9fa945876049 Mon Sep 17 00:00:00 2001 From: Darren Bolduc Date: Mon, 4 Nov 2024 14:50:54 -0500 Subject: [PATCH 067/208] google-cloud-cpp: add v2.29.0, v2.30.0 (#47146) * google-cloud-cpp: add v2.29.0; fix cxx-std versions * d'oh, single value for the variant --- .../builtin/packages/google-cloud-cpp/package.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/google-cloud-cpp/package.py b/var/spack/repos/builtin/packages/google-cloud-cpp/package.py index 7a477505cc94ca..e66aedae3f67b1 100644 --- a/var/spack/repos/builtin/packages/google-cloud-cpp/package.py +++ b/var/spack/repos/builtin/packages/google-cloud-cpp/package.py @@ -18,6 +18,8 @@ class GoogleCloudCpp(CMakePackage): sanity_check_is_dir = ["lib", "include"] + version("2.30.0", sha256="170650b11ece54977b42dd85be648b6bd2d614ff68ea6863a0013865e576b49c") + version("2.29.0", sha256="758e1eca8186b962516c0659b34ce1768ba1c9769cfd998c5bbffb084ad901ff") version("2.28.0", sha256="1d51910cb4419f6100d8b9df6bccd33477d09f50e378f12b06dae0f137ed7bc6") depends_on("abseil-cpp") @@ -30,11 +32,17 @@ class GoogleCloudCpp(CMakePackage): variant("shared", default=False, description="Build shared instead of static libraries") variant( "cxxstd", - default="11", - values=("11", "14", "17", "20"), + default="14", + values=("14", "17", "20"), multi=False, description="Use the specified C++ standard when building.", ) + variant( + "libraries", + default="__ga_libraries__", + multi=False, + description="Which client libraries to build/install. e.g. 
libraries=bigtable,storage", + ) def cmake_args(self): args = [ @@ -43,6 +51,6 @@ def cmake_args(self): "-DBUILD_TESTING:Bool=OFF", "-DGOOGLE_CLOUD_CPP_WITH_MOCKS:Bool=OFF", "-DGOOGLE_CLOUD_CPP_ENABLE_EXAMPLES:Bool=OFF", - "-DGOOGLE_CLOUD_CPP_ENABLE:String=__ga_libraries__", + self.define_from_variant("GOOGLE_CLOUD_CPP_ENABLE", "libraries"), ] return args From 0de6c174774d8fb22a8a5cfbf7b42e7e445ac47c Mon Sep 17 00:00:00 2001 From: Sreenivasa Murthy Kolam Date: Tue, 5 Nov 2024 01:24:48 +0530 Subject: [PATCH 068/208] fix the error libroctx64.so.o not found when executing MIOpenDriver (#47196) --- ...cer-when-building-miopendriver-6.1.0.patch | 26 ++++++++++++++ ...cer-when-building-miopendriver-6.2.0.patch | 26 ++++++++++++++ ...002-add-include-dir-miopen-hip-6.1.0.patch | 35 ------------------- .../builtin/packages/miopen-hip/package.py | 13 +++---- 4 files changed, 59 insertions(+), 41 deletions(-) create mode 100644 var/spack/repos/builtin/packages/miopen-hip/0001-link-with-roctracer-when-building-miopendriver-6.1.0.patch create mode 100644 var/spack/repos/builtin/packages/miopen-hip/0001-link-with-roctracer-when-building-miopendriver-6.2.0.patch delete mode 100644 var/spack/repos/builtin/packages/miopen-hip/0002-add-include-dir-miopen-hip-6.1.0.patch diff --git a/var/spack/repos/builtin/packages/miopen-hip/0001-link-with-roctracer-when-building-miopendriver-6.1.0.patch b/var/spack/repos/builtin/packages/miopen-hip/0001-link-with-roctracer-when-building-miopendriver-6.1.0.patch new file mode 100644 index 00000000000000..c12450b45164bb --- /dev/null +++ b/var/spack/repos/builtin/packages/miopen-hip/0001-link-with-roctracer-when-building-miopendriver-6.1.0.patch @@ -0,0 +1,26 @@ +From bbfc08e034b80d8b8c6895cb74c38544ffa9a9b4 Mon Sep 17 00:00:00 2001 +From: sreenivasa murthy kolam +Date: Thu, 24 Oct 2024 14:01:27 +0000 +Subject: [PATCH] link with roctracer when building miopendriver for 6.1.0 + +--- + driver/CMakeLists.txt | 3 +++ + 1 file changed, 3 insertions(+) + +diff --git a/driver/CMakeLists.txt b/driver/CMakeLists.txt +index 7d4fdbb..31de1ba 100644 +--- a/driver/CMakeLists.txt ++++ b/driver/CMakeLists.txt +@@ -34,6 +34,9 @@ endif() + add_dependencies(MIOpenDriver generate_kernels) + target_include_directories(MIOpenDriver PRIVATE ../src/kernels) + target_link_libraries(MIOpenDriver MIOpen Threads::Threads) ++if(MIOPEN_USE_ROCTRACER) ++ target_link_libraries(MIOpenDriver ${rocTracer}) ++endif() + if(NOT MIOPEN_EMBED_DB STREQUAL "") + target_link_libraries(MIOpenDriver $ ) + endif() +-- +2.39.3 + diff --git a/var/spack/repos/builtin/packages/miopen-hip/0001-link-with-roctracer-when-building-miopendriver-6.2.0.patch b/var/spack/repos/builtin/packages/miopen-hip/0001-link-with-roctracer-when-building-miopendriver-6.2.0.patch new file mode 100644 index 00000000000000..fd8a3bb88c4abe --- /dev/null +++ b/var/spack/repos/builtin/packages/miopen-hip/0001-link-with-roctracer-when-building-miopendriver-6.2.0.patch @@ -0,0 +1,26 @@ +From 5565f0bf0a8e7b8217ed1a943a4210fec303ec42 Mon Sep 17 00:00:00 2001 +From: sreenivasa murthy kolam +Date: Thu, 24 Oct 2024 13:55:01 +0000 +Subject: [PATCH] link with roctracer when building miopendriver + +--- + driver/CMakeLists.txt | 3 +++ + 1 file changed, 3 insertions(+) + +diff --git a/driver/CMakeLists.txt b/driver/CMakeLists.txt +index 8f19a90..6c701d6 100644 +--- a/driver/CMakeLists.txt ++++ b/driver/CMakeLists.txt +@@ -64,6 +64,9 @@ endif() + add_dependencies(MIOpenDriver generate_kernels) + target_include_directories(MIOpenDriver PRIVATE ../src/kernels) + 
target_link_libraries(MIOpenDriver MIOpen Threads::Threads roc::rocrand) ++if(MIOPEN_USE_ROCTRACER) ++ target_link_libraries(MIOpenDriver ${rocTracer}) ++endif() + if(NOT MIOPEN_EMBED_DB STREQUAL "") + target_link_libraries(MIOpenDriver $ ) + endif() +-- +2.39.3 + diff --git a/var/spack/repos/builtin/packages/miopen-hip/0002-add-include-dir-miopen-hip-6.1.0.patch b/var/spack/repos/builtin/packages/miopen-hip/0002-add-include-dir-miopen-hip-6.1.0.patch deleted file mode 100644 index 44e68e401c1cea..00000000000000 --- a/var/spack/repos/builtin/packages/miopen-hip/0002-add-include-dir-miopen-hip-6.1.0.patch +++ /dev/null @@ -1,35 +0,0 @@ -From 1693afd9690b97fcceff09ffce765712e3c7361a Mon Sep 17 00:00:00 2001 -From: Renjith Ravindran -Date: Mon, 29 Apr 2024 08:01:47 +0000 -Subject: [PATCH] Adding roctracer-dev include and library path - ---- - src/CMakeLists.txt | 5 ++++- - 1 file changed, 4 insertions(+), 1 deletion(-) - -diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt -index 0741a60..84b6805 100644 ---- a/src/CMakeLists.txt -+++ b/src/CMakeLists.txt -@@ -739,6 +739,9 @@ if(WIN32) - endif() - - target_include_directories(MIOpen SYSTEM PUBLIC $) -+target_include_directories(MIOpen SYSTEM PUBLIC "${NLOHMANN_JSON_INCLUDE}") -+target_include_directories(MIOpen SYSTEM PUBLIC "${ROCTRACER_INCLUDE_DIR}") -+target_include_directories(MIOpen SYSTEM PUBLIC "${SQLITE_INCLUDE_DIR}") - # Workaround : change in rocm-cmake was causing linking error so had to add ${CMAKE_DL_LIBS} - # We can remove ${CMAKE_DL_LIBS} once root cause is identified. - target_link_libraries(MIOpen PRIVATE ${CMAKE_DL_LIBS} Threads::Threads BZip2::BZip2 ${MIOPEN_CK_LINK_FLAGS}) -@@ -861,7 +864,7 @@ if(NOT WIN32 AND NOT APPLE) - endif() - - if(MIOPEN_USE_ROCTRACER) -- target_link_libraries(MIOpen PRIVATE roctx64) -+ target_link_libraries(MIOpen PRIVATE "${ROCTRACER_LIB_DIR}/libroctx64.so") - endif() - - ############################################################ --- -2.31.1 - diff --git a/var/spack/repos/builtin/packages/miopen-hip/package.py b/var/spack/repos/builtin/packages/miopen-hip/package.py index 73c7d08c6c1da1..c9544379f46dae 100644 --- a/var/spack/repos/builtin/packages/miopen-hip/package.py +++ b/var/spack/repos/builtin/packages/miopen-hip/package.py @@ -63,7 +63,8 @@ class MiopenHip(CMakePackage): patch("miopen-hip-include-nlohmann-include-directory.patch", when="@5.4.0:5.7") patch("0002-add-include-dir-miopen-hip-6.0.0.patch", when="@6.0") - patch("0002-add-include-dir-miopen-hip-6.1.0.patch", when="@6.1") + patch("0001-link-with-roctracer-when-building-miopendriver-6.1.0.patch", when="@6.1") + patch("0001-link-with-roctracer-when-building-miopendriver-6.2.0.patch", when="@6.2:") patch( "https://github.com/ROCm/MIOpen/commit/f60aa1ff89f8fb596b4a6a4c70aa7d557803db87.patch?full_index=1", sha256="7f382c872d89f22da1ad499e85ffe9881cc7404c8465e42877a210a09382e2ea", @@ -135,7 +136,7 @@ class MiopenHip(CMakePackage): depends_on("nlohmann-json", type="link") depends_on(f"rocmlir@{ver}", when=f"@{ver}") for ver in ["6.0.0", "6.0.2", "6.1.0", "6.1.1", "6.1.2", "6.2.0", "6.2.1"]: - depends_on("roctracer-dev@" + ver, when="@" + ver) + depends_on(f"roctracer-dev@{ver}", when=f"@{ver}") for ver in ["6.1.0", "6.1.1", "6.1.2"]: depends_on("googletest") for ver in ["6.2.0", "6.2.1"]: @@ -200,19 +201,19 @@ def cmake_args(self): args.append(self.define("MIOPEN_USE_MLIR", "OFF")) if self.spec.satisfies("@5.7.0:"): args.append(self.define("MIOPEN_ENABLE_AI_IMMED_MODE_FALLBACK", "OFF")) - if self.spec.satisfies("@6:6.1"): + if 
self.spec.satisfies("@6.0"): args.append( "-DROCTRACER_INCLUDE_DIR={0}".format(self.spec["roctracer-dev"].prefix.include) ) args.append("-DROCTRACER_LIB_DIR={0}".format(self.spec["roctracer-dev"].prefix.lib)) - if self.spec.satisfies("@6.1"): args.append("-DSQLITE_INCLUDE_DIR={0}".format(self.spec["sqlite"].prefix.include)) - if self.spec.satisfies("@6.2:"): + if self.spec.satisfies("@6.1:"): + args.append(self.define("MIOPEN_USE_ROCTRACER", "ON")) args.append( self.define( "CMAKE_CXX_FLAGS", f"-I{self.spec['roctracer-dev'].prefix.include} " - f"-L{self.spec['roctracer-dev'].prefix.lib} " + f"-L{self.spec['roctracer-dev'].prefix.roctracer.lib} " f"-I{self.spec['nlohmann-json'].prefix.include} " f"-I{self.spec['sqlite'].prefix.include} ", ) From c7659df4af728395f100cc9f88706be8186b4f1c Mon Sep 17 00:00:00 2001 From: "Paul R. C. Kent" Date: Mon, 4 Nov 2024 18:30:55 -0500 Subject: [PATCH 069/208] libxc: add v7.0.0 (#47263) --- var/spack/repos/builtin/packages/libxc/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/libxc/package.py b/var/spack/repos/builtin/packages/libxc/package.py index 84e6878512970c..46ff91c34f1378 100644 --- a/var/spack/repos/builtin/packages/libxc/package.py +++ b/var/spack/repos/builtin/packages/libxc/package.py @@ -15,6 +15,7 @@ class Libxc(AutotoolsPackage, CudaPackage): license("MPL-2.0-no-copyleft-exception") + version("7.0.0", sha256="8d4e343041c9cd869833822f57744872076ae709a613c118d70605539fb13a77") version("6.2.2", sha256="d1b65ef74615a1e539d87a0e6662f04baf3a2316706b4e2e686da3193b26b20f") version("6.2.1", sha256="da96fc4f6e4221734986f49758b410ffe1d406efd3538761062a4af57a2bd272") version("6.2.0", sha256="31edb72c69157b6c0beaff1f10cbbb6348ce7579ef81d8f286764e5ab61194d1") From c8873ea35c00023ad4c7b12f3c91a22034125ade Mon Sep 17 00:00:00 2001 From: Tim Haines Date: Mon, 4 Nov 2024 17:33:41 -0600 Subject: [PATCH 070/208] dyninst: patch broken builds for 10.0.0:12.2.0 (#47339) * dyninst: patch broken builds for 10.0.0:12.3.0 * Only apply before 12.3.0 --- .../packages/dyninst/missing_include_deque.patch | 11 +++++++++++ var/spack/repos/builtin/packages/dyninst/package.py | 5 +++++ 2 files changed, 16 insertions(+) create mode 100644 var/spack/repos/builtin/packages/dyninst/missing_include_deque.patch diff --git a/var/spack/repos/builtin/packages/dyninst/missing_include_deque.patch b/var/spack/repos/builtin/packages/dyninst/missing_include_deque.patch new file mode 100644 index 00000000000000..cee31fdbb0c5ee --- /dev/null +++ b/var/spack/repos/builtin/packages/dyninst/missing_include_deque.patch @@ -0,0 +1,11 @@ +diff --git a/dataflowAPI/src/AbslocInterface.C b/dataflowAPI/src/AbslocInterface.C +index 9d7ad000c..582e64004 100644 +--- a/dataflowAPI/src/AbslocInterface.C ++++ b/dataflowAPI/src/AbslocInterface.C +@@ -29,6 +29,7 @@ + */ + + ++#include + #include "Absloc.h" + #include "AbslocInterface.h" diff --git a/var/spack/repos/builtin/packages/dyninst/package.py b/var/spack/repos/builtin/packages/dyninst/package.py index f71ab53fb762ba..cca8a3026eb7e6 100644 --- a/var/spack/repos/builtin/packages/dyninst/package.py +++ b/var/spack/repos/builtin/packages/dyninst/package.py @@ -110,6 +110,11 @@ class Dyninst(CMakePackage): patch("stackanalysis_h.patch", when="@9.2.0") patch("v9.3.2-auto.patch", when="@9.3.2 %gcc@:4.7") patch("tribool.patch", when="@9.3.0:10.0.0 ^boost@1.69:") + patch( + "missing_include_deque.patch", + when="@10.0.0:12.2.0", + sha256="0064d8d51bd01bd0035e1ebc49276f627ce6366d4524c92cf47d3c09b0031f96", + ) 
requires("%gcc", when="@:13.0.0", msg="dyninst builds only with GCC") From 5a29c9d82bac87546e7436248a9b9d5342ab1c29 Mon Sep 17 00:00:00 2001 From: Edward Hartnett <38856240+edwardhartnett@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:48:48 -0700 Subject: [PATCH 071/208] added g2c-2.0.0 (#47399) --- var/spack/repos/builtin/packages/g2c/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/g2c/package.py b/var/spack/repos/builtin/packages/g2c/package.py index 1ddc6fe37a7313..d4cb7c74a25915 100644 --- a/var/spack/repos/builtin/packages/g2c/package.py +++ b/var/spack/repos/builtin/packages/g2c/package.py @@ -18,6 +18,7 @@ class G2c(CMakePackage): maintainers("AlexanderRichert-NOAA", "Hang-Lei-NOAA", "edwardhartnett") version("develop", branch="develop") + version("2.0.0", sha256="39c23bf1219c60101548c8525e3a879c84119558f768081779d404a8caf4cec9") version("1.9.0", sha256="5554276e18bdcddf387a08c2dd23f9da310c6598905df6a2a244516c22ded9aa") version("1.8.0", sha256="4ce9f5a7cb0950699fe08ebc5a463ab4d09ef550c050391a319308a2494f971f") version("1.7.0", sha256="73afba9da382fed73ed8692d77fa037bb313280879cd4012a5e5697dccf55175") From 54aaa95a3517fa0c9068eaf98655fe13813d2968 Mon Sep 17 00:00:00 2001 From: Pranav Sivaraman Date: Mon, 4 Nov 2024 20:51:23 -0500 Subject: [PATCH 072/208] flux: new package (#47392) --- .../repos/builtin/packages/flux/package.py | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 var/spack/repos/builtin/packages/flux/package.py diff --git a/var/spack/repos/builtin/packages/flux/package.py b/var/spack/repos/builtin/packages/flux/package.py new file mode 100644 index 00000000000000..934f9d56554f20 --- /dev/null +++ b/var/spack/repos/builtin/packages/flux/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Flux(CMakePackage): + """A C++20 library for sequence-orientated programming""" + + homepage = "https://tristanbrindle.com/flux/" + url = "https://github.com/tcbrindle/flux/archive/refs/tags/v0.4.0.tar.gz" + + maintainers("pranav-sivaraman") + + license("BSL-1.0", checked_by="pranav-sivaraman") + + version("0.4.0", sha256="95e7d9d71c9ee9e89bb24b46ccba77ddfb0a1580630c2faab0b415dacc7c8d56") + + variant("docs", default=False, description="Build Flux documentation") + + depends_on("cxx", type="build") + depends_on("cmake@3.23:", type="build") + + with default_args(when="+docs"): + depends_on("py-sphinx") + depends_on("py-sphinx-copybutton") + depends_on("py-furo") + + def cmake_args(self): + args = [ + self.define("FLUX_BUILD_TESTS", self.run_tests), + self.define("FLUX_BUILD_EXAMPLES", False), + self.define_from_variant("FLUX_BUILD_DOCS", "docs"), + ] + + return args From 8650ba3cea6cfd15713f6f76898ef4e0d53e2abe Mon Sep 17 00:00:00 2001 From: Matthieu Dorier Date: Tue, 5 Nov 2024 01:55:29 +0000 Subject: [PATCH 073/208] prometheus-cpp: added package prometheus-cpp (#47384) * prometheus-cpp: added package prometheus-cpp * prometheus-cpp: edited PR for style --- .../packages/prometheus-cpp/package.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 var/spack/repos/builtin/packages/prometheus-cpp/package.py diff --git a/var/spack/repos/builtin/packages/prometheus-cpp/package.py b/var/spack/repos/builtin/packages/prometheus-cpp/package.py new file mode 100644 index 00000000000000..fc1a62b150d254 --- /dev/null +++ b/var/spack/repos/builtin/packages/prometheus-cpp/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PrometheusCpp(CMakePackage): + """Prometheus Client Library for Modern C++.""" + + homepage = "https://jupp0r.github.io/prometheus-cpp/" + url = "https://github.com/jupp0r/prometheus-cpp/releases/download/v1.2.4/prometheus-cpp-with-submodules.tar.gz" + git = "https://github.com/jupp0r/prometheus-cpp.git" + + license("MIT", checked_by="mdorier") + + version("master", branch="master", submodules=True) + version("1.2.4", sha256="0d6852291063c35853e88805c73b52f73c0c08b78c1e7bc4d588fcf72a7172eb") + + depends_on("c", type="build") + depends_on("cxx", type="build") + depends_on("cmake@3.14.0:", type="build") + depends_on("zlib") + depends_on("curl") + + def cmake_args(self): + args = ["-DBUILD_SHARED_LIBS=ON", "-DENABLE_TESTING=OFF"] + return args From f8da72cffe07d8836159c7bc6984c13fa4bb2656 Mon Sep 17 00:00:00 2001 From: Paul Gessinger Date: Tue, 5 Nov 2024 03:34:47 +0100 Subject: [PATCH 074/208] pythia8: Include patch for C++20 / Clang (#47400) * pythia8: Include patch for C++20 / Clang Pythia8 vendors some FJCore sources that are as of Pythia8 312 incompatible with C++20 on clang. 
This adds a patch that makes it compatible in these scenarios * Add issue link * rename setup_cxxstd function * Remove an accidental printout * Apply patch to all compilers, add lower bound --- .../repos/builtin/packages/pythia8/package.py | 17 +++++---- .../pythia8-cpp20-fjcore-forward-decl.patch | 37 +++++++++++++++++++ 2 files changed, 47 insertions(+), 7 deletions(-) create mode 100644 var/spack/repos/builtin/packages/pythia8/pythia8-cpp20-fjcore-forward-decl.patch diff --git a/var/spack/repos/builtin/packages/pythia8/package.py b/var/spack/repos/builtin/packages/pythia8/package.py index 7bf7494b7db79a..d70cc40d3fabe3 100644 --- a/var/spack/repos/builtin/packages/pythia8/package.py +++ b/var/spack/repos/builtin/packages/pythia8/package.py @@ -132,17 +132,20 @@ class Pythia8(AutotoolsPackage): filter_compiler_wrappers("Makefile.inc", relative_root="share/Pythia8/examples") @run_before("configure") - def setup_cxxstd(self): + def setup_configure(self): filter_file( r"-std=c\+\+[0-9][0-9]", f"-std=c++{self.spec.variants['cxxstd'].value}", "configure" ) - # Fix for https://gitlab.com/Pythia8/releases/-/issues/428 - @when("@:8.311") - def patch(self): - filter_file( - r"[/]examples[/]Makefile[.]inc\|;n' \\", "/examples/Makefile.inc|' \\", "configure" - ) + # Fix for https://gitlab.com/Pythia8/releases/-/issues/428 + with when("@:8.311"): + filter_file( + r"[/]examples[/]Makefile[.]inc\|;n' \\", "/examples/Makefile.inc|' \\", "configure" + ) + + # Fix for https://gitlab.com/Pythia8/releases/-/issues/523 + with when("@8.307:8.312 cxxstd=20"): + patch("pythia8-cpp20-fjcore-forward-decl.patch") def configure_args(self): args = [] diff --git a/var/spack/repos/builtin/packages/pythia8/pythia8-cpp20-fjcore-forward-decl.patch b/var/spack/repos/builtin/packages/pythia8/pythia8-cpp20-fjcore-forward-decl.patch new file mode 100644 index 00000000000000..447e73cba582fb --- /dev/null +++ b/var/spack/repos/builtin/packages/pythia8/pythia8-cpp20-fjcore-forward-decl.patch @@ -0,0 +1,37 @@ +diff --git a/src/FJcore.cc b/src/FJcore.cc +index c60108e2..afd32eee 100644 +--- a/src/FJcore.cc ++++ b/src/FJcore.cc +@@ -730,14 +730,10 @@ FJCORE_BEGIN_NAMESPACE // defined in fastjet/internal/base.hh + class ClosestPair2D : public ClosestPair2DBase { + public: + ClosestPair2D(const std::vector & positions, +- const Coord2D & left_corner, const Coord2D & right_corner) { +- _initialize(positions, left_corner, right_corner, positions.size()); +- }; ++ const Coord2D & left_corner, const Coord2D & right_corner); + ClosestPair2D(const std::vector & positions, + const Coord2D & left_corner, const Coord2D & right_corner, +- const unsigned int max_size) { +- _initialize(positions, left_corner, right_corner, max_size); +- }; ++ const unsigned int max_size); + void closest_pair(unsigned int & ID1, unsigned int & ID2, + double & distance2) const; + void remove(unsigned int ID); +@@ -808,6 +804,15 @@ public: + return coord.distance2(other.coord); + }; + }; ++inline ClosestPair2D::ClosestPair2D(const std::vector & positions, ++ const Coord2D & left_corner, const Coord2D & right_corner) { ++ _initialize(positions, left_corner, right_corner, positions.size()); ++}; ++inline ClosestPair2D::ClosestPair2D(const std::vector & positions, ++ const Coord2D & left_corner, const Coord2D & right_corner, ++ const unsigned int max_size) { ++ _initialize(positions, left_corner, right_corner, max_size); ++}; + inline bool floor_ln2_less(unsigned x, unsigned y) { + if (x>y) return false; + return (x < (x^y)); // beware of operator precedence... 
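A note on the conditional-patch idiom above: Spack's `when()` context
manager scopes package directives to matching specs. At class level the
same pattern reduces to this minimal sketch, where the package name,
version range, variant, and patch file are all hypothetical:

    from spack.package import *


    class Example(AutotoolsPackage):
        """Hypothetical package illustrating a conditionally applied patch."""

        version("1.1", sha256="0" * 64)  # placeholder checksum

        variant(
            "cxxstd",
            default="17",
            values=("17", "20"),
            multi=False,
            description="C++ standard to use when building",
        )

        # Registered only when the spec is in this version range and was
        # requested with cxxstd=20.
        with when("@1.0:1.1 cxxstd=20"):
            patch("fix-cxx20.patch")

Declaring the patch inside `with when(...)` keeps the constraint next to
the directive it guards, rather than scattering `self.spec.satisfies(...)`
checks through the build methods.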
From dcc199ae63d0d979bf73ee555b75a8e6756d5ae2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= <765740+giordano@users.noreply.github.com> Date: Tue, 5 Nov 2024 04:34:08 +0000 Subject: [PATCH 075/208] extrae: fix typo (#47406) --- var/spack/repos/builtin/packages/extrae/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/extrae/package.py b/var/spack/repos/builtin/packages/extrae/package.py index 24f12bba595749..832f0cd21fcb35 100644 --- a/var/spack/repos/builtin/packages/extrae/package.py +++ b/var/spack/repos/builtin/packages/extrae/package.py @@ -128,7 +128,7 @@ def configure_args(self): args += ( ["--with-cuda=%s" % spec["cuda"].prefix] - if spec.satisifes("+cuda") + if spec.satisfies("+cuda") else ["--without-cuda"] ) From b862eec6bc47fc19bf927cdd6fe145f72e8e99e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= <765740+giordano@users.noreply.github.com> Date: Tue, 5 Nov 2024 04:43:02 +0000 Subject: [PATCH 076/208] extrae: add more versions (#47408) --- var/spack/repos/builtin/packages/extrae/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/extrae/package.py b/var/spack/repos/builtin/packages/extrae/package.py index 832f0cd21fcb35..ea7fc38bbb8496 100644 --- a/var/spack/repos/builtin/packages/extrae/package.py +++ b/var/spack/repos/builtin/packages/extrae/package.py @@ -42,6 +42,15 @@ class Extrae(AutotoolsPackage): license("LGPL-2.1-or-later") + version("4.2.3", sha256="c132f3609b2e6f34d95ca1598eea01e5097257b6a663bb9698206ec271825ed0") + version("4.2.2", sha256="1f776f1a3401942b79685ba13489a954a731bce7cbb8549594f6da0b557c58a7") + version("4.2.1", sha256="0260a9a4952b6ac9b82ee33ee2749c22ae10d39447e42167a2626c77f664bb9a") + version("4.2.0", sha256="7b83a1ed008440bbc1bda88297d2d0e9256780db1cf8401b3c12718451f8919a") + version("4.1.7", sha256="0ed87449f74db0abc239ee8c40176e89f9ca6a69738fe751ec0df8fc46da1712") + version("4.1.6", sha256="9f146e70311b8ae9d77584f6efc7b30478885cfd095f7bd3937d5b08aec19985") + version("4.1.5", sha256="ab425f2e155e9af3332c01177df1776a6a953e721dfe8774eb23733f942b76a0") + version("4.1.4", sha256="6b5894bea046273a0d2a5c72204937ad310b2f88cd5d87d10f5ca0aaf1d637da") + version("4.1.3", sha256="889f136ddcfec2f8f9401b24ee29ebf74cf055e4e524c54821aba25513c24c03") version("4.1.2", sha256="adbc1d3aefde7649262426d471237dc96f070b93be850a6f15280ed86fd0b952") version("4.0.6", sha256="233be38035dd76f6877b1fd93d308e024e5d4ef5519d289f8e319cd6c58d0bc6") version("4.0.5", sha256="8f5eefa95f2e94a3b5f9b7f7cbaaed523862f190575ee797113b1e97deff1586") From 5b59a53545513925fc895d2ab71cffdc079a9aa4 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 22:59:04 -0600 Subject: [PATCH 077/208] py-configspace: fix homepage (#47417) --- var/spack/repos/builtin/packages/py-configspace/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-configspace/package.py b/var/spack/repos/builtin/packages/py-configspace/package.py index 6a392b6276fea6..78d56f17adfb99 100644 --- a/var/spack/repos/builtin/packages/py-configspace/package.py +++ b/var/spack/repos/builtin/packages/py-configspace/package.py @@ -12,7 +12,7 @@ class PyConfigspace(PythonPackage): maintainers("Kerilk", "mdorier") - homepage = "https://automl.github.io/ConfigSpace/master/" + homepage = "https://automl.github.io/ConfigSpace/latest/" pypi = "configspace/configspace-1.0.0.tar.gz" license("BSD-3-Clause") From 
703cd6a313c09e8e5974d540721368b2931308ff Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 23:08:17 -0600 Subject: [PATCH 078/208] py-eventlet: fix url (#47418) * py-eventlet: fix url * py-eventlet: fix checksum --- var/spack/repos/builtin/packages/py-eventlet/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-eventlet/package.py b/var/spack/repos/builtin/packages/py-eventlet/package.py index 03e440844d37a2..e5bf12d6f8ca6c 100644 --- a/var/spack/repos/builtin/packages/py-eventlet/package.py +++ b/var/spack/repos/builtin/packages/py-eventlet/package.py @@ -10,11 +10,11 @@ class PyEventlet(PythonPackage): """Concurrent networking library for Python""" homepage = "https://github.com/eventlet/eventlet" - url = "https://github.com/eventlet/eventlet/releases/download/v0.22.0/eventlet-0.22.0.tar.gz" + url = "https://github.com/eventlet/eventlet/archive/refs/tags/v0.22.0.tar.gz" license("MIT") - version("0.22.0", sha256="6d22464f448fdf144a9d566c157299d686bbe324554dd7729df9ccd05ca66439") + version("0.22.0", sha256="c4cc92268b82eb94d5e0de0592159157d68122d394f480e3f9a9d6ddb695655e") depends_on("py-setuptools", type="build") depends_on("py-greenlet@0.3:") From 6822f99cc6a8f00641b27489916ee85782bcab63 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 23:12:57 -0600 Subject: [PATCH 079/208] quicksilver: fix homepage (#47419) --- var/spack/repos/builtin/packages/quicksilver/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/quicksilver/package.py b/var/spack/repos/builtin/packages/quicksilver/package.py index 61357eed447601..061619b2b75648 100644 --- a/var/spack/repos/builtin/packages/quicksilver/package.py +++ b/var/spack/repos/builtin/packages/quicksilver/package.py @@ -13,7 +13,7 @@ class Quicksilver(MakefilePackage): tags = ["proxy-app"] - homepage = "https://codesign.llnl.gov/quicksilver.php" + homepage = "https://asc.llnl.gov/codes/proxy-apps/quicksilver" url = "https://github.com/LLNL/Quicksilver/tarball/V1.0" git = "https://github.com/LLNL/Quicksilver.git" From 9310fcabd8cfcba838898735af00d789678cfb28 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 4 Nov 2024 23:13:17 -0600 Subject: [PATCH 080/208] sst-dumpi: fix homepage (#47420) --- var/spack/repos/builtin/packages/sst-dumpi/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/sst-dumpi/package.py b/var/spack/repos/builtin/packages/sst-dumpi/package.py index fd4cee80038bfc..f796e33e3e6fd5 100644 --- a/var/spack/repos/builtin/packages/sst-dumpi/package.py +++ b/var/spack/repos/builtin/packages/sst-dumpi/package.py @@ -14,7 +14,7 @@ class SstDumpi(AutotoolsPackage): information, and PAPI counters. 
""" - homepage = "http://sst.sandia.gov/about_dumpi.html" + homepage = "https://github.com/sstsimulator/sst-dumpi" url = "https://github.com/sstsimulator/sst-dumpi/archive/refs/tags/v13.0.0_Final.tar.gz" git = "https://github.com/sstsimulator/sst-dumpi.git" From e42e54160516d59a65ca9dc4c94cae9182aada41 Mon Sep 17 00:00:00 2001 From: Howard Pritchard Date: Mon, 4 Nov 2024 22:23:16 -0700 Subject: [PATCH 081/208] openmpi: add 4.1.7 (#47427) Signed-off-by: Howard Pritchard --- var/spack/repos/builtin/packages/openmpi/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index 4b2c81dc033967..58105a90f7538f 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -63,6 +63,9 @@ class Openmpi(AutotoolsPackage, CudaPackage): version( "5.0.0", sha256="9d845ca94bc1aeb445f83d98d238cd08f6ec7ad0f73b0f79ec1668dbfdacd613" ) # libmpi.so.40.40.0 + version( + "4.1.7", sha256="54a33cb7ad81ff0976f15a6cc8003c3922f0f3d8ceed14e1813ef3603f22cd34" + ) # libmpi.so.40.30.7 version( "4.1.6", sha256="f740994485516deb63b5311af122c265179f5328a0d857a567b85db00b11e415" ) # libmpi.so.40.30.6 From 14bc900e9db7ddaaf32d79c32386a9c81f65c18b Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 5 Nov 2024 07:46:49 +0100 Subject: [PATCH 082/208] spack.concretize: add type-hints, remove kwargs (#47382) Also remove find_spec, which was used by the old concretizer. Currently, it seems to be used only in tests. --- lib/spack/docs/conf.py | 1 + lib/spack/spack/cmd/__init__.py | 2 +- lib/spack/spack/concretize.py | 117 +++++++-------------- lib/spack/spack/environment/environment.py | 18 ++-- lib/spack/spack/test/concretize.py | 38 +------ 5 files changed, 52 insertions(+), 124 deletions(-) diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index 4873e3e104d6b3..18495d4bca51f3 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -214,6 +214,7 @@ def setup(sphinx): # Spack classes that intersphinx is unable to resolve ("py:class", "spack.version.StandardVersion"), ("py:class", "spack.spec.DependencySpec"), + ("py:class", "spack.spec.ArchSpec"), ("py:class", "spack.spec.InstallStatus"), ("py:class", "spack.spec.SpecfileReaderBase"), ("py:class", "spack.install_test.Pb"), diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 031b29f9528c79..7cf032c90749a0 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -194,7 +194,7 @@ def _concretize_spec_pairs(to_concretize, tests=False): elif unify == "when_possible": concretize_method = spack.concretize.concretize_together_when_possible - concretized = concretize_method(*to_concretize, tests=tests) + concretized = concretize_method(to_concretize, tests=tests) return [concrete for _, concrete in concretized] diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index fabfdbb523a749..122e6c59c03dbc 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -2,20 +2,17 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) -""" -(DEPRECATED) Used to contain the code for the original concretizer -""" +"""High-level functions to concretize list of specs""" import sys import time from contextlib import contextmanager -from itertools import chain -from typing import Tuple +from typing import Iterable, Optional, Sequence, Tuple, Union import llnl.util.tty as tty import spack.config import spack.error -from spack.spec import Spec +from spack.spec import ArchSpec, CompilerSpec, Spec CHECK_COMPILER_EXISTENCE = True @@ -36,91 +33,59 @@ def enable_compiler_existence_check(): CHECK_COMPILER_EXISTENCE = saved -def find_spec(spec, condition, default=None): - """Searches the dag from spec in an intelligent order and looks - for a spec that matches a condition""" - # First search parents, then search children - deptype = ("build", "link") - dagiter = chain( - spec.traverse(direction="parents", deptype=deptype, root=False), - spec.traverse(direction="children", deptype=deptype, root=False), - ) - visited = set() - for relative in dagiter: - if condition(relative): - return relative - visited.add(id(relative)) - - # Then search all other relatives in the DAG *except* spec - for relative in spec.root.traverse(deptype="all"): - if relative is spec: - continue - if id(relative) in visited: - continue - if condition(relative): - return relative - - # Finally search spec itself. - if condition(spec): - return spec - - return default # Nothing matched the condition; return default. - - -def concretize_specs_together(*abstract_specs, **kwargs): +SpecPair = Tuple[Spec, Spec] +SpecLike = Union[Spec, str] +TestsType = Union[bool, Iterable[str]] + + +def concretize_specs_together( + abstract_specs: Sequence[SpecLike], tests: TestsType = False +) -> Sequence[Spec]: """Given a number of specs as input, tries to concretize them together. Args: - tests (bool or list or set): False to run no tests, True to test - all packages, or a list of package names to run tests for some - *abstract_specs: abstract specs to be concretized, given either - as Specs or strings - - Returns: - List of concretized specs + abstract_specs: abstract specs to be concretized + tests: list of package names for which to consider tests dependencies. If True, all nodes + will have test dependencies. If False, test dependencies will be disregarded. """ import spack.solver.asp allow_deprecated = spack.config.get("config:deprecated", False) solver = spack.solver.asp.Solver() - result = solver.solve( - abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated - ) + result = solver.solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated) return [s.copy() for s in result.specs] -def concretize_together(*spec_list, **kwargs): +def concretize_together( + spec_list: Sequence[SpecPair], tests: TestsType = False +) -> Sequence[SpecPair]: """Given a number of specs as input, tries to concretize them together. Args: - tests (bool or list or set): False to run no tests, True to test - all packages, or a list of package names to run tests for some - *spec_list: list of tuples to concretize. First entry is abstract spec, second entry is + spec_list: list of tuples to concretize. First entry is abstract spec, second entry is already concrete spec or None if not yet concretized - - Returns: - List of tuples of abstract and concretized specs + tests: list of package names for which to consider tests dependencies. If True, all nodes + will have test dependencies. 
If False, test dependencies will be disregarded. """ to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list] abstract_specs = [abstract for abstract, _ in spec_list] - concrete_specs = concretize_specs_together(*to_concretize, **kwargs) + concrete_specs = concretize_specs_together(to_concretize, tests=tests) return list(zip(abstract_specs, concrete_specs)) -def concretize_together_when_possible(*spec_list, **kwargs): +def concretize_together_when_possible( + spec_list: Sequence[SpecPair], tests: TestsType = False +) -> Sequence[SpecPair]: """Given a number of specs as input, tries to concretize them together to the extent possible. See documentation for ``unify: when_possible`` concretization for the precise definition of "to the extent possible". Args: - tests (bool or list or set): False to run no tests, True to test - all packages, or a list of package names to run tests for some - *spec_list: list of tuples to concretize. First entry is abstract spec, second entry is + spec_list: list of tuples to concretize. First entry is abstract spec, second entry is already concrete spec or None if not yet concretized - - Returns: - List of tuples of abstract and concretized specs + tests: list of package names for which to consider tests dependencies. If True, all nodes + will have test dependencies. If False, test dependencies will be disregarded. """ to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list] old_concrete_to_abstract = { @@ -131,7 +96,7 @@ def concretize_together_when_possible(*spec_list, **kwargs): solver = spack.solver.asp.Solver() allow_deprecated = spack.config.get("config:deprecated", False) for result in solver.solve_in_rounds( - to_concretize, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated + to_concretize, tests=tests, allow_deprecated=allow_deprecated ): result_by_user_spec.update(result.specs_by_input) @@ -143,19 +108,17 @@ def concretize_together_when_possible(*spec_list, **kwargs): ] -def concretize_separately(*spec_list, **kwargs): - """Given a number of specs as input, tries to concretize them together. +def concretize_separately( + spec_list: Sequence[SpecPair], tests: TestsType = False +) -> Sequence[SpecPair]: + """Concretizes the input specs separately from each other. Args: - tests (bool or list or set): False to run no tests, True to test - all packages, or a list of package names to run tests for some - *spec_list: list of tuples to concretize. First entry is abstract spec, second entry is + spec_list: list of tuples to concretize. First entry is abstract spec, second entry is already concrete spec or None if not yet concretized - - Returns: - List of tuples of abstract and concretized specs + tests: list of package names for which to consider tests dependencies. If True, all nodes + will have test dependencies. If False, test dependencies will be disregarded. 
""" - tests = kwargs.get("tests", False) to_concretize = [abstract for abstract, concrete in spec_list if not concrete] args = [ (i, str(abstract), tests) @@ -215,7 +178,7 @@ def concretize_separately(*spec_list, **kwargs): ] -def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]: +def _concretize_task(packed_arguments: Tuple[int, str, TestsType]) -> Tuple[int, Spec, float]: index, spec_str, tests = packed_arguments with tty.SuppressOutput(msg_enabled=False): start = time.time() @@ -227,10 +190,10 @@ class UnavailableCompilerVersionError(spack.error.SpackError): """Raised when there is no available compiler that satisfies a compiler spec.""" - def __init__(self, compiler_spec, arch=None): - err_msg = "No compilers with spec {0} found".format(compiler_spec) + def __init__(self, compiler_spec: CompilerSpec, arch: Optional[ArchSpec] = None) -> None: + err_msg = f"No compilers with spec {compiler_spec} found" if arch: - err_msg += " for operating system {0} and target {1}.".format(arch.os, arch.target) + err_msg += f" for operating system {arch.os} and target {arch.target}." super().__init__( err_msg, diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index b61332a0abc92c..7cf1057fa5c62b 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -14,7 +14,7 @@ import urllib.parse import urllib.request import warnings -from typing import Any, Dict, Iterable, List, Optional, Tuple, Union +from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union import llnl.util.filesystem as fs import llnl.util.tty as tty @@ -55,7 +55,7 @@ from spack.spec_list import SpecList from spack.util.path import substitute_path_variables -SpecPair = Tuple[spack.spec.Spec, spack.spec.Spec] +SpecPair = spack.concretize.SpecPair #: environment variable used to indicate the active environment spack_env_var = "SPACK_ENV" @@ -1533,9 +1533,7 @@ def _get_specs_to_concretize( ] return new_user_specs, kept_user_specs, specs_to_concretize - def _concretize_together_where_possible( - self, tests: bool = False - ) -> List[Tuple[spack.spec.Spec, spack.spec.Spec]]: + def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]: # Avoid cyclic dependency import spack.solver.asp @@ -1550,7 +1548,7 @@ def _concretize_together_where_possible( ret = [] result = spack.concretize.concretize_together_when_possible( - *specs_to_concretize, tests=tests + specs_to_concretize, tests=tests ) for abstract, concrete in result: # Only add to the environment if it's from this environment (not included in) @@ -1563,7 +1561,7 @@ def _concretize_together_where_possible( return ret - def _concretize_together(self, tests: bool = False) -> List[SpecPair]: + def _concretize_together(self, tests: bool = False) -> Sequence[SpecPair]: """Concretization strategy that concretizes all the specs in the same DAG. 
""" @@ -1577,8 +1575,8 @@ def _concretize_together(self, tests: bool = False) -> List[SpecPair]: self.specs_by_hash = {} try: - concretized_specs: List[SpecPair] = spack.concretize.concretize_together( - *specs_to_concretize, tests=tests + concretized_specs = spack.concretize.concretize_together( + specs_to_concretize, tests=tests ) except spack.error.UnsatisfiableSpecError as e: # "Enhance" the error message for multiple root specs, suggest a less strict @@ -1627,7 +1625,7 @@ def _concretize_separately(self, tests=False): to_concretize = [ (root, None) for root in self.user_specs if root not in old_concretized_user_specs ] - concretized_specs = spack.concretize.concretize_separately(*to_concretize, tests=tests) + concretized_specs = spack.concretize.concretize_separately(to_concretize, tests=tests) by_hash = {} for abstract, concrete in concretized_specs: diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 4d9940ea9bb815..bf96311d4499cd 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -33,7 +33,6 @@ import spack.store import spack.util.file_cache import spack.variant as vt -from spack.concretize import find_spec from spack.installer import PackageInstaller from spack.spec import CompilerSpec, Spec from spack.version import Version, VersionList, ver @@ -674,39 +673,6 @@ def test_external_and_virtual(self, mutable_config): assert spec["externaltool"].compiler.satisfies("gcc") assert spec["stuff"].compiler.satisfies("gcc") - def test_find_spec_parents(self): - """Tests the spec finding logic used by concretization.""" - s = Spec.from_literal({"a +foo": {"b +foo": {"c": None, "d+foo": None}, "e +foo": None}}) - - assert "a" == find_spec(s["b"], lambda s: "+foo" in s).name - - def test_find_spec_children(self): - s = Spec.from_literal({"a": {"b +foo": {"c": None, "d+foo": None}, "e +foo": None}}) - - assert "d" == find_spec(s["b"], lambda s: "+foo" in s).name - - s = Spec.from_literal({"a": {"b +foo": {"c+foo": None, "d": None}, "e +foo": None}}) - - assert "c" == find_spec(s["b"], lambda s: "+foo" in s).name - - def test_find_spec_sibling(self): - s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e +foo": None}}) - - assert "e" == find_spec(s["b"], lambda s: "+foo" in s).name - assert "b" == find_spec(s["e"], lambda s: "+foo" in s).name - - s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e": {"f +foo": None}}}) - - assert "f" == find_spec(s["b"], lambda s: "+foo" in s).name - - def test_find_spec_self(self): - s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e": None}}) - assert "b" == find_spec(s["b"], lambda s: "+foo" in s).name - - def test_find_spec_none(self): - s = Spec.from_literal({"a": {"b": {"c": None, "d": None}, "e": None}}) - assert find_spec(s["b"], lambda s: "+foo" in s) is None - def test_compiler_child(self): s = Spec("mpileaks%clang target=x86_64 ^dyninst%gcc") s.concretize() @@ -815,7 +781,7 @@ def test_regression_issue_7941(self): ) def test_simultaneous_concretization_of_specs(self, abstract_specs): abstract_specs = [Spec(x) for x in abstract_specs] - concrete_specs = spack.concretize.concretize_specs_together(*abstract_specs) + concrete_specs = spack.concretize.concretize_specs_together(abstract_specs) # Check there's only one configuration of each package in the DAG names = set(dep.name for spec in concrete_specs for dep in spec.traverse()) @@ -2137,7 +2103,7 @@ def test_external_python_extension_find_unified_python(self): 
spack.config.set("packages", external_conf) abstract_specs = [Spec(s) for s in ["py-extension1", "python"]] - specs = spack.concretize.concretize_specs_together(*abstract_specs) + specs = spack.concretize.concretize_specs_together(abstract_specs) assert specs[0]["python"] == specs[1]["python"] @pytest.mark.regression("36190") From afe431cfb50d0da6a10769c353d4701e6fc1c95b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 5 Nov 2024 07:50:38 +0100 Subject: [PATCH 083/208] py-python-ptrace: missing forward compat bound (#47401) --- .../repos/builtin/packages/py-python-ptrace/package.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-python-ptrace/package.py b/var/spack/repos/builtin/packages/py-python-ptrace/package.py index 94273241f6570d..be67cc059b6792 100644 --- a/var/spack/repos/builtin/packages/py-python-ptrace/package.py +++ b/var/spack/repos/builtin/packages/py-python-ptrace/package.py @@ -14,8 +14,12 @@ class PyPythonPtrace(PythonPackage): license("GPL-2.0-only") + version("0.9.9", sha256="56bbfef44eaf3a77be48138cca5767cdf471e8278fe1499f9b72f151907f25cf") version("0.9.8", sha256="1e3bc6223f626aaacde8a7979732691c11b13012e702fee9ae16c87f71633eaa") - depends_on("c", type="build") # generated + depends_on("c", type="build") depends_on("py-setuptools", type="build") + + # uses imp + depends_on("python@:3.11", when="@:0.9.8") From 75c169d870e975fe815bad73286d0b9aaf49ed54 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Tue, 5 Nov 2024 09:04:07 +0100 Subject: [PATCH 084/208] py-tensorflow: add v2.18.0 (#47211) --- .../packages/py-tensorboard/package.py | 18 +++-- .../builtin/packages/py-tensorflow/package.py | 81 +++++++++++-------- 2 files changed, 57 insertions(+), 42 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-tensorboard/package.py b/var/spack/repos/builtin/packages/py-tensorboard/package.py index 83f3f1a464c6ed..a7d85ca17fe8e1 100644 --- a/var/spack/repos/builtin/packages/py-tensorboard/package.py +++ b/var/spack/repos/builtin/packages/py-tensorboard/package.py @@ -17,10 +17,11 @@ class PyTensorboard(PythonPackage): # Requires tensorflow skip_modules = ["tensorboard.summary._tf"] - maintainers("aweits") - license("Apache-2.0") + maintainers("aweits") + version("2.18.0", sha256="107ca4821745f73e2aefa02c50ff70a9b694f39f790b11e6f682f7d326745eab") + version("2.17.1", sha256="253701a224000eeca01eee6f7e978aea7b408f60b91eb0babdb04e78947b773e") version("2.17.0", sha256="859a499a9b1fb68a058858964486627100b71fcb21646861c61d31846a6478fb") version("2.16.2", sha256="9f2b4e7dad86667615c0e5cd072f1ea8403fc032a299f0072d6f74855775cc45") version("2.16.1", sha256="928b62567911a8eeb2ebeb7482a9e4599b35f6713a6f2c56655259c18a139569") @@ -59,19 +60,20 @@ class PyTensorboard(PythonPackage): depends_on("py-grpcio@1.24.3:", when="@2.3:") depends_on("py-grpcio@1.23.3:", when="@2.2") depends_on("py-markdown@2.6.8:") - depends_on("py-numpy@1.12.0:") + depends_on("py-numpy@1.12:") # https://github.com/tensorflow/tensorboard/pull/6871 - depends_on("py-numpy@:1") + depends_on("py-numpy@:1", when="@:2.17") # https://github.com/tensorflow/tensorboard/pull/5138 depends_on("py-numpy@:1.23", when="@:2.5") - depends_on("py-protobuf@3.19.6:4", when="@2.17:") - depends_on("py-protobuf@3.19.6:", when="@2.15.2:2.16") + depends_on("py-packaging", when="@2.18:") + depends_on("py-protobuf@3.19.6:", when="@2.15.2:2.16,2.18:") + depends_on("py-protobuf@3.19.6:4", when="@2.17") depends_on("py-protobuf@3.19.6:4.23", when="@2.12:2.15.1") 
depends_on("py-protobuf@3.9.2:3", when="@2.11") depends_on("py-protobuf@3.9.2:3.19", when="@2.9:2.10") depends_on("py-protobuf@3.6.0:3.19", when="@:2.8") - depends_on("py-setuptools@41.0.0:") - depends_on("py-six@1.10.0:", when="@:2.4,2.14:") + depends_on("py-setuptools@41:") + depends_on("py-six@1.10:", when="@:2.4,2.14:") depends_on("py-tensorboard-data-server@0.7", when="@2.12:") depends_on("py-tensorboard-data-server@0.6", when="@2.5:2.11") depends_on("py-werkzeug@1.0.1:", when="@2.9:") diff --git a/var/spack/repos/builtin/packages/py-tensorflow/package.py b/var/spack/repos/builtin/packages/py-tensorflow/package.py index f9844ac1945711..ba1377829fd091 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow/package.py @@ -42,12 +42,13 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): homepage = "https://www.tensorflow.org" url = "https://github.com/tensorflow/tensorflow/archive/v2.3.1.tar.gz" git = "https://github.com/tensorflow/tensorflow.git" - - maintainers("adamjstewart", "aweits") import_modules = ["tensorflow"] license("Apache-2.0") + maintainers("adamjstewart", "aweits") + version("2.18.0", sha256="d7876f4bb0235cac60eb6316392a7c48676729860da1ab659fb440379ad5186d") + version("2.17.1", sha256="2d3cfb48510f92f3a52fb05b820481c6f066a342a9f5296fe26d72c4ea757700") version("2.17.0", sha256="9cc4d5773b8ee910079baaecb4086d0c28939f024dd74b33fc5e64779b6533dc") version("2.16.2", sha256="023849bf253080cb1e4f09386f5eb900492da2288274086ed6cfecd6d99da9eb") version("2.16.1", sha256="c729e56efc945c6df08efe5c9f5b8b89329c7c91b8f40ad2bb3e13900bd4876d") @@ -123,14 +124,18 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): version("2.2.1", sha256="e6a28e64236d729e598dbeaa02152219e67d0ac94d6ed22438606026a02e0f88") version("2.2.0", sha256="69cd836f87b8c53506c4f706f655d423270f5a563b76dc1cfa60fbc3184185a3") - depends_on("c", type="build") # generated - depends_on("cxx", type="build") # generated + depends_on("c", type="build") + depends_on("cxx", type="build") variant("mkl", default=False, description="Build with MKL support") variant("jemalloc", default=False, description="Build with jemalloc as malloc support") variant("gcp", default=False, description="Build with Google Cloud Platform support") - variant("hdfs", default=False, description="Build with Hadoop File System support") - variant("aws", default=False, description="Build with Amazon AWS Platform support") + variant( + "hdfs", default=False, when="@:2.17", description="Build with Hadoop File System support" + ) + variant( + "aws", default=False, when="@:2.17", description="Build with Amazon AWS Platform support" + ) variant("xla", default=sys.platform != "darwin", description="Build with XLA JIT support") variant("gdr", default=False, description="Build with GDR support") variant("verbs", default=False, description="Build with libverbs support") @@ -216,36 +221,13 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): depends_on("py-google-pasta@0.2:0", when="@2.4:2.6") depends_on("py-google-pasta@0.1.8:", when="@2.2:2.3") depends_on("py-google-pasta@0.1.6:", when="@:2.1") - depends_on("py-h5py@3.10:", when="@2.16:") - depends_on("py-h5py@2.9:", when="@2.7:2.15") - depends_on("py-h5py@3.1", when="@2.5:2.6") - depends_on("py-h5py@2.10", when="@2.2:2.4") - depends_on("py-h5py@:2.10.0", when="@2.1.3:2.1") - # propagate the mpi variant setting for h5py/hdf5 to avoid unexpected crashes - depends_on("py-h5py+mpi", 
when="@2.1.3:+mpi") - depends_on("py-h5py~mpi", when="@2.1.3:~mpi") - depends_on("hdf5+mpi", when="@2.1.3:+mpi") - depends_on("hdf5~mpi", when="@2.1.3:~mpi") depends_on("py-libclang@13:", when="@2.9:") depends_on("py-libclang@9.0.1:", when="@2.7:2.8") - depends_on("py-ml-dtypes@0.3.1:0.4", when="@2.17:") - depends_on("py-ml-dtypes@0.3.1:0.3", when="@2.15.1:2.16") - depends_on("py-ml-dtypes@0.2", when="@2.15.0") - depends_on("py-ml-dtypes@0.2.0", when="@2.14") - depends_on("py-numpy@1.23.5:", when="@2.14:") - depends_on("py-numpy@1.22:1.24.3", when="@2.13:") - depends_on("py-numpy@1.22:1.23", when="@2.12") - depends_on("py-numpy@1.20:", when="@2.8:2.11") - depends_on("py-numpy@1.14.5:", when="@2.7") - depends_on("py-numpy@1.19.2:1.19", when="@2.4:2.6") - # https://github.com/tensorflow/tensorflow/issues/40688 - depends_on("py-numpy@1.16.0:1.18", when="@:2.3") - # https://github.com/tensorflow/tensorflow/issues/67291 - depends_on("py-numpy@:1") depends_on("py-opt-einsum@2.3.2:", when="@:2.3,2.7:") depends_on("py-opt-einsum@3.3", when="@2.4:2.6") depends_on("py-packaging", when="@2.9:") - depends_on("py-protobuf@3.20.3:4.20,4.21.6:4", when="@2.12:") + depends_on("py-protobuf@3.20.3:4.20,4.21.6:5", when="@2.18:") + depends_on("py-protobuf@3.20.3:4.20,4.21.6:4", when="@2.12:2.17") depends_on("py-protobuf@3.9.2:", when="@2.3:2.11") depends_on("py-protobuf@3.8.0:", when="@:2.2") # https://github.com/protocolbuffers/protobuf/issues/10051 @@ -278,13 +260,40 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): depends_on("py-grpcio@1.32", when="@2.4") depends_on("py-grpcio@1.8.6:", when="@:2.3") - for minor_ver in range(2, 18): + for minor_ver in range(2, 19): depends_on("py-tensorboard@2.{}".format(minor_ver), when="@2.{}".format(minor_ver)) # TODO: support circular run-time dependencies - # depends_on('py-tensorflow-estimator') # depends_on('py-keras') + depends_on("py-numpy@1.26:2.0", when="@2.18:") + depends_on("py-numpy@1.23.5:", when="@2.14:2.17") + depends_on("py-numpy@1.22:1.24.3", when="@2.13") + depends_on("py-numpy@1.22:1.23", when="@2.12") + depends_on("py-numpy@1.20:", when="@2.8:2.11") + depends_on("py-numpy@1.14.5:", when="@2.7") + depends_on("py-numpy@1.19.2:1.19", when="@2.4:2.6") + # https://github.com/tensorflow/tensorflow/issues/40688 + depends_on("py-numpy@1.16.0:1.18", when="@:2.3") + # https://github.com/tensorflow/tensorflow/issues/67291 + depends_on("py-numpy@:1", when="@:2.17") + depends_on("py-h5py@3.11:", when="@2.18:") + depends_on("py-h5py@3.10:", when="@2.16:") + depends_on("py-h5py@2.9:", when="@2.7:2.15") + depends_on("py-h5py@3.1", when="@2.5:2.6") + depends_on("py-h5py@2.10", when="@2.2:2.4") + depends_on("py-h5py@:2.10.0", when="@2.1.3:2.1") + # propagate the mpi variant setting for h5py/hdf5 to avoid unexpected crashes + depends_on("py-h5py+mpi", when="@2.1.3:+mpi") + depends_on("py-h5py~mpi", when="@2.1.3:~mpi") + depends_on("hdf5+mpi", when="@2.1.3:+mpi") + depends_on("hdf5~mpi", when="@2.1.3:~mpi") + depends_on("py-ml-dtypes@0.4", when="@2.18:") + depends_on("py-ml-dtypes@0.3.1:0.4", when="@2.17") + depends_on("py-ml-dtypes@0.3.1:0.3", when="@2.15.1:2.16") + depends_on("py-ml-dtypes@0.2", when="@2.15.0") + depends_on("py-ml-dtypes@0.2.0", when="@2.14") + # Historical dependencies depends_on("py-jax@0.3.15:", when="@2.12") depends_on("py-keras-preprocessing@1.1.1:", when="@2.7:2.10") @@ -425,7 +434,7 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): # see https://github.com/tensorflow/tensorflow/issues/62490 # and 
https://github.com/abseil/abseil-cpp/issues/1665 - patch("absl_neon.patch", when="@2.16.1: target=aarch64:") + patch("absl_neon.patch", when="@2.16.1:2.17 target=aarch64:") # reverting change otherwise the c467913 commit patch won't apply patch( @@ -588,6 +597,7 @@ def setup_build_environment(self, env): # Do you wish to build TensorFlow with CUDA support? if "+cuda" in spec: env.set("TF_NEED_CUDA", "1") + env.set("CUDA_NVCC", "1") # Do you want to use clang as CUDA compiler? env.set("TF_CUDA_CLANG", "0") @@ -649,6 +659,7 @@ def setup_build_environment(self, env): # only supports compute capabilities >= 3.5 capabilities = CudaPackage.compute_capabilities(spec.variants["cuda_arch"].value) env.set("TF_CUDA_COMPUTE_CAPABILITIES", ",".join(capabilities)) + env.set("HERMETIC_CUDA_COMPUTE_CAPABILITIES", ",".join(capabilities)) else: env.set("TF_NEED_CUDA", "0") @@ -841,6 +852,8 @@ def build(self, spec, prefix): if "+cuda" in spec: args.append("--config=cuda") + if spec.satisfies("@2.18:"): + args.append("--config=cuda_wheel") if "+rocm" in spec: args.append("--config=rocm") From 1297dd7fbc2860f2722a59251c0118a4034f1f20 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 5 Nov 2024 09:36:26 +0100 Subject: [PATCH 085/208] py-torchaudio, py-torchtext: rpath fixup (#47404) * py-torchaudio, py-torchtext: rpath fixup also add missing dependency on Spack ffmpeg to torchaudio. * py-torchaudio: add torio rpath --- .../builtin/packages/py-torchaudio/package.py | 25 +++++++++++++++++++ .../builtin/packages/py-torchtext/package.py | 15 +++++++++++ 2 files changed, 40 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-torchaudio/package.py b/var/spack/repos/builtin/packages/py-torchaudio/package.py index 2ac903565f308e..59a7e2825b223c 100644 --- a/var/spack/repos/builtin/packages/py-torchaudio/package.py +++ b/var/spack/repos/builtin/packages/py-torchaudio/package.py @@ -104,6 +104,9 @@ class PyTorchaudio(PythonPackage): depends_on("cmake@3.5:", when="@0.8:", type="build") depends_on("ninja", when="@0.8:", type="build") + # prior to 2.1 ffmpeg was vendored + depends_on("ffmpeg@:6", when="@2.1:") + # setup.py depends_on("py-setuptools", type="build") depends_on("py-pybind11", when="@0.12:", type=("build", "link")) @@ -118,6 +121,22 @@ class PyTorchaudio(PythonPackage): ) conflicts("^cuda@12.5:", when="@:2.1") + def patch(self): + # Add missing rpaths, which requires patching due to hardcoded cmake_args + if self.spec.satisfies("@0.8:"): + rpaths = [f"{python_platlib}/torchaudio/lib", f"{python_platlib}/torio/lib"] + cmake_args = [ + f"-DCMAKE_INSTALL_RPATH={';'.join(rpaths)}", + "-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON", + ] + cmake_str = ", ".join(f"'{arg}'" for arg in cmake_args) + filter_file( + "cmake_args = [", + f"cmake_args = [{cmake_str},", + "tools/setup_helpers/extension.py", + string=True, + ) + def flag_handler(self, name, flags): # https://github.com/pytorch/vision/issues/8653 if name == "ldflags": @@ -129,6 +148,12 @@ def setup_build_environment(self, env): # tools/setup_helpers/extension.py env.set("BUILD_SOX", 0) + if self.spec.satisfies("@2.1:"): + env.set("FFMPEG_ROOT", self.spec["ffmpeg"].prefix) + else: + # a specific ffmpeg is built but not installed, so just disable + env.set("USE_FFMPEG", "0") + if "+cuda" in self.spec["py-torch"]: env.set("USE_CUDA", 1) else: diff --git a/var/spack/repos/builtin/packages/py-torchtext/package.py b/var/spack/repos/builtin/packages/py-torchtext/package.py index 95fde8068ac14e..de5c66f20ee8d6 100644 --- 
a/var/spack/repos/builtin/packages/py-torchtext/package.py +++ b/var/spack/repos/builtin/packages/py-torchtext/package.py @@ -93,3 +93,18 @@ class PyTorchtext(PythonPackage): depends_on("py-pybind11", when="@0.8:", type=("build", "link")) depends_on("py-six", when="@:0.6", type=("build", "run")) depends_on("py-sentencepiece", when="@:0.7", type=("build", "run")) + + def patch(self): + # Add missing rpaths, which requires patching due to hardcoded cmake_args + if self.spec.satisfies("@0.13:"): + cmake_args = [ + f"-DCMAKE_INSTALL_RPATH={python_platlib}/torchtext/lib", + "-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON", + ] + cmake_str = ", ".join(f"'{arg}'" for arg in cmake_args) + filter_file( + "cmake_args = [", + f"cmake_args = [{cmake_str},", + "tools/setup_helpers/extension.py", + string=True, + ) From 6b5a479d1e235aca420e746c697037b83f33cb16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= <765740+giordano@users.noreply.github.com> Date: Tue, 5 Nov 2024 08:42:06 +0000 Subject: [PATCH 086/208] extrae: fix build with gcc@14 (#47407) --- var/spack/repos/builtin/packages/extrae/package.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/var/spack/repos/builtin/packages/extrae/package.py b/var/spack/repos/builtin/packages/extrae/package.py index ea7fc38bbb8496..86714923e6f36d 100644 --- a/var/spack/repos/builtin/packages/extrae/package.py +++ b/var/spack/repos/builtin/packages/extrae/package.py @@ -169,6 +169,18 @@ def flag_handler(self, name, flags): flags.append("-lintl") elif name == "ldflags": flags.append("-pthread") + + # This is to work around + # . + if self.spec.satisfies("%gcc@14:") and name == "cflags": + flags.extend( + [ + "-Wno-error=incompatible-pointer-types", + "-Wno-error=implicit-function-declaration", + "-Wno-error=int-conversion", + ] + ) + return self.build_system_flags(name, flags) def install(self, spec, prefix): From b7601f304214561e34e964ef34dbd39f573176a3 Mon Sep 17 00:00:00 2001 From: David Boehme Date: Tue, 5 Nov 2024 09:51:19 +0100 Subject: [PATCH 087/208] Add Adiak v0.4.1 (#47429) --- var/spack/repos/builtin/packages/adiak/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/adiak/package.py b/var/spack/repos/builtin/packages/adiak/package.py index e1ad9344c17153..66aaa0a18c52e0 100644 --- a/var/spack/repos/builtin/packages/adiak/package.py +++ b/var/spack/repos/builtin/packages/adiak/package.py @@ -22,8 +22,9 @@ class Adiak(CMakePackage): license("MIT") version( - "0.4.0", commit="7e8b7233f8a148b402128ed46b2f0c643e3b397e", submodules=True, preferred=True + "0.4.1", commit="7ac997111785bee6d9391664b1d18ebc2b3c557b", submodules=True, preferred=True ) + version("0.4.0", commit="7e8b7233f8a148b402128ed46b2f0c643e3b397e", submodules=True) version("0.2.2", commit="3aedd494c81c01df1183af28bc09bade2fabfcd3", submodules=True) version( "0.3.0-alpha", From d45f682573e9f9b38b17fd2cbfd188da551b45f9 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 5 Nov 2024 10:51:12 +0100 Subject: [PATCH 088/208] Revert "Ci generate on change (#47318)" (#47431) This reverts commit 1462c357619fedf7354bc60f9178b2199258ebd2. 
--- lib/spack/spack/ci.py | 94 ++++++++++------------------------ lib/spack/spack/test/cmd/ci.py | 42 --------------- 2 files changed, 27 insertions(+), 109 deletions(-) diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index 8a23bc3ae1357b..5a8b2ae1e7d49a 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -37,7 +37,6 @@ import spack.error import spack.main import spack.mirror -import spack.package_base import spack.paths import spack.repo import spack.spec @@ -265,22 +264,14 @@ def _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions): def get_change_revisions(): """If this is a git repo get the revisions to use when checking for changed packages and spack core modules.""" - rev1 = None - rev2 = None - - # Note: git_dir may be a file in a worktree. If it exists, attempt to use git - # to determine if there is a revision git_dir = os.path.join(spack.paths.prefix, ".git") - if os.path.exists(git_dir): - # The default will only find changed packages from the last - # commit. When the commit is a merge commit, this is will return all of the - # changes on the topic. - # TODO: Handle the case where the clone is not shallow clone of a merge commit - # using `git merge-base` - rev1 = "HEAD^" - rev2 = "HEAD" - - return rev1, rev2 + if os.path.exists(git_dir) and os.path.isdir(git_dir): + # TODO: This will only find changed packages from the last + # TODO: commit. While this may work for single merge commits + # TODO: when merging the topic branch into the base, it will + # TODO: require more thought outside of that narrow case. + return "HEAD^", "HEAD" + return None, None def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"): @@ -399,7 +390,7 @@ class SpackCI: used by the CI generator(s). """ - def __init__(self, ci_config, spec_labels=None, stages=None): + def __init__(self, ci_config, spec_labels, stages): """Given the information from the ci section of the config and the staged jobs, set up meta data needed for generating Spack CI IR. @@ -417,9 +408,8 @@ def __init__(self, ci_config, spec_labels=None, stages=None): } jobs = self.ir["jobs"] - if spec_labels and stages: - for spec, dag_hash in _build_jobs(spec_labels, stages): - jobs[dag_hash] = self.__init_job(spec) + for spec, dag_hash in _build_jobs(spec_labels, stages): + jobs[dag_hash] = self.__init_job(spec) for name in self.named_jobs: # Skip the special named jobs @@ -715,53 +705,14 @@ def generate_gitlab_ci_yaml( files (spack.yaml, spack.lock), etc should be written. GitLab requires this to be within the project directory. """ - rev1, rev2 = get_change_revisions() - tty.debug(f"Got following revisions: rev1={rev1}, rev2={rev2}") - - # Get the joined "ci" config with all of the current scopes resolved - ci_config = cfg.get("ci") - spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None) - - changed = rev1 and rev2 - affected_pkgs = None - if spack_prune_untouched and changed: - affected_pkgs = compute_affected_packages(rev1, rev2) - tty.debug("affected pkgs:") - if affected_pkgs: - for p in affected_pkgs: - tty.debug(f" {p}") - else: - tty.debug(" no affected packages...") - - possible_builds = spack.package_base.possible_dependencies(*env.user_specs) - changed = any((spec in p for p in possible_builds.values()) for spec in affected_pkgs) - - if not changed: - spack_ci = SpackCI(ci_config) - spack_ci_ir = spack_ci.generate_ir() - - # No jobs should be generated. 
- noop_job = spack_ci_ir["jobs"]["noop"]["attributes"] - # If this job fails ignore the status and carry on - noop_job["retry"] = 0 - noop_job["allow_failure"] = True - - tty.msg("Skipping concretization, generating no-op job") - output_object = {"no-specs-to-rebuild": noop_job} - - # Ensure the child pipeline always runs - output_object["workflow"] = {"rules": [{"when": "always"}]} - - with open(output_file, "w") as f: - ruamel.yaml.YAML().dump(output_object, f) - - return - with spack.concretize.disable_compiler_existence_check(): with env.write_transaction(): env.concretize() env.write() + # Get the joined "ci" config with all of the current scopes resolved + ci_config = cfg.get("ci") + if not ci_config: raise SpackCIError("Environment does not have a `ci` configuration") @@ -786,13 +737,20 @@ def generate_gitlab_ci_yaml( dependent_depth = None prune_untouched_packages = False + spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None) if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true": # Requested to prune untouched packages, but assume we won't do that # unless we're actually in a git repo. - if changed: + rev1, rev2 = get_change_revisions() + tty.debug(f"Got following revisions: rev1={rev1}, rev2={rev2}") + if rev1 and rev2: # If the stack file itself did not change, proceed with pruning if not get_stack_changed(env.manifest_path, rev1, rev2): prune_untouched_packages = True + affected_pkgs = compute_affected_packages(rev1, rev2) + tty.debug("affected pkgs:") + for p in affected_pkgs: + tty.debug(f" {p}") affected_specs = get_spec_filter_list( env, affected_pkgs, dependent_traverse_depth=dependent_depth ) @@ -1140,6 +1098,11 @@ def main_script_replacements(cmd): # warn only if there was actually a CDash configuration. 
tty.warn("Unable to populate buildgroup without CDash credentials") + service_job_retries = { + "max": 2, + "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"], + } + if copy_only_pipeline: stage_names.append("copy") sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"]) @@ -1199,10 +1162,7 @@ def main_script_replacements(cmd): ) final_job["when"] = "always" - final_job["retry"] = { - "max": 2, - "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"], - } + final_job["retry"] = service_job_retries final_job["interruptible"] = True final_job["dependencies"] = [] diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 2f0e053265c2d1..36aa992c639c9c 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -1650,45 +1650,3 @@ def fake_dyn_mapping_urlopener(*args, **kwargs): assert job.get("variables", {}).get("MY_VAR") == "hello" assert "ignored_field" not in job assert "unallowed_field" not in job - - -def test_ci_generate_noop_no_concretize( - tmpdir, - working_env, - mutable_mock_env_path, - install_mockery, - mock_packages, - monkeypatch, - ci_base_environment, -): - # Write the enviroment file - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ -spack: - specs: - - pkg-a - mirrors: - buildcache-destination: https://my.fake.mirror - ci: - type: gitlab -""" - ) - - def fake_compute_affected(r1=None, r2=None): - return [] - - monkeypatch.setattr(ci, "compute_affected_packages", fake_compute_affected) - monkeypatch.setenv("SPACK_PRUNE_UNTOUCHED", "TRUE") # enables pruning of untouched specs - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) - - with ev.read("test"): - ci_cmd("generate", "--output-file", outputfile) - - with open(outputfile) as of: - pipeline_doc = syaml.load(of.read()) - assert "no-specs-to-rebuild" in pipeline_doc From dadb30f0e2cf64d6189bbabae2eaf9452bd53272 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 5 Nov 2024 12:30:32 +0100 Subject: [PATCH 089/208] libc.py: detect glibc also in chinese locale (#47434) --- lib/spack/spack/util/libc.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/util/libc.py b/lib/spack/spack/util/libc.py index 55e8e3d26b6e48..148c4cb13a529a 100644 --- a/lib/spack/spack/util/libc.py +++ b/lib/spack/spack/util/libc.py @@ -9,20 +9,30 @@ import shlex import sys from subprocess import PIPE, run -from typing import List, Optional +from typing import Dict, List, Optional import spack.spec import spack.util.elf +#: Pattern to distinguish glibc from other libc implementations +GLIBC_PATTERN = r"\b(?:Free Software Foundation|Roland McGrath|Ulrich Depper)\b" + + +def _env() -> Dict[str, str]: + """Currently only set LC_ALL=C without clearing further environment variables""" + return {**os.environ, "LC_ALL": "C"} + def _libc_from_ldd(ldd: str) -> Optional["spack.spec.Spec"]: try: - result = run([ldd, "--version"], stdout=PIPE, stderr=PIPE, check=False) + result = run([ldd, "--version"], stdout=PIPE, stderr=PIPE, check=False, env=_env()) stdout = result.stdout.decode("utf-8") except Exception: return None - if not re.search(r"\bFree Software Foundation\b", stdout): + # The string "Free Software Foundation" is sometimes translated and not detected, but the names + # of the authors are typically present. 
+ if not re.search(GLIBC_PATTERN, stdout): return None version_str = re.match(r".+\(.+\) (.+)", stdout) @@ -38,7 +48,7 @@ def default_search_paths_from_dynamic_linker(dynamic_linker: str) -> List[str]: """If the dynamic linker is glibc at a certain version, we can query the hard-coded library search paths""" try: - result = run([dynamic_linker, "--help"], stdout=PIPE, stderr=PIPE, check=False) + result = run([dynamic_linker, "--help"], stdout=PIPE, stderr=PIPE, check=False, env=_env()) assert result.returncode == 0 out = result.stdout.decode("utf-8") except Exception: @@ -74,7 +84,9 @@ def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"] # Now try to figure out if glibc or musl, which is the only ones we support. # In recent glibc we can simply execute the dynamic loader. In musl that's always the case. try: - result = run([dynamic_linker, "--version"], stdout=PIPE, stderr=PIPE, check=False) + result = run( + [dynamic_linker, "--version"], stdout=PIPE, stderr=PIPE, check=False, env=_env() + ) stdout = result.stdout.decode("utf-8") stderr = result.stderr.decode("utf-8") except Exception: @@ -91,7 +103,7 @@ def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"] return spec except Exception: return None - elif re.search(r"\bFree Software Foundation\b", stdout): + elif re.search(GLIBC_PATTERN, stdout): # output is like "ld.so (...) stable release version 2.33." match = re.search(r"version (\d+\.\d+(?:\.\d+)?)", stdout) if not match: From eb9ff5d7a7d47f112ece5a4c70ef603a047a2fbc Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 5 Nov 2024 13:25:19 +0100 Subject: [PATCH 090/208] paraview: add forward compat bound with cuda (#47430) --- var/spack/repos/builtin/packages/paraview/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 874f8bed64155a..442bd2b12c2c0a 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -325,6 +325,9 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/8474 depends_on("proj@8.1.0", when="@5.11:") + # Patches to vendored VTK-m are needed for forward compat with CUDA 12 (mr 2972 and 3259) + depends_on("cuda@:11", when="+cuda") + patch("stl-reader-pv440.patch", when="@4.4.0") # Broken gcc-detection - improved in 5.1.0, redundant later From 09ae2516d57e098a2f030dac01c86a796ad139f9 Mon Sep 17 00:00:00 2001 From: Martin Lang <67915889+lang-m@users.noreply.github.com> Date: Tue, 5 Nov 2024 14:20:10 +0100 Subject: [PATCH 091/208] cgal: add v6.0.1 (#47285) --- var/spack/repos/builtin/packages/cgal/package.py | 1 + var/spack/repos/builtin/packages/dealii/package.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/cgal/package.py b/var/spack/repos/builtin/packages/cgal/package.py index 18170077580437..c1030d42d0aba1 100644 --- a/var/spack/repos/builtin/packages/cgal/package.py +++ b/var/spack/repos/builtin/packages/cgal/package.py @@ -17,6 +17,7 @@ class Cgal(CMakePackage): homepage = "https://www.cgal.org/" url = "https://github.com/CGAL/cgal/releases/download/v5.4.1/CGAL-5.4.1.tar.xz" + version("6.0.1", sha256="0acdfbf317c556630dd526f3253780f29b6ec9713ee92903e81b5c93c0f59b7f") version("5.6", sha256="dcab9b08a50a06a7cc2cc69a8a12200f8d8f391b9b8013ae476965c10b45161f") version("5.5.3", 
sha256="0a04f662693256328b05babfabb5e3a5b7db2f5a58d52e3c520df9d0828ddd73") version("5.5.2", sha256="b2b05d5616ecc69facdc24417cce0b04fb4321491d107db45103add520e3d8c3") diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 94563ec32dc78e..50ff2dbb5177b7 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -180,8 +180,9 @@ class Dealii(CMakePackage, CudaPackage): depends_on("arborx+trilinos", when="@9.3:+arborx+trilinos") depends_on("arpack-ng+mpi", when="+arpack+mpi") depends_on("assimp", when="@9.0:+assimp") - depends_on("cgal", when="@9.4:+cgal") - depends_on("cgal@5:", when="@9.5:+cgal") + # cgal 6 not yet supported: https://github.com/spack/spack/pull/47285#issuecomment-2455403447 + depends_on("cgal@:5", when="@9.4:+cgal") + depends_on("cgal@5", when="@9.5:+cgal") depends_on("doxygen+graphviz", when="+doc") depends_on("graphviz", when="+doc") depends_on("ginkgo", when="@9.1:+ginkgo") From 2b9c6790f22db8161f1dca9a37bdf5d68ab90482 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 5 Nov 2024 15:50:48 +0100 Subject: [PATCH 092/208] omega-h: fix versioning and cuda compat (#47433) --- .../stacks/e4s-neoverse_v1/spack.yaml | 4 +- .../stacks/e4s-power/spack.yaml | 2 +- .../cloud_pipelines/stacks/e4s/spack.yaml | 2 +- .../repos/builtin/packages/ceed/package.py | 2 +- .../repos/builtin/packages/omega-h/package.py | 43 +++++++------------ .../repos/builtin/packages/xsdk/package.py | 2 +- 6 files changed, 22 insertions(+), 33 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml index 24a488fbe921bf..d8e8091676644d 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml @@ -225,7 +225,7 @@ spack: - magma +cuda cuda_arch=75 - mfem +cuda cuda_arch=75 - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=75 - - omega-h +cuda cuda_arch=75 + - "omega-h@:9 +cuda cuda_arch=75" # https://github.com/SCOREC/omega_h/issues/116 - parsec +cuda cuda_arch=75 - petsc +cuda cuda_arch=75 - py-torch +cuda cuda_arch=75 @@ -274,7 +274,7 @@ spack: - magma +cuda cuda_arch=80 - mfem +cuda cuda_arch=80 - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=80 - - omega-h +cuda cuda_arch=80 + - "omega-h@:9 +cuda cuda_arch=80" # https://github.com/SCOREC/omega_h/issues/116 - parsec +cuda cuda_arch=80 - petsc +cuda cuda_arch=80 - py-torch +cuda cuda_arch=80 diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index a770b0a299a13d..6c30947d4ffdf5 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -233,7 +233,7 @@ spack: - magma +cuda cuda_arch=70 - mfem +cuda cuda_arch=70 - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=70 - - omega-h +cuda cuda_arch=70 + - "omega-h@:9 +cuda cuda_arch=70" # https://github.com/SCOREC/omega_h/issues/116 - parsec +cuda cuda_arch=70 - petsc +cuda cuda_arch=70 - raja +cuda cuda_arch=70 diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 0b81e53d568d1d..a3ceb56a35b2d0 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ 
b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -239,7 +239,7 @@ spack: - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=80 ^cusz +cuda cuda_arch=80 - magma +cuda cuda_arch=80 - mfem +cuda cuda_arch=80 - - omega-h +cuda cuda_arch=80 + - "omega-h@:9 +cuda cuda_arch=80" # https://github.com/SCOREC/omega_h/issues/116 - parsec +cuda cuda_arch=80 - petsc +cuda cuda_arch=80 - py-torch +cuda cuda_arch=80 diff --git a/var/spack/repos/builtin/packages/ceed/package.py b/var/spack/repos/builtin/packages/ceed/package.py index d346bb667ed865..7eaf0411186840 100644 --- a/var/spack/repos/builtin/packages/ceed/package.py +++ b/var/spack/repos/builtin/packages/ceed/package.py @@ -240,7 +240,7 @@ class Ceed(BundlePackage, CudaPackage, ROCmPackage): # Omega_h # ceed-5.0 - depends_on("omega-h@scorec.10.1.0", when="@5.0.0+omega-h") + depends_on("omega-h@10.1.0", when="@5.0.0+omega-h") depends_on("omega-h~trilinos", when="@5.0.0+omega-h+quickbuild") # MFEM, Laghos, Remhos diff --git a/var/spack/repos/builtin/packages/omega-h/package.py b/var/spack/repos/builtin/packages/omega-h/package.py index e4432f2e104dcb..67221b389af2fd 100644 --- a/var/spack/repos/builtin/packages/omega-h/package.py +++ b/var/spack/repos/builtin/packages/omega-h/package.py @@ -20,22 +20,22 @@ class OmegaH(CMakePackage, CudaPackage): tags = ["e4s"] version("main", branch="main") version( - "scorec.10.8.5", + "10.8.5-scorec", commit="62026fc305356abb5e02a9fce3fead9cf5077fbe", git="https://github.com/SCOREC/omega_h.git", ) version( - "scorec.10.7.0", + "10.7.0-scorec", commit="0e5de8618c3370f702e08c1b1af476dbbc118892", git="https://github.com/SCOREC/omega_h.git", ) version( - "scorec.10.6.0", + "10.6.0-scorec", commit="f376fad4741b55a4b2482218eb3437d719b7c72e", git="https://github.com/SCOREC/omega_h.git", ) version( - "scorec.10.1.0", + "10.1.0-scorec", commit="e88912368e101d940f006019585701a704295ab0", git="https://github.com/SCOREC/omega_h.git", ) @@ -73,26 +73,16 @@ class OmegaH(CMakePackage, CudaPackage): depends_on("trilinos +kokkos", when="+trilinos") depends_on("kokkos", when="+kokkos") depends_on("zlib-api", when="+zlib") - # Note: '+cuda' and 'cuda_arch' variants are added by the CudaPackage - depends_on("cuda", when="+cuda") - conflicts( - "^cuda@11.2", - when="@scorec.10.1.0:", - msg="Thrust is broken in CUDA = 11.2.* see https://github.com/sandialabs/omega_h/issues/366", - ) - conflicts( - "^cuda@:11.3", - when="@scorec.10.8.5:", - msg="see https://github.com/SCOREC/omega_h/issues/66", - ) - # the sandia repo has a fix for cuda > 11.2 support - # see github.com/sandialabs/omega_h/pull/373 - conflicts( - "^cuda@11.2", - when="@:9.34.4", - msg="Thrust is broken in CUDA = 11.2.* see https://github.com/sandialabs/omega_h/issues/366", - ) + with when("+cuda"): + # https://github.com/SCOREC/omega_h/commit/40a2d36d0b747a7147aeed238a0323f40b227cb2 + depends_on("cuda@11.4:", when="@10.8.3:") + + # https://github.com/SCOREC/omega_h/commit/c2109d2900696974ee66c3fbe6a1ec0e93b66cb6 + depends_on("cuda@:11", when="@:10.6") + + # Single, broken CUDA version. 
+ conflicts("^cuda@11.2", msg="See https://github.com/sandialabs/omega_h/issues/366") # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=86610 conflicts("%gcc@8:8.2", when="@:9.22.1") @@ -120,9 +110,8 @@ def cmake_args(self): args.append("-DBUILD_SHARED_LIBS:BOOL=OFF") if "+mpi" in self.spec: args.append("-DOmega_h_USE_MPI:BOOL=ON") - ver = self.spec.version # old versions don't call find_package(MPI) - if ver < Version("9.33.2") and "scorec" not in str(ver): + if self.spec.satisfies("@:9.33.1"): args.append("-DCMAKE_CXX_COMPILER:FILEPATH={0}".format(self.spec["mpi"].mpicxx)) else: args.append("-DOmega_h_USE_MPI:BOOL=OFF") @@ -131,7 +120,7 @@ def cmake_args(self): cuda_arch_list = self.spec.variants["cuda_arch"].value cuda_arch = cuda_arch_list[0] if cuda_arch != "none": - if "scorec" in str(self.spec.version): + if self.spec.satisfies("@10:"): args.append("-DOmega_h_CUDA_ARCH={0}".format(cuda_arch)) else: args.append("-DCMAKE_CUDA_FLAGS=-arch=sm_{0}".format(cuda_arch)) @@ -174,7 +163,7 @@ def flag_handler(self, name, flags): def test_mesh(self): """test construction, adaptation, and conversion of a mesh""" - if self.spec.satisfies("@:9.34.0") and not self.spec.satisfies("@:scorec"): + if self.spec.satisfies("@:9.34.0"): raise SkipTest("Package must be installed as version 9.34.1 or later") with test_part(self, "test_mesh_create", purpose="mesh construction"): diff --git a/var/spack/repos/builtin/packages/xsdk/package.py b/var/spack/repos/builtin/packages/xsdk/package.py index 85bf27ae2b092b..29cfc1cec8ceca 100644 --- a/var/spack/repos/builtin/packages/xsdk/package.py +++ b/var/spack/repos/builtin/packages/xsdk/package.py @@ -216,7 +216,7 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("omega-h +trilinos", when="+trilinos +omega-h") xsdk_depends_on("omega-h ~trilinos", when="~trilinos +omega-h") - xsdk_depends_on("omega-h@scorec.10.6.0", when="@1.0.0 +omega-h") + xsdk_depends_on("omega-h@10.6.0", when="@1.0.0 +omega-h") xsdk_depends_on("omega-h@9.34.13", when="@0.8.0 +omega-h") xsdk_depends_on("strumpack ~cuda", when="~cuda +strumpack") From c601692bc794943b08ac108ce44d6f3428bb565a Mon Sep 17 00:00:00 2001 From: wspear Date: Tue, 5 Nov 2024 12:08:10 -0800 Subject: [PATCH 093/208] Fix filter_compiler_wrappers with mpi (#47448) --- var/spack/repos/builtin/packages/tau/package.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index 94d939c03fd6f5..b2bcac2891be96 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -427,8 +427,14 @@ def install(self, spec, prefix): # Link arch-specific directories into prefix since there is # only one arch per prefix the way spack installs. self.link_tau_arch_dirs() - # TAU may capture Spack's internal compiler wrapper. Replace - # it with the correct compiler. + # TAU may capture Spack's internal compiler wrapper. Fixed + # by filter_compiler_wrappers. Switch back the environment + # variables the filter uses. 
+        if "+mpi" in spec:
+            env["CC"] = spack_cc
+            env["CXX"] = spack_cxx
+            env["FC"] = spack_fc
+            env["F77"] = spack_f77
 
     def link_tau_arch_dirs(self):
         for subdir in os.listdir(self.prefix):

From 801390f6becf1a0411ff45008a544e866bd91bd8 Mon Sep 17 00:00:00 2001
From: Jon Rood
Date: Tue, 5 Nov 2024 13:08:21 -0700
Subject: [PATCH 094/208] masa: add versions and update dependencies (#47447)

* masa: add versions

* masa: update dependencies
---
 .../repos/builtin/packages/masa/package.py   | 23 ++++++++++++-------
 1 file changed, 15 insertions(+), 8 deletions(-)

diff --git a/var/spack/repos/builtin/packages/masa/package.py b/var/spack/repos/builtin/packages/masa/package.py
index 3145f4d4f2e6f1..c7086a54a1a887 100644
--- a/var/spack/repos/builtin/packages/masa/package.py
+++ b/var/spack/repos/builtin/packages/masa/package.py
@@ -19,20 +19,27 @@ class Masa(AutotoolsPackage):
     license("LGPL-2.1-or-later")
 
     version("master", branch="master")
+    version("0.51.0", tag="0.51.0")
+    version("0.50.0", tag="0.50.0")
+    version("0.44.0", tag="0.44.0")
+    version("0.43.1", tag="0.43.1")
+    version("0.43.0", tag="0.43.0")
+    version("0.42.0", tag="0.42.0")
 
-    depends_on("c", type="build")  # generated
-    depends_on("cxx", type="build")  # generated
-    depends_on("fortran", type="build")  # generated
+    depends_on("c", type="build")
+    depends_on("cxx", type="build")
+    depends_on("fortran", type="build", when="+fortran")
 
-    variant("fortran", default=True, description="Compile with Fortran interfaces")
-    variant("python", default=True, description="Compile with Python interfaces")
+    variant("fortran", default=False, description="Compile with Fortran interfaces")
+    variant("python", default=False, description="Compile with Python interfaces")
 
+    depends_on("gettext")
+    depends_on("metaphysicl")
+    depends_on("python")
     depends_on("autoconf", type="build")
     depends_on("automake", type="build")
     depends_on("libtool", type="build")
-    depends_on("swig", type="build")
-    depends_on("python", when="+python")
-    depends_on("metaphysicl")
+    depends_on("swig", type="build", when="+python")
 
     def configure_args(self):
         options = []

From 0c164d274067f0500c0edd37a4e9bec18374d2bf Mon Sep 17 00:00:00 2001
From: Richarda Butler <39577672+RikkiButler20@users.noreply.github.com>
Date: Wed, 6 Nov 2024 00:53:52 -0800
Subject: [PATCH 095/208] Feature: Allow variants to propagate if not
 available in source pkg (#42931)

Variants can now be propagated from a dependent package to (transitive)
dependencies, even if the source or transitive dependencies do not have
the propagated variants.

For example, here `zlib` doesn't have a `guile` variant, but `gmake` does:
```
$ spack spec zlib++guile
 -   zlib@1.3%gcc@12.2.0+optimize+pic+shared build_system=makefile arch=linux-rhel8-broadwell
 -       ^gcc-runtime@12.2.0%gcc@12.2.0 build_system=generic arch=linux-rhel8-broadwell
 -       ^gmake@4.4.1%gcc@12.2.0+guile build_system=generic arch=linux-rhel8-broadwell
```

Adding this property has some strange ramifications for `satisfies()`. In particular:

* The abstract specs `pkg++variant` and `pkg+variant` do not intersect, because
  `+variant` implies that `pkg` *has* the variant, but `++variant` does not.
* This means that `spec.satisfies("++foo")` is `True` if:
  * for concrete specs: `spec` and its dependencies all have `foo` set if it exists
  * for abstract specs: no dependency of `spec` has `~foo` (i.e. no dependency
    contradicts `++foo`).
* This also means that `Spec("++foo").satisfies("+foo")` is `False` -- we only
  know after concretization.
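To make the asymmetry concrete, here is a minimal sketch against Spack's Python
API (`pkg` and `dep` are hypothetical package names; each assertion simply
restates one of the rules above rather than documenting new behavior):

```python
from spack.spec import Spec

# pkg++foo and pkg+foo do not intersect: +foo asserts that pkg itself
# has the variant, while ++foo only asks that nothing in the DAG
# contradicts the propagated value.
assert not Spec("pkg++foo").intersects("pkg+foo")

# For the same reason, the propagated form cannot be said to satisfy
# the non-propagated form before concretization.
assert not Spec("pkg++foo").satisfies("pkg+foo")

# An abstract spec stops satisfying ++foo as soon as some dependency
# explicitly contradicts the propagated value with ~foo.
assert not Spec("pkg++foo ^dep~foo").satisfies("pkg++foo")
```

This is also why `zlib++guile` above concretizes cleanly even though `zlib`
never gains a `guile` variant: the propagated value only binds on nodes, such
as `gmake`, that actually define it.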
The `satisfies()` semantics may be surprising, but this is the cost of introducing non-subset semantics (which are more useful than proper subsets here). - [x] Change checks for variants - [x] Resolve conflicts - [x] Add tests - [x] Add documentation --------- Co-authored-by: Gregory Becker Co-authored-by: Massimiliano Culpo --- lib/spack/docs/basic_usage.rst | 4 + lib/spack/spack/solver/asp.py | 5 +- lib/spack/spack/solver/concretize.lp | 71 ++++++-- lib/spack/spack/spec.py | 76 ++++++++- lib/spack/spack/test/concretize.py | 154 ++++++++++++++++-- lib/spack/spack/test/spec_semantics.py | 55 ++++++- lib/spack/spack/variant.py | 2 +- .../packages/dependency-foo-bar/package.py | 2 + .../packages/direct-dep-foo-bar/package.py | 22 +++ .../builtin.mock/packages/openblas/package.py | 2 + .../packages/parent-foo-bar-fee/package.py | 23 +++ .../packages/parent-foo-bar/package.py | 1 + .../builtin.mock/packages/perl/package.py | 2 + .../builtin.mock/packages/pkg-a/package.py | 8 + .../second-dependency-foo-bar-fee/package.py | 21 +++ 15 files changed, 413 insertions(+), 35 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/direct-dep-foo-bar/package.py create mode 100644 var/spack/repos/builtin.mock/packages/parent-foo-bar-fee/package.py create mode 100644 var/spack/repos/builtin.mock/packages/second-dependency-foo-bar-fee/package.py diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 171a82b6ea1e37..21733a256633b0 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -1359,6 +1359,10 @@ For example, for the ``stackstart`` variant: mpileaks stackstart==4 # variant will be propagated to dependencies mpileaks stackstart=4 # only mpileaks will have this variant value +Spack also allows variants to be propagated from a package that does +not have that variant. + + ^^^^^^^^^^^^^^ Compiler Flags ^^^^^^^^^^^^^^ diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index cb4799a45f37bf..56014717ddc940 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -2032,9 +2032,12 @@ def _spec_clauses( for variant_def in variant_defs: self.variant_values_from_specs.add((spec.name, id(variant_def), value)) - clauses.append(f.variant_value(spec.name, vname, value)) if variant.propagate: clauses.append(f.propagate(spec.name, fn.variant_value(vname, value))) + if self.pkg_class(spec.name).has_variant(vname): + clauses.append(f.variant_value(spec.name, vname, value)) + else: + clauses.append(f.variant_value(spec.name, vname, value)) # compiler and compiler version if spec.compiler: diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 2195cd6b08b4d1..f4695be9b90fbf 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -57,6 +57,12 @@ internal_error("provider with no virtual node"). :- provider(PackageNode, _), not attr("node", PackageNode), internal_error("provider with no real node"). +:- node_has_variant(PackageNode, _, _), not attr("node", PackageNode), + internal_error("node has variant for a non-node"). +:- attr("variant_set", PackageNode, _, _), not attr("node", PackageNode), + internal_error("variant_set for a non-node"). +:- variant_is_propagated(PackageNode, _), not attr("node", PackageNode), + internal_error("variant_is_propagated for a non-node"). :- attr("root", node(ID, PackageNode)), ID > min_dupe_id, internal_error("root with a non-minimal duplicate ID"). 
@@ -575,7 +581,8 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual) % or used somewhere :- attr("virtual_node", node(_, Virtual)), not attr("virtual_on_incoming_edges", _, Virtual), - not attr("virtual_root", node(_, Virtual)). + not attr("virtual_root", node(_, Virtual)), + internal_error("virtual node does not match incoming edge"). attr("virtual_on_incoming_edges", ProviderNode, Virtual) :- attr("virtual_on_edge", _, ProviderNode, Virtual). @@ -629,7 +636,8 @@ do_not_impose(EffectID, node(X, Package)) virtual_condition_holds(_, PossibleProvider, Virtual), PossibleProvider != ProviderNode, explicitly_requested_root(PossibleProvider), - not explicitly_requested_root(ProviderNode). + not explicitly_requested_root(ProviderNode), + internal_error("If a root can provide a virtual, it must be the provider"). % A package cannot be the actual provider for a virtual if it does not % fulfill the conditions to provide that virtual @@ -772,7 +780,8 @@ required_provider(Provider, Virtual) pkg_fact(Virtual, condition_effect(ConditionID, EffectID)), imposed_constraint(EffectID, "node", Provider). -:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider. +:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider, + internal_error("If a provider is required the concretizer must use it"). % TODO: the following choice rule allows the solver to add compiler % flags if their only source is from a requirement. This is overly-specific @@ -852,7 +861,8 @@ variant_defined(PackageNode, Name) :- variant_definition(PackageNode, Name, _). % for two or more variant definitions, this prefers the last one defined. :- node_has_variant(node(NodeID, Package), Name, SelectedVariantID), variant_definition(node(NodeID, Package), Name, VariantID), - VariantID > SelectedVariantID. + VariantID > SelectedVariantID, + internal_error("If the solver picks a variant descriptor it must use that variant descriptor"). % B: Associating applicable package rules with nodes @@ -969,6 +979,7 @@ error(100, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come fr :- attr("variant_set", node(ID, Package), Variant, Value), not attr("variant_value", node(ID, Package), Variant, Value). + internal_error("If a variant is set to a value it must have that value"). % The rules below allow us to prefer default values for variants % whenever possible. If a variant is set in a spec, or if it is @@ -979,7 +990,7 @@ variant_not_default(node(ID, Package), Variant, Value) % variants set explicitly on the CLI don't count as non-default not attr("variant_set", node(ID, Package), Variant, Value), % variant values forced by propagation don't count as non-default - not propagate(node(ID, Package), variant_value(Variant, Value)), + not propagate(node(ID, Package), variant_value(Variant, Value, _)), % variants set on externals that we could use don't count as non-default % this makes spack prefer to use an external over rebuilding with the % default configuration @@ -991,7 +1002,7 @@ variant_default_not_used(node(ID, Package), Variant, Value) :- variant_default_value(node(ID, Package), Variant, Value), node_has_variant(node(ID, Package), Variant, _), not attr("variant_value", node(ID, Package), Variant, Value), - not propagate(node(ID, Package), variant_value(Variant, _)), + not propagate(node(ID, Package), variant_value(Variant, _, _)), attr("node", node(ID, Package)). 
% The variant is set in an external spec @@ -1036,10 +1047,14 @@ variant_single_value(PackageNode, Variant) % Propagation semantics %----------------------------------------------------------------------------- +non_default_propagation(variant_value(Name, Value)) :- attr("propagate", RootNode, variant_value(Name, Value)). + % Propagation roots have a corresponding attr("propagate", ...) -propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute). +propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute), not non_default_propagation(PropagatedAttribute). propagate(RootNode, PropagatedAttribute, EdgeTypes) :- attr("propagate", RootNode, PropagatedAttribute, EdgeTypes). +% Special case variants, to inject the source node in the propagated attribute +propagate(RootNode, variant_value(Name, Value, RootNode)) :- attr("propagate", RootNode, variant_value(Name, Value)). % Propagate an attribute along edges to child nodes propagate(ChildNode, PropagatedAttribute) :- @@ -1061,21 +1076,53 @@ propagate(ChildNode, PropagatedAttribute, edge_types(DepType1, DepType2)) :- % If a variant is propagated, and can be accepted, set its value attr("variant_selected", PackageNode, Variant, Value, VariantType, VariantID) :- - propagate(PackageNode, variant_value(Variant, Value)), + propagate(PackageNode, variant_value(Variant, Value, _)), node_has_variant(PackageNode, Variant, VariantID), variant_type(VariantID, VariantType), - variant_possible_value(PackageNode, Variant, Value), - not attr("variant_set", PackageNode, Variant). + variant_possible_value(PackageNode, Variant, Value). % If a variant is propagated, we cannot have extraneous values variant_is_propagated(PackageNode, Variant) :- attr("variant_value", PackageNode, Variant, Value), - propagate(PackageNode, variant_value(Variant, Value)), + propagate(PackageNode, variant_value(Variant, Value, _)), not attr("variant_set", PackageNode, Variant). :- variant_is_propagated(PackageNode, Variant), attr("variant_selected", PackageNode, Variant, Value, _, _), - not propagate(PackageNode, variant_value(Variant, Value)). + not propagate(PackageNode, variant_value(Variant, Value, _)). + +error(100, "{0} and {1} cannot both propagate variant '{2}' to the shared dependency: {3}", + Package1, Package2, Variant, Dependency) :- + % The variant is a singlevalued variant + variant_single_value(node(X, Package1), Variant), + % Dependency is trying to propagate Variant with different values and is not the source package + propagate(node(Z, Dependency), variant_value(Variant, Value1, node(X, Package1))), + propagate(node(Z, Dependency), variant_value(Variant, Value2, node(Y, Package2))), + % Package1 and Package2 and their values are different + Package1 > Package2, Value1 != Value2, + not propagate(node(Z, Dependency), variant_value(Variant, _, node(Z, Dependency))). 
+ +% Cannot propagate the same variant from two different packages if one is a dependency of the other +error(100, "{0} and {1} cannot both propagate variant '{2}'", Package1, Package2, Variant) :- + % The variant is a single-valued variant + variant_single_value(node(X, Package1), Variant), + % Package1 and Package2 and their values are different + Package1 != Package2, Value1 != Value2, + % Package2 is set to propagate the value from Package1 + propagate(node(Y, Package2), variant_value(Variant, Value2, node(X, Package2))), + propagate(node(Y, Package2), variant_value(Variant, Value1, node(X, Package1))), + variant_is_propagated(node(Y, Package2), Variant). + +% Cannot propagate a variant if a different value was set for it in a dependency +error(100, "Cannot propagate the variant '{0}' from the package: {1} because package: {2} is set to exclude it", Variant, Source, Package) :- + % Package has a Variant and Source is propagating Variant + attr("variant_set", node(X, Package), Variant, Value1), + % The packages and values are different + Source != Package, Value1 != Value2, + % The variant is a single-valued variant + variant_single_value(node(X, Package1), Variant), + % A different value is being propagated from somewhere else + propagate(node(X, Package), variant_value(Variant, Value2, node(Y, Source))). %---- % Flags diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index ba3a0f9c379080..3d92879484f8a7 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -3020,7 +3020,12 @@ def ensure_valid_variants(spec): pkg_variants = pkg_cls.variant_names() # reserved names are variants that may be set on any package # but are not necessarily recorded by the package's class - not_existing = set(spec.variants) - (set(pkg_variants) | set(vt.reserved_names)) + propagate_variants = [name for name, variant in spec.variants.items() if variant.propagate] + + not_existing = set(spec.variants) - ( + set(pkg_variants) | set(vt.reserved_names) | set(propagate_variants) + ) + if not_existing: raise vt.UnknownVariantError( f"No such variant {not_existing} for spec: '{spec}'", list(not_existing) @@ -3047,6 +3052,10 @@ def constrain(self, other, deps=True): raise spack.error.UnsatisfiableSpecError(self, other, "constrain a concrete spec") other = self._autospec(other) + if other.concrete and other.satisfies(self): + self._dup(other) + return True + if other.abstract_hash: if not self.abstract_hash or other.abstract_hash.startswith(self.abstract_hash): self.abstract_hash = other.abstract_hash @@ -4525,8 +4534,69 @@ def substitute(self, vspec): # Set the item super().__setitem__(vspec.name, vspec) - def satisfies(self, other): - return all(k in self and self[k].satisfies(other[k]) for k in other) + def partition_variants(self): + non_prop, prop = lang.stable_partition(self.values(), lambda x: not x.propagate) + # Just return the names + non_prop = [x.name for x in non_prop] + prop = [x.name for x in prop] + return non_prop, prop + + def satisfies(self, other: "VariantMap") -> bool: + if self.spec.concrete: + return self._satisfies_when_self_concrete(other) + return self._satisfies_when_self_abstract(other) + + def _satisfies_when_self_concrete(self, other: "VariantMap") -> bool: + non_propagating, propagating = other.partition_variants() + result = all( + name in self and self[name].satisfies(other[name]) for name in non_propagating + ) + if not propagating: + return result + + for node in self.spec.traverse(): + if not all( + node.variants[name].satisfies(other[name]) + for name in 
propagating + if name in node.variants + ): + return False + return result + + def _satisfies_when_self_abstract(self, other: "VariantMap") -> bool: + other_non_propagating, other_propagating = other.partition_variants() + self_non_propagating, self_propagating = self.partition_variants() + + # First check variants without propagation set + result = all( + name in self_non_propagating + and (self[name].propagate or self[name].satisfies(other[name])) + for name in other_non_propagating + ) + if result is False or (not other_propagating and not self_propagating): + return result + + # Check that self doesn't contradict variants propagated by other + if other_propagating: + for node in self.spec.traverse(): + if not all( + node.variants[name].satisfies(other[name]) + for name in other_propagating + if name in node.variants + ): + return False + + # Check that other doesn't contradict variants propagated by self + if self_propagating: + for node in other.spec.traverse(): + if not all( + node.variants[name].satisfies(self[name]) + for name in self_propagating + if name in node.variants + ): + return False + + return result def intersects(self, other): return all(self[k].intersects(other[k]) for k in other if k in self) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index bf96311d4499cd..dd2444df0a7a1f 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -540,21 +540,17 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): @pytest.mark.parametrize( "spec_str,expected_propagation", [ - ("hypre~~shared ^openblas+shared", [("hypre", "~shared"), ("openblas", "+shared")]), # Propagates past a node that doesn't have the variant ("hypre~~shared ^openblas", [("hypre", "~shared"), ("openblas", "~shared")]), + # Propagates from root node to all nodes ( "ascent~~shared +adios2", [("ascent", "~shared"), ("adios2", "~shared"), ("bzip2", "~shared")], ), - # Propagates below a node that uses the other value explicitly + # Propagate from a node that is not the root node ( - "ascent~~shared +adios2 ^adios2+shared", - [("ascent", "~shared"), ("adios2", "+shared"), ("bzip2", "~shared")], - ), - ( - "ascent++shared +adios2 ^adios2~shared", - [("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "+shared")], + "ascent +adios2 ^adios2~~shared", + [("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "~shared")], ), ], ) @@ -564,21 +560,109 @@ def test_concretize_propagate_disabled_variant(self, spec_str, expected_propagat for key, expected_satisfies in expected_propagation: spec[key].satisfies(expected_satisfies) - def test_concretize_propagated_variant_is_not_passed_to_dependent(self): - """Test a package variant value was passed from its parent.""" - spec = Spec("ascent~~shared +adios2 ^adios2+shared") + def test_concretize_propagate_variant_not_dependencies(self): + """Test that when propagating a variant it is not propagated to dependencies that + do not have that variant""" + spec = Spec("quantum-espresso~~invino") spec.concretize() - assert spec.satisfies("^adios2+shared") - assert spec.satisfies("^bzip2~shared") + for dep in spec.traverse(root=False): + assert "invino" not in dep.variants.keys() + + def test_concretize_propagate_variant_exclude_dependency_fail(self): + """Tests that a propagating variant cannot be allowed to be excluded by any of + the source package's dependencies""" + spec = Spec("hypre ~~shared ^openblas +shared") + with pytest.raises(spack.error.UnsatisfiableSpecError): + 
+    def test_concretize_propagate_variant_exclude_dependency_fail(self):
+        """Tests that a propagating variant cannot be excluded by any of the source
+        package's dependencies"""
+        spec = Spec("hypre ~~shared ^openblas +shared")
+        with pytest.raises(spack.error.UnsatisfiableSpecError):
+            spec.concretize()
+
+    def test_concretize_propagate_same_variant_from_direct_dep_fail(self):
+        """Test that an error is raised when the source package propagates a variant and a
+        direct dependency propagates the same variant with a different value"""
+        spec = Spec("ascent +adios2 ++shared ^adios2 ~~shared")
+        with pytest.raises(spack.error.UnsatisfiableSpecError):
+            spec.concretize()
+
+    def test_concretize_propagate_same_variant_in_dependency_fail(self):
+        """Test that when propagating a variant from the source package, none of its
+        dependencies can propagate that variant with a different value. Raises an error."""
+        spec = Spec("ascent +adios2 ++shared ^bzip2 ~~shared")
+        with pytest.raises(spack.error.UnsatisfiableSpecError):
+            spec.concretize()
+
+    def test_concretize_propagate_same_variant_virtual_dependency_fail(self):
+        """Test that an error is raised when the source package propagates a variant and a
+        direct dependency (that provides a virtual package) propagates the same variant
+        with a different value"""
+        spec = Spec("hypre ++shared ^openblas ~~shared")
+        with pytest.raises(spack.error.UnsatisfiableSpecError):
+            spec.concretize()
+
+    def test_concretize_propagate_same_variant_multiple_sources_diamond_dep_fail(self):
+        """Test that propagating the same variant with different values from multiple
+        sources that share a dependency fails"""
+        spec = Spec("parent-foo-bar ^dependency-foo-bar++bar ^direct-dep-foo-bar~~bar")
+        with pytest.raises(spack.error.UnsatisfiableSpecError):
+            spec.concretize()

     def test_concretize_propagate_specified_variant(self):
         """Test that only the specified variant is propagated to the dependencies"""
         spec = Spec("parent-foo-bar ~~foo")
         spec.concretize()

-        assert spec.satisfies("~foo") and spec.satisfies("^dependency-foo-bar~foo")
-        assert spec.satisfies("+bar") and not spec.satisfies("^dependency-foo-bar+bar")
+        assert spec.satisfies("^dependency-foo-bar~foo")
+        assert spec.satisfies("^second-dependency-foo-bar-fee~foo")
+        assert spec.satisfies("^direct-dep-foo-bar~foo")
+
+        assert not spec.satisfies("^dependency-foo-bar+bar")
+        assert not spec.satisfies("^second-dependency-foo-bar-fee+bar")
+        assert not spec.satisfies("^direct-dep-foo-bar+bar")
+
+    def test_concretize_propagate_one_variant(self):
+        """Test that you can specify to propagate one variant and not all"""
+        spec = Spec("parent-foo-bar ++bar ~foo")
+        spec.concretize()
+
+        assert spec.satisfies("~foo") and not spec.satisfies("^dependency-foo-bar~foo")
+        assert spec.satisfies("+bar") and spec.satisfies("^dependency-foo-bar+bar")
+
+    def test_concretize_propagate_through_first_level_deps(self):
+        """Test that boolean valued variants can be propagated past first level
+        dependencies even if the first level dependency does not have the variant"""
+        spec = Spec("parent-foo-bar-fee ++fee")
+        spec.concretize()
+
+        assert spec.satisfies("+fee") and not spec.satisfies("dependency-foo-bar+fee")
+        assert spec.satisfies("^second-dependency-foo-bar-fee+fee")
+
+    def test_concretize_propagate_multiple_variants(self):
+        """Test that multiple boolean valued variants can be propagated from
+        the same source package"""
+        spec = Spec("parent-foo-bar-fee ~~foo ++bar")
+        spec.concretize()
+
+        assert spec.satisfies("~foo") and spec.satisfies("+bar")
+        assert spec.satisfies("^dependency-foo-bar ~foo +bar")
+        assert spec.satisfies("^second-dependency-foo-bar-fee ~foo +bar")
+
+    def test_concretize_propagate_multiple_variants_mulitple_sources(self):
+        """Test propagating multiple different variants from multiple 
sources
+        in a diamond dependency"""
+        spec = Spec("parent-foo-bar ^dependency-foo-bar++bar ^direct-dep-foo-bar~~foo")
+        spec.concretize()
+
+        assert spec.satisfies("^second-dependency-foo-bar-fee+bar")
+        assert spec.satisfies("^second-dependency-foo-bar-fee~foo")
+        assert not spec.satisfies("^dependency-foo-bar~foo")
+        assert not spec.satisfies("^direct-dep-foo-bar+bar")
+
+    def test_concretize_propagate_single_valued_variant(self):
+        """Test propagation for single valued variants"""
+        spec = Spec("multivalue-variant libs==static")
+        spec.concretize()
+
+        assert spec.satisfies("libs=static")
+        assert spec.satisfies("^pkg-a libs=static")

     def test_concretize_propagate_multivalue_variant(self):
         """Test that multivalue variants are propagating the specified value(s)
@@ -591,6 +675,46 @@ def test_concretize_propagate_multivalue_variant(self):
         assert not spec.satisfies("^pkg-a foo=bar")
         assert not spec.satisfies("^pkg-b foo=bar")

+    def test_concretize_propagate_multiple_multivalue_variant(self):
+        """Tests that propagating the same multivalued variant from different sources
+        allows the dependents to accept all propagated values"""
+        spec = Spec("multivalue-variant foo==bar ^pkg-a foo==baz")
+        spec.concretize()
+
+        assert spec.satisfies("multivalue-variant foo=bar")
+        assert spec.satisfies("^pkg-a foo=bar,baz")
+        assert spec.satisfies("^pkg-b foo=bar,baz")
+
+    def test_concretize_propagate_variant_not_in_source(self):
+        """Test that a variant is still propagated even if the source package
+        doesn't have the variant"""
+        spec = Spec("callpath++debug")
+        spec.concretize()
+
+        assert spec.satisfies("^mpich+debug")
+        assert not spec.satisfies("callpath+debug")
+        assert not spec.satisfies("^dyninst+debug")
+
+    def test_concretize_propagate_variant_multiple_deps_not_in_source(self):
+        """Test that a variant can be propagated to multiple dependencies
+        when the variant is not in the source package"""
+        spec = Spec("netlib-lapack++shared")
+        spec.concretize()
+
+        assert spec.satisfies("^openblas+shared")
+        assert spec.satisfies("^perl+shared")
+        assert not spec.satisfies("netlib-lapack+shared")
+
+    def test_concretize_propagate_variant_second_level_dep_not_in_source(self):
+        """Test that a variant can be propagated past first level dependencies
+        when the variant is not in the source package or any of the first level
+        dependencies"""
+        spec = Spec("parent-foo-bar ++fee")
+        spec.concretize()
+
+        assert spec.satisfies("^second-dependency-foo-bar-fee +fee")
+        assert not spec.satisfies("parent-foo-bar +fee")
+
     def test_no_matching_compiler_specs(self, mock_low_high_config):
         # only relevant when not building compilers as needed
         with spack.concretize.enable_compiler_existence_check():
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 1b12a8a80315c3..6342325364a3a0 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -512,9 +512,6 @@ def test_constraining_abstract_specs_with_empty_intersection(self, lhs, rhs):
         ("mpich", "mpich +foo"),
         ("mpich", "mpich~foo"),
         ("mpich", "mpich foo=1"),
-        ("mpich", "mpich++foo"),
-        ("mpich", "mpich~~foo"),
-        ("mpich", "mpich foo==1"),
         ("multivalue-variant foo=bar", "multivalue-variant +foo"),
         ("multivalue-variant foo=bar", "multivalue-variant ~foo"),
         ("multivalue-variant fee=bar", "multivalue-variant fee=baz"),
@@ -536,6 +533,58 @@ def test_concrete_specs_which_do_not_satisfy_abstract(
         with pytest.raises(UnsatisfiableSpecError):
             assert rhs.constrain(lhs)

+    @pytest.mark.parametrize(
+        "lhs,rhs", 
[("mpich", "mpich++foo"), ("mpich", "mpich~~foo"), ("mpich", "mpich foo==1")] + ) + def test_concrete_specs_which_satisfy_abstract(self, lhs, rhs, default_mock_concretization): + lhs, rhs = default_mock_concretization(lhs), Spec(rhs) + + assert lhs.intersects(rhs) + assert rhs.intersects(lhs) + assert lhs.satisfies(rhs) + + s1 = lhs.copy() + s1.constrain(rhs) + assert s1 == lhs and s1.satisfies(lhs) + + s2 = rhs.copy() + s2.constrain(lhs) + assert s2 == lhs and s2.satisfies(lhs) + + @pytest.mark.parametrize( + "lhs,rhs,expected,constrained", + [ + # hdf5++mpi satisfies hdf5, and vice versa, because of the non-contradiction semantic + ("hdf5++mpi", "hdf5", True, "hdf5++mpi"), + ("hdf5", "hdf5++mpi", True, "hdf5++mpi"), + # Same holds true for arbitrary propagated variants + ("hdf5++mpi", "hdf5++shared", True, "hdf5++mpi++shared"), + # Here hdf5+mpi satisfies hdf5++mpi but not vice versa + ("hdf5++mpi", "hdf5+mpi", False, "hdf5+mpi"), + ("hdf5+mpi", "hdf5++mpi", True, "hdf5+mpi"), + # Non contradiction is violated + ("hdf5 ^foo~mpi", "hdf5++mpi", False, "hdf5++mpi ^foo~mpi"), + ("hdf5++mpi", "hdf5 ^foo~mpi", False, "hdf5++mpi ^foo~mpi"), + ], + ) + def test_abstract_specs_with_propagation(self, lhs, rhs, expected, constrained): + """Tests (and documents) behavior of variant propagation on abstract specs. + + Propagated variants do not comply with subset semantic, making it difficult to give + precise definitions. Here we document the behavior that has been decided for the + practical cases we face. + """ + lhs, rhs, constrained = Spec(lhs), Spec(rhs), Spec(constrained) + assert lhs.satisfies(rhs) is expected + + c = lhs.copy() + c.constrain(rhs) + assert c == constrained + + c = rhs.copy() + c.constrain(lhs) + assert c == constrained + def test_satisfies_single_valued_variant(self): """Tests that the case reported in https://github.com/spack/spack/pull/2386#issuecomment-282147639 diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index 3cc5ba2e0ba25d..bce2015c1207bc 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -830,7 +830,7 @@ def prevalidate_variant_value( only if the variant is a reserved variant. """ # don't validate wildcards or variants with reserved names - if variant.value == ("*",) or variant.name in reserved_names: + if variant.value == ("*",) or variant.name in reserved_names or variant.propagate: return [] # raise if there is no definition at all diff --git a/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py b/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py index 5d7f5e98170c78..9a8646baece5ed 100644 --- a/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py +++ b/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py @@ -18,3 +18,5 @@ class DependencyFooBar(Package): variant("foo", default=True, description="") variant("bar", default=False, description="") + + depends_on("second-dependency-foo-bar-fee") diff --git a/var/spack/repos/builtin.mock/packages/direct-dep-foo-bar/package.py b/var/spack/repos/builtin.mock/packages/direct-dep-foo-bar/package.py new file mode 100644 index 00000000000000..9f4fc5d8034da5 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/direct-dep-foo-bar/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class DirectDepFooBar(Package):
+    """This package has a variant "bar", which is False by default, and
+    variant "foo" which is True by default.
+    """
+
+    homepage = "http://www.example.com"
+    url = "http://www.example.com/direct-dep-foo-bar-1.0.tar.gz"
+
+    version("1.0", md5="567890abcdefg12345678900987654321")
+
+    variant("foo", default=True, description="")
+    variant("bar", default=False, description="")
+
+    depends_on("second-dependency-foo-bar-fee")
diff --git a/var/spack/repos/builtin.mock/packages/openblas/package.py b/var/spack/repos/builtin.mock/packages/openblas/package.py
index db288b9a3b62e5..d6ecb25019aa4c 100644
--- a/var/spack/repos/builtin.mock/packages/openblas/package.py
+++ b/var/spack/repos/builtin.mock/packages/openblas/package.py
@@ -25,4 +25,6 @@ class Openblas(Package):
     # To ensure test works with newer gcc versions
     conflicts("%gcc@:10.1", when="@0.2.16:")

+    depends_on("perl")
+
     provides("blas")
diff --git a/var/spack/repos/builtin.mock/packages/parent-foo-bar-fee/package.py b/var/spack/repos/builtin.mock/packages/parent-foo-bar-fee/package.py
new file mode 100644
index 00000000000000..32636df6ab599b
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/parent-foo-bar-fee/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class ParentFooBarFee(Package):
+    """This package has variants "foo" and "bar", which are True by default, and a
+    variant "fee", which is False by default; it depends on another package whose
+    "bar" variant defaults to False.
+    """
+
+    homepage = "http://www.example.com"
+    url = "http://www.example.com/parent-foo-bar-fee-1.0.tar.gz"
+
+    version("1.0", md5="abcdefg01234567890123abcdefghfed")
+
+    variant("foo", default=True, description="")
+    variant("bar", default=True, description="")
+    variant("fee", default=False, description="")
+
+    depends_on("dependency-foo-bar")
diff --git a/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py b/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py
index 064c5740571140..a03d09da2fbd8e 100644
--- a/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py
+++ b/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py
@@ -19,4 +19,5 @@ class ParentFooBar(Package):
     variant("foo", default=True, description="")
     variant("bar", default=True, description="")

+    depends_on("direct-dep-foo-bar")
     depends_on("dependency-foo-bar")
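For orientation, the diamond that these mock packages assemble for the
propagation tests, read off the depends_on() declarations in this commit
(sketch only):

    parent-foo-bar
    |-- direct-dep-foo-bar  ->  second-dependency-foo-bar-fee
    `-- dependency-foo-bar  ->  second-dependency-foo-bar-fee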
diff --git a/var/spack/repos/builtin.mock/packages/perl/package.py b/var/spack/repos/builtin.mock/packages/perl/package.py
index 8d86dec8f32017..2c3f810e036de2 100644
--- a/var/spack/repos/builtin.mock/packages/perl/package.py
+++ b/var/spack/repos/builtin.mock/packages/perl/package.py
@@ -14,3 +14,5 @@ class Perl(Package):
     extendable = True

     version("0.0.0", md5="abcdef1234567890abcdef1234567890")
+
+    variant("shared", default=True, description="Build shared libraries")
diff --git a/var/spack/repos/builtin.mock/packages/pkg-a/package.py b/var/spack/repos/builtin.mock/packages/pkg-a/package.py
index d1ecba835dfc20..646172b778d129 100644
--- a/var/spack/repos/builtin.mock/packages/pkg-a/package.py
+++ b/var/spack/repos/builtin.mock/packages/pkg-a/package.py
@@ -25,6 +25,14 @@ class PkgA(AutotoolsPackage):

     variant("bvv", default=True, description="The good old BV variant")

+    variant(
+        "libs",
+        default="shared",
+        values=("shared", "static"),
+        multi=True,
+        description="Type of libraries to install",
+    )
+
     depends_on("pkg-b", when="foobar=bar")

     depends_on("test-dependency", type="test")
diff --git a/var/spack/repos/builtin.mock/packages/second-dependency-foo-bar-fee/package.py b/var/spack/repos/builtin.mock/packages/second-dependency-foo-bar-fee/package.py
new file mode 100644
index 00000000000000..4439639bf0863d
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/second-dependency-foo-bar-fee/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class SecondDependencyFooBarFee(Package):
+    """This package has a variant "foo", which is True by default, a variant "bar" which
+    is False by default, and a variant "fee" which is False by default.
+    """
+
+    homepage = "http://www.example.com"
+    url = "http://www.example.com/second-dependency-foo-bar-fee-1.0.tar.gz"
+
+    version("1.0", md5="2101234567890abcdefg1234567890abc")
+
+    variant("foo", default=True, description="")
+    variant("bar", default=False, description="")
+    variant("fee", default=False, description="")
From 2aa5a1643321d57326540e548d98b790d375b47d Mon Sep 17 00:00:00 2001
From: Thomas Madlener
Date: Wed, 6 Nov 2024 11:42:34 +0100
Subject: [PATCH 096/208] edm4hep: Add json variant for newer versions (#47180)

* edm4hep: Add json variant for newer versions

  Explicit option has been added to EDM4hep so we now expose it via a variant
  as well. We keep the old behavior where we unconditionally depended on
  nlohmann-json and implicitly built JSON support if we could detect it at the
  cmake stage

* Fix condition statement in when clause

* Use open version range to avoid fixing to single version

---------

Co-authored-by: Valentin Volkl
---
 var/spack/repos/builtin/packages/edm4hep/package.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/var/spack/repos/builtin/packages/edm4hep/package.py b/var/spack/repos/builtin/packages/edm4hep/package.py
index 838c11b101c8f8..97c32283b061b8 100644
--- a/var/spack/repos/builtin/packages/edm4hep/package.py
+++ b/var/spack/repos/builtin/packages/edm4hep/package.py
@@ -61,12 +61,20 @@ class Edm4hep(CMakePackage):
         description="Use the specified C++ standard when building.",
     )

+    variant(
+        "json",
+        default=True,
+        description="Build edm4hep with JSON support and edm4hep2json",
+        when="@0.99.2:",
+    )
+
     depends_on("cmake@3.3:", type="build")
     depends_on("cmake@3.23:", type="build", when="@0.10.3:")
     depends_on("python", type="build")

     depends_on("root@6.08:")
-    depends_on("nlohmann-json@3.10.5:")
+    depends_on("nlohmann-json@3.10.5:", when="@0.99.2: +json")
+    depends_on("nlohmann-json@3.10.5:", when="@:0.99.1")
     depends_on("podio@1:", when="@0.99:")
     depends_on("podio@0.15:", when="@:0.10.5")
     for _std in _cxxstd_values:
@@ -88,6 +96,8 @@ def cmake_args(self):
         # C++ Standard
         args.append(self.define("CMAKE_CXX_STANDARD", self.spec.variants["cxxstd"].value))
         args.append(self.define("BUILD_TESTING", self.run_tests))
+        if self.spec.satisfies("@0.99.2: +json"):
+            args.append(self.define_from_variant("EDM4HEP_WITH_JSON", "json"))
         return args

     def setup_run_environment(self, env):
From a31c525778773b8c6a6fc35617454d954a05d74d Mon Sep 17 00:00:00 2001
From: Harmen Stoppels
Date: Wed, 6 Nov 2024 11:49:14 +0100
Subject: [PATCH 097/208] llnl.util.filesystem.find: restore old error handling (#47463)

---
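Roughly, the restored contract (a sketch of the resulting behavior, not code
from the patch): a missing or unreadable root is reported as "nothing found"
rather than raising, e.g.

    from llnl.util.filesystem import find_max_depth

    find_max_depth("/no/such/root", ["*.txt"])  # -> [], no OSError

while per-entry permission errors and unresolvable symlinks are logged at
debug level via _log_file_access_issue() and skipped.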
lib/spack/llnl/util/filesystem.py | 31 +++++++++++-------------------- 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index b63b6e94b39a2e..24055c902b6225 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -1741,6 +1741,11 @@ def find(root, files, recursive=True, max_depth: Optional[int] = None): return result +def _log_file_access_issue(e: OSError, path: str) -> None: + errno_name = errno.errorcode.get(e.errno, "UNKNOWN") + tty.debug(f"find must skip {path}: {errno_name} {e}") + + @system_path_filter(arg_slice=slice(1)) def find_max_depth(root, globs, max_depth: Optional[int] = None): """Given a set of non-recursive glob file patterns, finds all @@ -1754,19 +1759,10 @@ def find_max_depth(root, globs, max_depth: Optional[int] = None): If ``globs`` is a list, files matching earlier entries are placed in the return value before files matching later entries. """ - # If root doesn't exist, then we say we found nothing. If it - # exists but is not a dir, we assume the user would want to - # know; likewise if it exists but we do not have permission to - # access it. try: stat_root = os.stat(root) - except OSError as e: - if e.errno == errno.ENOENT: - return [] - else: - raise - if not stat.S_ISDIR(stat_root.st_mode): - raise ValueError(f"{root} is not a directory") + except OSError: + return [] if max_depth is None: max_depth = sys.maxsize @@ -1790,10 +1786,6 @@ def _dir_id(stat_info): # https://github.com/python/cpython/blob/3.9/Python/fileutils.c return (stat_info.st_ino, stat_info.st_dev) - def _log_file_access_issue(e): - errno_name = errno.errorcode.get(e.errno, "UNKNOWN") - tty.debug(f"find must skip {dir_entry.path}: {errno_name} {str(e)}") - visited_dirs = set([_dir_id(stat_root)]) # Each queue item stores the depth and path @@ -1808,9 +1800,8 @@ def _log_file_access_issue(e): depth, next_dir = dir_queue.pop() try: dir_iter = os.scandir(next_dir) - except OSError: - # Most commonly, this would be a permissions issue, for - # example if we are scanning an external directory like /usr + except OSError as e: + _log_file_access_issue(e, next_dir) continue with dir_iter: @@ -1821,7 +1812,7 @@ def _log_file_access_issue(e): except OSError as e: # Possible permission issue, or a symlink that cannot # be resolved (ELOOP). - _log_file_access_issue(e) + _log_file_access_issue(e, dir_entry.path) continue if it_is_a_dir and (depth < max_depth): @@ -1837,7 +1828,7 @@ def _log_file_access_issue(e): else: stat_info = dir_entry.stat(follow_symlinks=True) except OSError as e: - _log_file_access_issue(e) + _log_file_access_issue(e, dir_entry.path) continue dir_id = _dir_id(stat_info) From d09b185522d663d24273d2e34b8d7cb0d877eb76 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 6 Nov 2024 15:35:04 +0100 Subject: [PATCH 098/208] Fix various bootstrap/concretizer import issues (#47467) --- lib/spack/spack/cmd/__init__.py | 1 + lib/spack/spack/concretize.py | 13 ++++++++----- lib/spack/spack/environment/environment.py | 2 -- lib/spack/spack/solver/asp.py | 3 +-- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 7cf032c90749a0..e9df5fc18955b8 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -17,6 +17,7 @@ from llnl.util.tty.colify import colify from llnl.util.tty.color import colorize +import spack.concretize import spack.config # breaks a cycle. 
import spack.environment as ev import spack.error diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 122e6c59c03dbc..cccfc02bd046a3 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -10,8 +10,11 @@ import llnl.util.tty as tty +import spack.compilers import spack.config import spack.error +import spack.repo +import spack.util.parallel from spack.spec import ArchSpec, CompilerSpec, Spec CHECK_COMPILER_EXISTENCE = True @@ -87,6 +90,8 @@ def concretize_together_when_possible( tests: list of package names for which to consider tests dependencies. If True, all nodes will have test dependencies. If False, test dependencies will be disregarded. """ + import spack.solver.asp + to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list] old_concrete_to_abstract = { concrete: abstract for (abstract, concrete) in spec_list if concrete @@ -119,6 +124,8 @@ def concretize_separately( tests: list of package names for which to consider tests dependencies. If True, all nodes will have test dependencies. If False, test dependencies will be disregarded. """ + import spack.bootstrap + to_concretize = [abstract for abstract, concrete in spec_list if not concrete] args = [ (i, str(abstract), tests) @@ -155,11 +162,7 @@ def concretize_separately( for j, (i, concrete, duration) in enumerate( spack.util.parallel.imap_unordered( - spack.concretize._concretize_task, - args, - processes=num_procs, - debug=tty.is_debug(), - maxtaskperchild=1, + _concretize_task, args, processes=num_procs, debug=tty.is_debug(), maxtaskperchild=1 ) ): ret.append((i, concrete)) diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 7cf1057fa5c62b..de4bc851006e8c 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -24,7 +24,6 @@ import spack import spack.caches -import spack.compilers import spack.concretize import spack.config import spack.deptypes as dt @@ -43,7 +42,6 @@ import spack.util.environment import spack.util.hash import spack.util.lock as lk -import spack.util.parallel import spack.util.path import spack.util.spack_json as sjson import spack.util.spack_yaml as syaml diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 56014717ddc940..b723b6bbb22023 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -27,7 +27,6 @@ import spack import spack.binary_distribution -import spack.bootstrap.core import spack.compilers import spack.concretize import spack.config @@ -816,7 +815,7 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre solve, and the internal statistics from clingo. 
""" # avoid circular import - import spack.bootstrap + import spack.bootstrap.core output = output or DEFAULT_OUTPUT_CONFIGURATION timer = spack.util.timer.Timer() From ee2723dc46055efb225df1437f1e60de85f77432 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Wed, 6 Nov 2024 10:09:40 -0600 Subject: [PATCH 099/208] rivet: add through v4.0.2 (incl yoda: add through v2.0.2) (#47383) * yoda: add v2.0.1, v2.0.2 * rivet: add v3.1.9, v3.1.10, v4.0.0, v4.0.1, v4.0.2 * rivet: yoda@:1 when @:3; conflicts hepmc3@3.3.0 when @:4.0.0 * rivet: fix style * rivet: hepmc=2 only when @:3; use libs.directories[0] * hepmc3: def libs * [@spackbot] updating style on behalf of wdconinc --------- Co-authored-by: wdconinc --- .../repos/builtin/packages/hepmc3/package.py | 4 +++ .../repos/builtin/packages/rivet/package.py | 27 +++++++++++++++++-- .../repos/builtin/packages/yoda/package.py | 8 +++--- 3 files changed, 32 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/hepmc3/package.py b/var/spack/repos/builtin/packages/hepmc3/package.py index 52759ac037d66d..03da34d3f8c545 100644 --- a/var/spack/repos/builtin/packages/hepmc3/package.py +++ b/var/spack/repos/builtin/packages/hepmc3/package.py @@ -58,6 +58,10 @@ class Hepmc3(CMakePackage): conflicts("%gcc@9.3.0", when="@:3.1.1") patch("ba38f14d8f56c16cc4105d98f6d4540c928c6150.patch", when="@3.1.2:3.2.1 %gcc@9.3.0") + @property + def libs(self): + return find_libraries(["libHepMC3", "libHepMC3Search"], root=self.prefix, recursive=True) + def cmake_args(self): spec = self.spec from_variant = self.define_from_variant diff --git a/var/spack/repos/builtin/packages/rivet/package.py b/var/spack/repos/builtin/packages/rivet/package.py index 00ae0783dca58e..573888d0c47c22 100644 --- a/var/spack/repos/builtin/packages/rivet/package.py +++ b/var/spack/repos/builtin/packages/rivet/package.py @@ -19,6 +19,11 @@ class Rivet(AutotoolsPackage): license("GPL-3.0-or-later") + version("4.0.2", sha256="65a3b36f42bff782ed2767930e669e09b140899605d7972fc8f77785b4a882c0") + version("4.0.1", sha256="4e8692d6e8a53961c77983eb6ba4893c3765cf23f705789e4d865be4892eff79") + version("4.0.0", sha256="d3c42d9b83ede3e7f4b534535345c2e06e6dafb851454c2b0a5d2331ab0f04d0") + version("3.1.10", sha256="458b8e0df1de738e9972d24b260eaa087df12c99d4fe9dee5377d47ea6a49919") + version("3.1.9", sha256="f6532045da61eeb2adc20a9abc4166b4b2d41ab2c1ca5b500cd616bb1b92e7b1") version("3.1.8", sha256="75b3f3d419ca6388d1fd2ec0eda7e1f90f324b996ccf0591f48a5d2e28dccc13") version("3.1.7", sha256="27c7dbbcb5fd7ee81caf136daf4e960bca0ec255d9fa1abe602f4d430861b27a") version("3.1.6", sha256="1cf6ebb6a79d181c441d1d0c7c6d623c423817c61093f36f21adaae23e679090") @@ -34,7 +39,12 @@ class Rivet(AutotoolsPackage): depends_on("c", type="build") # generated depends_on("cxx", type="build") # generated - variant("hepmc", default="2", values=("2", "3"), description="HepMC version to link against") + variant( + "hepmc", + default="2", + values=(conditional("2", when="@:3"), "3"), + description="HepMC version to link against", + ) # According to A. 
Buckley (main Rivet developer): # "typically a given Rivet version will work with @@ -48,8 +58,13 @@ class Rivet(AutotoolsPackage): depends_on("yoda@1.8.2", when="@3.1.1") depends_on("yoda@1.8.3", when="@3.1.2") depends_on("yoda@1.8.5:", when="@3.1.3:") - depends_on("yoda@1.9.5:", when="@3.1.6:") + depends_on("yoda@1.9.6:", when="@3.1.6:") depends_on("yoda@1.9.7:", when="@3.1.7:") + depends_on("yoda@1.9.8:", when="@3.1.8:") + depends_on("yoda@1.9.9:", when="@3.1.9:") + depends_on("yoda@1.9.10:", when="@3.1.10:") + depends_on("yoda@:1", when="@:3") + depends_on("yoda@2.0.1:", when="@4.0.0:") # The following versions were not a part of LCG stack # and thus the exact version of YODA is unknown @@ -57,9 +72,13 @@ class Rivet(AutotoolsPackage): depends_on("hepmc", when="hepmc=2") depends_on("hepmc3", when="hepmc=3") + conflicts( + "hepmc@3.3.0", when="@:4.0.0 hepmc=3", msg="patch-level zero requires at least 4.0.1" + ) depends_on("fastjet plugins=cxx") depends_on("fastjet@3.4.0:", when="@3.1.7:") depends_on("fjcontrib") + depends_on("highfive", when="@4:") depends_on("python", type=("build", "run")) depends_on("py-cython@0.24.0:", type="build") depends_on("swig", type="build") @@ -104,12 +123,16 @@ def configure_args(self): args += ["--with-hepmc=" + self.spec["hepmc"].prefix] else: args += ["--with-hepmc3=" + self.spec["hepmc3"].prefix] + args += ["--with-hepmc3-libpath=" + self.spec["hepmc3"].libs.directories[0]] args += ["--with-fastjet=" + self.spec["fastjet"].prefix] args += ["--with-yoda=" + self.spec["yoda"].prefix] args += ["--with-fjcontrib=" + self.spec["fjcontrib"].prefix] + if self.spec.satisfies("^highfive"): + args += ["--with-highfive=" + self.spec["highfive"].prefix] + args += ["--disable-pdfmanual"] return args diff --git a/var/spack/repos/builtin/packages/yoda/package.py b/var/spack/repos/builtin/packages/yoda/package.py index a545be413a6d75..34bcda42d3c7fd 100644 --- a/var/spack/repos/builtin/packages/yoda/package.py +++ b/var/spack/repos/builtin/packages/yoda/package.py @@ -17,12 +17,10 @@ class Yoda(AutotoolsPackage): license("GPL-3.0-or-later") + version("2.0.2", sha256="31a41413641189814ff3c6bbb96ac5d17d2b68734fe327d06794cdbd3a540399") + version("2.0.1", sha256="ae5a78eaae5574a5159d4058839d0983c9923558bfc88fbce21d251fd925d260") version("2.0.0", sha256="680f43dabebb3167ce1c5dee72d1c2c285c3190751245aa51e3260a005a99575") - version( - "1.9.10", - sha256="0a708ee9d704945d3387cc437b15ffddf382c70fe5bab39ed2bdbf83c2c28c6f", - preferred=True, - ) + version("1.9.10", sha256="0a708ee9d704945d3387cc437b15ffddf382c70fe5bab39ed2bdbf83c2c28c6f") version("1.9.9", sha256="ebcad55369a1cedcee3a2de059407c851652ba44495113f5c09d8c2e57f516aa") version("1.9.8", sha256="7bc3062468abba50aff3ecb8b22ce677196036009890688ef4533aaa7f92e6e4") version("1.9.7", sha256="8d07bb04dcb79364858718a18203452d8d9fa00029fa94239eafa8529032b8ff") From e62cf9c45b213dcfc88e5f33b99bbf14340c472e Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 6 Nov 2024 17:18:58 +0100 Subject: [PATCH 100/208] Fix `spack -c ` when env active (#47403) Set command line scopes last in _main, so they are higher scopes Restore the global configuration in a spawned process by inspecting the result of ctx.get_start_method() Add the ability to pass a mp.context to PackageInstallContext. 
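For illustration, the new keyword in use (a sketch that mirrors the
qa/config_state.py test added below; `pkg` and `child` stand in for a real
package and worker function):

    import multiprocessing as mp
    import spack.subprocess_context

    ctx = mp.get_context("spawn")
    # serialize is derived from ctx.get_start_method(), so globals are
    # pickled for "spawn" and passed through as-is for "fork"
    state = spack.subprocess_context.PackageInstallContext(pkg, ctx=ctx)
    p = ctx.Process(target=child, args=(state,))  # child calls state.restore()
    p.start()
    p.join()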
Add shell-tests to check overriding the configuration: - Using both -c and -C from command line - With and without an environment active --- lib/spack/spack/config.py | 15 ++----- lib/spack/spack/main.py | 13 +++--- lib/spack/spack/subprocess_context.py | 57 ++++++++++++++------------- share/spack/qa/config_state.py | 36 +++++++++++++++++ share/spack/qa/run-unit-tests | 2 +- share/spack/qa/setup-env-test.sh | 17 ++++++++ 6 files changed, 94 insertions(+), 46 deletions(-) create mode 100644 share/spack/qa/config_state.py diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index afd8f30baccbb9..1cafc1f738a337 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -427,6 +427,10 @@ def __init__(self, *scopes: ConfigScope) -> None: self.push_scope(scope) self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list) + def ensure_unwrapped(self) -> "Configuration": + """Ensure we unwrap this object from any dynamic wrapper (like Singleton)""" + return self + @_config_mutator def push_scope(self, scope: ConfigScope) -> None: """Add a higher precedence scope to the Configuration.""" @@ -752,10 +756,6 @@ def override( assert scope is overrides -#: configuration scopes added on the command line set by ``spack.main.main()`` -COMMAND_LINE_SCOPES: List[str] = [] - - def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None: """Add a platform-specific subdirectory for the current platform.""" platform = spack.platforms.host().name @@ -860,13 +860,6 @@ def create() -> Configuration: # Each scope can have per-platfom overrides in subdirectories _add_platform_scope(cfg, name, path) - # add command-line scopes - _add_command_line_scopes(cfg, COMMAND_LINE_SCOPES) - - # we make a special scope for spack commands so that they can - # override configuration options. 
- cfg.push_scope(InternalConfigScope("command_line")) - return cfg diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index fc5423b5a26a98..7cab47d77f7bf1 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -911,13 +911,6 @@ def _main(argv=None): # Make spack load / env activate work on macOS restore_macos_dyld_vars() - # make spack.config aware of any command line configuration scopes - if args.config_scopes: - spack.config.COMMAND_LINE_SCOPES = args.config_scopes - - # ensure options on spack command come before everything - setup_main_options(args) - # activate an environment if one was specified on the command line env_format_error = None if not args.no_env: @@ -931,6 +924,12 @@ def _main(argv=None): e.print_context() env_format_error = e + # Push scopes from the command line last + if args.config_scopes: + spack.config._add_command_line_scopes(spack.config.CONFIG, args.config_scopes) + spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line")) + setup_main_options(args) + # ------------------------------------------------------------------------ # Things that require configuration should go below here # ------------------------------------------------------------------------ diff --git a/lib/spack/spack/subprocess_context.py b/lib/spack/spack/subprocess_context.py index c823e657036fad..507045e42faf5b 100644 --- a/lib/spack/spack/subprocess_context.py +++ b/lib/spack/spack/subprocess_context.py @@ -17,7 +17,6 @@ import multiprocessing import pickle import pydoc -import sys from types import ModuleType import spack.config @@ -27,9 +26,6 @@ import spack.repo import spack.store -_SERIALIZE = sys.platform == "win32" or (sys.version_info >= (3, 8) and sys.platform == "darwin") - - patches = None @@ -56,7 +52,7 @@ def _restore_and_run(self, fn, test_state): fn() def create(self): - test_state = TestState() + test_state = GlobalStateMarshaler() return multiprocessing.Process(target=self._restore_and_run, args=(self.fn, test_state)) @@ -65,49 +61,56 @@ class PackageInstallContext: needs to be transmitted to a child process. """ - def __init__(self, pkg): - if _SERIALIZE: + def __init__(self, pkg, *, ctx=None): + ctx = ctx or multiprocessing.get_context() + self.serialize = ctx.get_start_method() != "fork" + if self.serialize: self.serialized_pkg = serialize(pkg) + self.global_state = GlobalStateMarshaler() self.serialized_env = serialize(spack.environment.active_environment()) else: self.pkg = pkg + self.global_state = None self.env = spack.environment.active_environment() self.spack_working_dir = spack.paths.spack_working_dir - self.test_state = TestState() def restore(self): - self.test_state.restore() spack.paths.spack_working_dir = self.spack_working_dir - env = pickle.load(self.serialized_env) if _SERIALIZE else self.env + env = pickle.load(self.serialized_env) if self.serialize else self.env + # Activating the environment modifies the global configuration, so globals have to + # be restored afterward, in case other modifications were applied on top (e.g. 
from + # command line) if env: spack.environment.activate(env) + + if self.serialize: + self.global_state.restore() + # Order of operation is important, since the package might be retrieved # from a repo defined within the environment configuration - pkg = pickle.load(self.serialized_pkg) if _SERIALIZE else self.pkg + pkg = pickle.load(self.serialized_pkg) if self.serialize else self.pkg return pkg -class TestState: - """Spack tests may modify state that is normally read from disk in memory; - this object is responsible for properly serializing that state to be - applied to a subprocess. This isn't needed outside of a testing environment - but this logic is designed to behave the same inside or outside of tests. +class GlobalStateMarshaler: + """Class to serialize and restore global state for child processes. + + Spack may modify state that is normally read from disk or command line in memory; + this object is responsible for properly serializing that state to be applied to a subprocess. """ def __init__(self): - if _SERIALIZE: - self.config = spack.config.CONFIG - self.platform = spack.platforms.host - self.test_patches = store_patches() - self.store = spack.store.STORE + self.config = spack.config.CONFIG.ensure_unwrapped() + self.platform = spack.platforms.host + self.test_patches = store_patches() + self.store = spack.store.STORE def restore(self): - if _SERIALIZE: - spack.config.CONFIG = self.config - spack.repo.PATH = spack.repo.create(self.config) - spack.platforms.host = self.platform - spack.store.STORE = self.store - self.test_patches.restore() + spack.config.CONFIG = self.config + spack.repo.PATH = spack.repo.create(self.config) + spack.platforms.host = self.platform + spack.store.STORE = self.store + self.test_patches.restore() class TestPatches: diff --git a/share/spack/qa/config_state.py b/share/spack/qa/config_state.py new file mode 100644 index 00000000000000..0c77c31cc6ac28 --- /dev/null +++ b/share/spack/qa/config_state.py @@ -0,0 +1,36 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Used to test correct application of config line scopes in various cases. + +The option `config:cache` is supposed to be False, and overridden to True +from the command line. 
+""" +import multiprocessing as mp + +import spack.config +import spack.subprocess_context + + +def show_config(serialized_state): + _ = serialized_state.restore() + result = spack.config.CONFIG.get("config:ccache") + if result is not True: + raise RuntimeError(f"Expected config:ccache:true, but got {result}") + + +if __name__ == "__main__": + print("Testing spawn") + ctx = mp.get_context("spawn") + serialized_state = spack.subprocess_context.PackageInstallContext(None, ctx=ctx) + p = ctx.Process(target=show_config, args=(serialized_state,)) + p.start() + p.join() + + print("Testing fork") + ctx = mp.get_context("fork") + serialized_state = spack.subprocess_context.PackageInstallContext(None, ctx=ctx) + p = ctx.Process(target=show_config, args=(serialized_state,)) + p.start() + p.join() diff --git a/share/spack/qa/run-unit-tests b/share/spack/qa/run-unit-tests index 28e34a71208995..71d2979ead22ab 100755 --- a/share/spack/qa/run-unit-tests +++ b/share/spack/qa/run-unit-tests @@ -52,7 +52,7 @@ if [[ "$UNIT_TEST_COVERAGE" != "true" ]] && python -m pytest -VV 2>&1 | grep xdi fi # We are running pytest-cov after the addition of pytest-xdist, since it integrates -# other pugins for pytest automatically. We still need to use "coverage" explicitly +# other plugins for pytest automatically. We still need to use "coverage" explicitly # for the commands above. # # There is a need to pass the configuration file explicitly due to a bug: diff --git a/share/spack/qa/setup-env-test.sh b/share/spack/qa/setup-env-test.sh index ec24166d52dae6..734835e07aafd0 100755 --- a/share/spack/qa/setup-env-test.sh +++ b/share/spack/qa/setup-env-test.sh @@ -207,3 +207,20 @@ fails spack env deactivate echo "Correct error exit codes for unit-test when it fails" fails spack unit-test fail + +title "Testing config override from command line, outside of an environment" +contains 'True' spack -c config:ccache:true python -c "import spack.config;print(spack.config.CONFIG.get('config:ccache'))" +contains 'True' spack -C "$SHARE_DIR/qa/configuration" python -c "import spack.config;print(spack.config.CONFIG.get('config:ccache'))" +succeeds spack -c config:ccache:true python "$SHARE_DIR/qa/config_state.py" +succeeds spack -C "$SHARE_DIR/qa/configuration" python "$SHARE_DIR/qa/config_state.py" + +title "Testing config override from command line, inside an environment" +spack env activate --temp +spack config add "config:ccache:false" + +contains 'True' spack -c config:ccache:true python -c "import spack.config;print(spack.config.CONFIG.get('config:ccache'))" +contains 'True' spack -C "$SHARE_DIR/qa/configuration" python -c "import spack.config;print(spack.config.CONFIG.get('config:ccache'))" +succeeds spack -c config:ccache:true python "$SHARE_DIR/qa/config_state.py" +succeeds spack -C "$SHARE_DIR/qa/configuration" python "$SHARE_DIR/qa/config_state.py" + +spack env deactivate From d1f313342e21e5236e915628cad20d4f9f468644 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Wed, 6 Nov 2024 09:15:52 -0800 Subject: [PATCH 101/208] tau: add v2.34 (#47471) --- var/spack/repos/builtin/packages/tau/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index b2bcac2891be96..9081b5965ad6fd 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -28,6 +28,7 @@ class Tau(Package): license("MIT") version("master", branch="master") 
+ version("2.34", sha256="229ab425e0532e635a0be76d60b8aa613adf7596d15a9ced0b87e7f243bb2132") version("2.33.2", sha256="8ee81fe75507612379f70033183bed2a90e1245554b2a78196b6c5145da44f27") version("2.33.1", sha256="13cc5138e110932f34f02ddf548db91d8219ccb7ff9a84187f0790e40a502403") version("2.33", sha256="04d9d67adb495bc1ea56561f33c5ce5ba44f51cc7f64996f65bd446fac5483d9") From 9049ffdc7ac806f6197e092af10ace25af203a06 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Wed, 6 Nov 2024 13:52:30 -0600 Subject: [PATCH 102/208] gsoap: add v2.8.135 (#47415) * gsoap: add v2.8.135 --- .../repos/builtin/packages/gsoap/package.py | 31 ++++++++++++++----- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/gsoap/package.py b/var/spack/repos/builtin/packages/gsoap/package.py index 6eface4e36b683..d1d8b3d1e4deae 100644 --- a/var/spack/repos/builtin/packages/gsoap/package.py +++ b/var/spack/repos/builtin/packages/gsoap/package.py @@ -17,13 +17,30 @@ class Gsoap(AutotoolsPackage, SourceforgePackage): maintainers("greenc-FNAL", "gartung", "marcmengel", "vitodb") - version("2.8.127", sha256="25ecad1bbc363494eb7ea95a68508e4c93cc20596fad9ebc196c6572bbbd3c08") - version("2.8.124", sha256="4b798780989338f665ef8e171bbcc422a271004d62d5852666d5eeca33a6a636") - version("2.8.119", sha256="8997c43b599a2bfe4a788e303a5dd24bbf5992fd06d56f606ca680ca5b0070cf") - version("2.8.114", sha256="aa70a999258100c170a3f8750c1f91318a477d440f6a28117f68bc1ded32327f") - version("2.8.113", sha256="e73782b618303cf55ea6a45751b75ba96797a7a12967ed9d02e6d5761977e73a") - version("2.8.112", sha256="05345312e0bb4d81c98ae63b97cff9eb097f38dafe09356189f9d8e235c54095") - version("2.8.111", sha256="f1670c7e3aeaa66bc5658539fbd162e5099f022666855ef2b2c2bac07fec4bd3") + version("2.8.135", sha256="b11757e405d55d4674dfbf88c4fa6d7e24155cf64ed8ed578ccad2f2b555e98d") + with default_args(deprecated=True): + # Unavailable for direct download anymore + version( + "2.8.127", sha256="25ecad1bbc363494eb7ea95a68508e4c93cc20596fad9ebc196c6572bbbd3c08" + ) + version( + "2.8.124", sha256="4b798780989338f665ef8e171bbcc422a271004d62d5852666d5eeca33a6a636" + ) + version( + "2.8.119", sha256="8997c43b599a2bfe4a788e303a5dd24bbf5992fd06d56f606ca680ca5b0070cf" + ) + version( + "2.8.114", sha256="aa70a999258100c170a3f8750c1f91318a477d440f6a28117f68bc1ded32327f" + ) + version( + "2.8.113", sha256="e73782b618303cf55ea6a45751b75ba96797a7a12967ed9d02e6d5761977e73a" + ) + version( + "2.8.112", sha256="05345312e0bb4d81c98ae63b97cff9eb097f38dafe09356189f9d8e235c54095" + ) + version( + "2.8.111", sha256="f1670c7e3aeaa66bc5658539fbd162e5099f022666855ef2b2c2bac07fec4bd3" + ) depends_on("openssl") depends_on("pkgconfig", type="build") From 8f4a0718bf60eb3eb5284331d93b28233e22d19f Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Wed, 6 Nov 2024 17:36:59 -0500 Subject: [PATCH 103/208] omega-h: new version and cuda conflicts for prior versions (#47473) * omegah: add version 10.8.6 * omegah: cuda without kokkos conflict * omegah: test with latest version in ci --- .../cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml | 4 ++-- .../gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml | 2 +- share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml | 2 +- var/spack/repos/builtin/packages/omega-h/package.py | 8 ++++++++ 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml index d8e8091676644d..24a488fbe921bf 
100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml @@ -225,7 +225,7 @@ spack: - magma +cuda cuda_arch=75 - mfem +cuda cuda_arch=75 - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=75 - - "omega-h@:9 +cuda cuda_arch=75" # https://github.com/SCOREC/omega_h/issues/116 + - omega-h +cuda cuda_arch=75 - parsec +cuda cuda_arch=75 - petsc +cuda cuda_arch=75 - py-torch +cuda cuda_arch=75 @@ -274,7 +274,7 @@ spack: - magma +cuda cuda_arch=80 - mfem +cuda cuda_arch=80 - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=80 - - "omega-h@:9 +cuda cuda_arch=80" # https://github.com/SCOREC/omega_h/issues/116 + - omega-h +cuda cuda_arch=80 - parsec +cuda cuda_arch=80 - petsc +cuda cuda_arch=80 - py-torch +cuda cuda_arch=80 diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index 6c30947d4ffdf5..a770b0a299a13d 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -233,7 +233,7 @@ spack: - magma +cuda cuda_arch=70 - mfem +cuda cuda_arch=70 - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=70 - - "omega-h@:9 +cuda cuda_arch=70" # https://github.com/SCOREC/omega_h/issues/116 + - omega-h +cuda cuda_arch=70 - parsec +cuda cuda_arch=70 - petsc +cuda cuda_arch=70 - raja +cuda cuda_arch=70 diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index a3ceb56a35b2d0..0b81e53d568d1d 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -239,7 +239,7 @@ spack: - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=80 ^cusz +cuda cuda_arch=80 - magma +cuda cuda_arch=80 - mfem +cuda cuda_arch=80 - - "omega-h@:9 +cuda cuda_arch=80" # https://github.com/SCOREC/omega_h/issues/116 + - omega-h +cuda cuda_arch=80 - parsec +cuda cuda_arch=80 - petsc +cuda cuda_arch=80 - py-torch +cuda cuda_arch=80 diff --git a/var/spack/repos/builtin/packages/omega-h/package.py b/var/spack/repos/builtin/packages/omega-h/package.py index 67221b389af2fd..a1e3a2363df611 100644 --- a/var/spack/repos/builtin/packages/omega-h/package.py +++ b/var/spack/repos/builtin/packages/omega-h/package.py @@ -19,6 +19,11 @@ class OmegaH(CMakePackage, CudaPackage): maintainers("cwsmith") tags = ["e4s"] version("main", branch="main") + version( + "10.8.6-scorec", + commit="a730c78e516d7f6cca4f8b4e4e0a5eb8020f9ad9", + git="https://github.com/SCOREC/omega_h.git", + ) version( "10.8.5-scorec", commit="62026fc305356abb5e02a9fce3fead9cf5077fbe", @@ -84,6 +89,9 @@ class OmegaH(CMakePackage, CudaPackage): # Single, broken CUDA version. 
conflicts("^cuda@11.2", msg="See https://github.com/sandialabs/omega_h/issues/366") + # https://github.com/SCOREC/omega_h/pull/118 + conflicts("@10.5:10.8.5 +cuda~kokkos") + # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=86610 conflicts("%gcc@8:8.2", when="@:9.22.1") From 57a90c91a451256bae441bd2097924374e9584b8 Mon Sep 17 00:00:00 2001 From: Jon Rood Date: Wed, 6 Nov 2024 15:47:44 -0700 Subject: [PATCH 104/208] nalu-wind: fix hypre constraints (#47474) --- var/spack/repos/builtin/packages/nalu-wind/package.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/nalu-wind/package.py b/var/spack/repos/builtin/packages/nalu-wind/package.py index 3ebf50b3af02d5..f7a481f2d60fca 100644 --- a/var/spack/repos/builtin/packages/nalu-wind/package.py +++ b/var/spack/repos/builtin/packages/nalu-wind/package.py @@ -93,16 +93,16 @@ class NaluWind(CMakePackage, CudaPackage, ROCmPackage): when="+cuda cuda_arch={0}".format(_arch), ) depends_on( - "hypre@develop +mpi+cuda~int64~superlu-dist cuda_arch={0}".format(_arch), + "hypre@2.30.0: +cuda cuda_arch={0}".format(_arch), when="+hypre+cuda cuda_arch={0}".format(_arch), ) for _arch in ROCmPackage.amdgpu_targets: depends_on( - "trilinos@13.4: ~shared+rocm+rocm_rdc amdgpu_target={0}".format(_arch), + "trilinos~shared+rocm+rocm_rdc amdgpu_target={0}".format(_arch), when="+rocm amdgpu_target={0}".format(_arch), ) depends_on( - "hypre+rocm amdgpu_target={0}".format(_arch), + "hypre@2.30.0: +rocm amdgpu_target={0}".format(_arch), when="+hypre+rocm amdgpu_target={0}".format(_arch), ) From 73219e4b02e6561bbeef379081f63efb0dc78817 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 7 Nov 2024 00:22:26 +0100 Subject: [PATCH 105/208] `llnl.util.filesystem.find`: multiple entrypoints (#47436) You can now provide multiple roots to a single `find()` call and all of them will be searched. The roots can overlap (e.g. can be parents of one another). This also adds a library function for taking a set of regular expression patterns and creating a single OR expression (and that library function is used in `find` to improve its performance). --- lib/spack/llnl/util/filesystem.py | 169 +++++++++---------- lib/spack/llnl/util/lang.py | 28 +++ lib/spack/spack/test/llnl/util/filesystem.py | 96 ++++++----- lib/spack/spack/test/llnl/util/lang.py | 15 ++ 4 files changed, 178 insertions(+), 130 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 24055c902b6225..a8f07824c9660e 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -20,11 +20,11 @@ import tempfile from contextlib import contextmanager from itertools import accumulate -from typing import Callable, Iterable, List, Match, Optional, Tuple, Union +from typing import Callable, Deque, Dict, Iterable, List, Match, Optional, Set, Tuple, Union import llnl.util.symlink from llnl.util import tty -from llnl.util.lang import dedupe, memoized +from llnl.util.lang import dedupe, fnmatch_translate_multiple, memoized from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink from ..path import path_to_os_path, system_path_filter @@ -1673,32 +1673,40 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) return FindFirstFile(root, *files, bfs_depth=bfs_depth).find() -def find(root, files, recursive=True, max_depth: Optional[int] = None): - """Search for ``files`` starting from the ``root`` directory. 
- - Like GNU/BSD find but written entirely in Python. - - Specifically this behaves like `find -type f`: it only returns - results that are files. When searching recursively, this behaves - as `find` with the `-L` option (follows symlinks). +def find( + root: Union[str, List[str]], + files: Union[str, List[str]], + recursive: bool = True, + max_depth: Optional[int] = None, +) -> List[str]: + """Finds all non-directory files matching the filename patterns from ``files`` starting from + ``root``. This function returns a deterministic result for the same input and directory + structure when run multiple times. Symlinked directories are followed, and unique directories + are searched only once. Each matching file is returned only once at lowest depth in case + multiple paths exist due to symlinked directories. The function has similarities to the Unix + ``find`` utility. Examples: .. code-block:: console - $ find -L /usr -name python + $ find -L /usr -name python3 -type f - is equivalent to: + is roughly equivalent to + + >>> find("/usr", "python3") - >>> find('/usr', 'python') + with the notable difference that this function only lists a single path to each file in case of + symlinked directories. .. code-block:: console - $ find /usr/local/bin -maxdepth 1 -name python + $ find -L /usr/local/bin /usr/local/sbin -maxdepth 1 '(' -name python3 -o -name getcap \\ + ')' -type f - is equivalent to: + is roughly equivalent to: - >>> find('/usr/local/bin', 'python', recursive=False) + >>> find(["/usr/local/bin", "/usr/local/sbin"], ["python3", "getcap"], recursive=False) Accepts any glob characters accepted by fnmatch: @@ -1712,17 +1720,17 @@ def find(root, files, recursive=True, max_depth: Optional[int] = None): ========== ==================================== Parameters: - root (str): The root directory to start searching from - files (str or collections.abc.Sequence): Library name(s) to search for - recursive (bool): if False search only root folder, - if True descends top-down from the root. Defaults to True. - max_depth (int): if set, don't search below this depth. Cannot be set - if recursive is False + root: One or more root directories to start searching from + files: One or more filename patterns to search for + recursive: if False search only root, if True descends from roots. Defaults to True. + max_depth: if set, don't search below this depth. Cannot be set if recursive is False - Returns: - list: The files that have been found + Returns a list of absolute, matching file paths. 
""" - if isinstance(files, str): + if not isinstance(root, list): + root = [root] + + if not isinstance(files, list): files = [files] # If recursive is false, max_depth can only be None or 0 @@ -1734,10 +1742,9 @@ def find(root, files, recursive=True, max_depth: Optional[int] = None): elif max_depth is None: max_depth = sys.maxsize - tty.debug(f"Find (max depth = {max_depth}): {root} {str(files)}") - result = find_max_depth(root, files, max_depth) - - tty.debug(f"Find complete: {root} {str(files)}") + tty.debug(f"Find (max depth = {max_depth}): {root} {files}") + result = _find_max_depth(root, files, max_depth) + tty.debug(f"Find complete: {root} {files}") return result @@ -1746,56 +1753,36 @@ def _log_file_access_issue(e: OSError, path: str) -> None: tty.debug(f"find must skip {path}: {errno_name} {e}") -@system_path_filter(arg_slice=slice(1)) -def find_max_depth(root, globs, max_depth: Optional[int] = None): - """Given a set of non-recursive glob file patterns, finds all - files matching those patterns up to a maximum specified depth. - - If a directory has a name which matches an input pattern, it will - not be included in the results. +def _dir_id(s: os.stat_result) -> Tuple[int, int]: + # Note: on windows, st_ino is the file index and st_dev is the volume serial number. See + # https://github.com/python/cpython/blob/3.9/Python/fileutils.c + return (s.st_ino, s.st_dev) - If ``max_depth`` is specified, does not search below that depth. - If ``globs`` is a list, files matching earlier entries are placed - in the return value before files matching later entries. - """ - try: - stat_root = os.stat(root) - except OSError: - return [] +def _find_max_depth(roots: List[str], globs: List[str], max_depth: int = sys.maxsize) -> List[str]: + """See ``find`` for the public API.""" + # Apply normcase to file patterns and filenames to respect case insensitive filesystems + regex, groups = fnmatch_translate_multiple([os.path.normcase(x) for x in globs]) + # Ordered dictionary that keeps track of the files found for each pattern + capture_group_to_paths: Dict[str, List[str]] = {group: [] for group in groups} + # Ensure returned paths are always absolute + roots = [os.path.abspath(r) for r in roots] + # Breadth-first search queue. Each element is a tuple of (depth, directory) + dir_queue: Deque[Tuple[int, str]] = collections.deque() + # Set of visited directories. Each element is a tuple of (inode, device) + visited_dirs: Set[Tuple[int, int]] = set() - if max_depth is None: - max_depth = sys.maxsize + for root in roots: + try: + stat_root = os.stat(root) + except OSError as e: + _log_file_access_issue(e, root) + continue + dir_id = _dir_id(stat_root) + if dir_id not in visited_dirs: + dir_queue.appendleft((0, root)) + visited_dirs.add(dir_id) - if isinstance(globs, str): - globs = [globs] - # Apply normcase to regular expressions and to the filenames: - # this respects case-sensitivity semantics of different OSes - # (e.g. file search is typically case-insensitive on Windows) - regexes = [re.compile(fnmatch.translate(os.path.normcase(x))) for x in globs] - - # Note later calls to os.scandir etc. return abspaths if the - # input is absolute, see https://docs.python.org/3/library/os.html#os.DirEntry.path - root = os.path.abspath(root) - - found_files = collections.defaultdict(list) - - def _dir_id(stat_info): - # Note: on windows, st_ino is the file index and st_dev - # is the volume serial number. 
See
-        # https://github.com/python/cpython/blob/3.9/Python/fileutils.c
-        return (stat_info.st_ino, stat_info.st_dev)
-
-    visited_dirs = set([_dir_id(stat_root)])
-
-    # Each queue item stores the depth and path
-    # This achieves a consistent traversal order by iterating through
-    # each directory in alphabetical order.
-    # This also traverses in BFS order to ensure finding the shortest
-    # path to any file (or one of the shortest paths, if there are
-    # several - the one returned will be consistent given the prior
-    # point).
-    dir_queue = collections.deque([(0, root)])
     while dir_queue:
         depth, next_dir = dir_queue.pop()
         try:
@@ -1810,20 +1797,18 @@ def _dir_id(stat_info):
             try:
                 it_is_a_dir = dir_entry.is_dir(follow_symlinks=True)
             except OSError as e:
-                # Possible permission issue, or a symlink that cannot
-                # be resolved (ELOOP).
+                # Possible permission issue, or a symlink that cannot be resolved (ELOOP).
                 _log_file_access_issue(e, dir_entry.path)
                 continue

-            if it_is_a_dir and (depth < max_depth):
+            if it_is_a_dir and depth < max_depth:
                 try:
-                    # The stat should be performed in a try/except block.
-                    # We repeat that here vs. moving to the above block
-                    # because we only want to call `stat` if we haven't
-                    # exceeded our max_depth
+                    # The stat should be performed in a try/except block. We repeat that here
+                    # vs. moving to the above block because we only want to call `stat` if we
+                    # haven't exceeded our max_depth
                     if sys.platform == "win32":
-                        # Note: st_ino/st_dev on DirEntry.stat are not set on
-                        # Windows, so we have to call os.stat
+                        # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we
+                        # have to call os.stat
                         stat_info = os.stat(dir_entry.path, follow_symlinks=True)
                     else:
                         stat_info = dir_entry.stat(follow_symlinks=True)
@@ -1836,15 +1821,15 @@ def _dir_id(stat_info):
                     dir_queue.appendleft((depth + 1, dir_entry.path))
                     visited_dirs.add(dir_id)
             else:
-                fname = os.path.basename(dir_entry.path)
-                for pattern in regexes:
-                    if pattern.match(os.path.normcase(fname)):
-                        found_files[pattern].append(os.path.join(next_dir, fname))
-
-    # TODO: for fully-recursive searches, we can print a warning after
-    # after having searched everything up to some fixed depth
+                m = regex.match(os.path.normcase(os.path.basename(dir_entry.path)))
+                if not m:
+                    continue
+                for group in capture_group_to_paths:
+                    if m.group(group):
+                        capture_group_to_paths[group].append(dir_entry.path)
+                        break

-    return list(itertools.chain(*[found_files[x] for x in regexes]))
+    return [path for paths in capture_group_to_paths.values() for path in paths]


 # Utilities for libraries and headers
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index f43773346a948d..6641a727dde7af 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -5,12 +5,14 @@
 import collections.abc
 import contextlib
+import fnmatch
 import functools
 import itertools
 import os
 import re
 import sys
 import traceback
+import typing
 import warnings
 from datetime import datetime, timedelta
 from typing import Callable, Iterable, List, Tuple, TypeVar
@@ -859,6 +861,32 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
     return line_list


+if sys.version_info >= (3, 9):
+    PatternStr = re.Pattern[str]
+else:
+    PatternStr = typing.Pattern[str]
+
+
+def fnmatch_translate_multiple(patterns: List[str]) -> Tuple[PatternStr, List[str]]:
+    """Same as fnmatch.translate, but creates a single regex of the form
+    ``(?P<pattern0>...)|(?P<pattern1>...)|...`` for each pattern in the iterable, where
+    ``patternN`` is a named capture group
that matches the corresponding pattern translated by + ``fnmatch.translate``. This can be used to match multiple patterns in a single pass. No case + normalization is performed on the patterns. + + Args: + patterns: list of fnmatch patterns + + Returns: + Tuple of the combined regex and the list of named capture groups corresponding to each + pattern in the input list. + """ + groups = [f"pattern{i}" for i in range(len(patterns))] + regexes = (fnmatch.translate(p) for p in patterns) + combined = re.compile("|".join(f"(?P<{g}>{r})" for g, r in zip(groups, regexes))) + return combined, groups + + @contextlib.contextmanager def nullcontext(*args, **kwargs): """Empty context manager. diff --git a/lib/spack/spack/test/llnl/util/filesystem.py b/lib/spack/spack/test/llnl/util/filesystem.py index 01379be94c0614..03e1f30dd30996 100644 --- a/lib/spack/spack/test/llnl/util/filesystem.py +++ b/lib/spack/spack/test/llnl/util/filesystem.py @@ -1072,16 +1072,16 @@ def test_find_max_depth(dir_structure_with_things_to_find): # Make sure the paths we use to verify are absolute assert os.path.isabs(locations["file_one"]) - assert set(fs.find_max_depth(root, "file_*", 0)) == {locations["file_four"]} - assert set(fs.find_max_depth(root, "file_*", 1)) == { + assert set(fs.find(root, "file_*", max_depth=0)) == {locations["file_four"]} + assert set(fs.find(root, "file_*", max_depth=1)) == { locations["file_one"], locations["file_three"], locations["file_four"], } - assert set(fs.find_max_depth(root, "file_two", 2)) == {locations["file_two"]} - assert not set(fs.find_max_depth(root, "file_two", 1)) - assert set(fs.find_max_depth(root, "file_two")) == {locations["file_two"]} - assert set(fs.find_max_depth(root, "file_*")) == set(locations.values()) + assert set(fs.find(root, "file_two", max_depth=2)) == {locations["file_two"]} + assert not set(fs.find(root, "file_two", max_depth=1)) + assert set(fs.find(root, "file_two")) == {locations["file_two"]} + assert set(fs.find(root, "file_*")) == set(locations.values()) def test_find_max_depth_relative(dir_structure_with_things_to_find): @@ -1090,8 +1090,8 @@ def test_find_max_depth_relative(dir_structure_with_things_to_find): """ root, locations = dir_structure_with_things_to_find with fs.working_dir(root): - assert set(fs.find_max_depth(".", "file_*", 0)) == {locations["file_four"]} - assert set(fs.find_max_depth(".", "file_two", 2)) == {locations["file_two"]} + assert set(fs.find(".", "file_*", max_depth=0)) == {locations["file_four"]} + assert set(fs.find(".", "file_two", max_depth=2)) == {locations["file_two"]} @pytest.mark.parametrize("recursive,max_depth", [(False, -1), (False, 1)]) @@ -1105,7 +1105,8 @@ def test_max_depth_and_recursive_errors(tmpdir, recursive, max_depth): fs.find_libraries(["some_lib"], root, recursive=recursive, max_depth=max_depth) -def dir_structure_with_things_to_find_links(tmpdir, use_junctions=False): +@pytest.fixture(params=[True, False]) +def complex_dir_structure(request, tmpdir): """ "lx-dy" means "level x, directory y" "lx-fy" means "level x, file y" @@ -1128,8 +1129,11 @@ def dir_structure_with_things_to_find_links(tmpdir, use_junctions=False): l1-s3 -> l3-d4 # a link that "skips" a directory level l1-s4 -> l2-s3 # a link to a link to a dir """ - if sys.platform == "win32" and (not use_junctions) and (not _windows_can_symlink()): + use_junctions = request.param + if sys.platform == "win32" and not use_junctions and not _windows_can_symlink(): pytest.skip("This Windows instance is not configured with symlink support") + elif 
sys.platform != "win32" and use_junctions: + pytest.skip("Junctions are a Windows-only feature") l1_d1 = tmpdir.join("l1-d1").ensure(dir=True) l2_d1 = l1_d1.join("l2-d1").ensure(dir=True) @@ -1150,44 +1154,60 @@ def dir_structure_with_things_to_find_links(tmpdir, use_junctions=False): link_fn(l2_d2, l2_s3) link_fn(l2_s3, pathlib.Path(tmpdir) / "l1-s4") - locations = {} - locations["l4-f1"] = str(l3_d2.join("l4-f1").ensure()) - locations["l4-f2-full"] = str(l3_d4.join("l4-f2").ensure()) - locations["l4-f2-link"] = str(pathlib.Path(tmpdir) / "l1-s3" / "l4-f2") - locations["l2-f1"] = str(l1_d2.join("l2-f1").ensure()) - locations["l2-f1-link"] = str(pathlib.Path(tmpdir) / "l1-d1" / "l2-d1" / "l3-s1" / "l2-f1") - locations["l3-f3-full"] = str(l2_d2.join("l3-f3").ensure()) - locations["l3-f3-link-l1"] = str(pathlib.Path(tmpdir) / "l1-s4" / "l3-f3") + locations = { + "l4-f1": str(l3_d2.join("l4-f1").ensure()), + "l4-f2-full": str(l3_d4.join("l4-f2").ensure()), + "l4-f2-link": str(pathlib.Path(tmpdir) / "l1-s3" / "l4-f2"), + "l2-f1": str(l1_d2.join("l2-f1").ensure()), + "l2-f1-link": str(pathlib.Path(tmpdir) / "l1-d1" / "l2-d1" / "l3-s1" / "l2-f1"), + "l3-f3-full": str(l2_d2.join("l3-f3").ensure()), + "l3-f3-link-l1": str(pathlib.Path(tmpdir) / "l1-s4" / "l3-f3"), + } return str(tmpdir), locations -def _check_find_links(root, locations): +def test_find_max_depth_symlinks(complex_dir_structure): + root, locations = complex_dir_structure root = pathlib.Path(root) - assert set(fs.find_max_depth(root, "l4-f1")) == {locations["l4-f1"]} - assert set(fs.find_max_depth(root / "l1-s3", "l4-f2", 0)) == {locations["l4-f2-link"]} - assert set(fs.find_max_depth(root / "l1-d1", "l2-f1")) == {locations["l2-f1-link"]} + assert set(fs.find(root, "l4-f1")) == {locations["l4-f1"]} + assert set(fs.find(root / "l1-s3", "l4-f2", max_depth=0)) == {locations["l4-f2-link"]} + assert set(fs.find(root / "l1-d1", "l2-f1")) == {locations["l2-f1-link"]} # File is accessible via symlink and subdir, the link path will be # searched first, and the directory will not be searched again when # it is encountered the second time (via not-link) in the traversal - assert set(fs.find_max_depth(root, "l4-f2")) == {locations["l4-f2-link"]} + assert set(fs.find(root, "l4-f2")) == {locations["l4-f2-link"]} # File is accessible only via the dir, so the full file path should # be reported - assert set(fs.find_max_depth(root / "l1-d1", "l4-f2")) == {locations["l4-f2-full"]} + assert set(fs.find(root / "l1-d1", "l4-f2")) == {locations["l4-f2-full"]} # Check following links to links - assert set(fs.find_max_depth(root, "l3-f3")) == {locations["l3-f3-link-l1"]} + assert set(fs.find(root, "l3-f3")) == {locations["l3-f3-link-l1"]} -@pytest.mark.parametrize( - "use_junctions", - [ - False, - pytest.param( - True, - marks=pytest.mark.skipif(sys.platform != "win32", reason="Only Windows has junctions"), - ), - ], -) -def test_find_max_depth_symlinks(tmpdir, use_junctions): - root, locations = dir_structure_with_things_to_find_links(tmpdir, use_junctions=use_junctions) - _check_find_links(root, locations) +def test_find_max_depth_multiple_and_repeated_entry_points(complex_dir_structure): + root, locations = complex_dir_structure + + fst = str(pathlib.Path(root) / "l1-d1" / "l2-d1") + snd = str(pathlib.Path(root) / "l1-d2") + nonexistent = str(pathlib.Path(root) / "nonexistent") + + assert set(fs.find([fst, snd, fst, snd, nonexistent], ["l*-f*"], max_depth=1)) == { + locations["l2-f1"], + locations["l4-f1"], + locations["l4-f2-full"], + 
locations["l3-f3-full"], + } + + +def test_multiple_patterns(complex_dir_structure): + root, _ = complex_dir_structure + paths = fs.find(root, ["l2-f1", "l3-f3", "*"]) + # There shouldn't be duplicate results with multiple, overlapping patterns + assert len(set(paths)) == len(paths) + # All files should be found + filenames = [os.path.basename(p) for p in paths] + assert set(filenames) == {"l2-f1", "l3-f3", "l4-f1", "l4-f2"} + # They are ordered by first matching pattern (this is a bit of an implementation detail, + # and we could decide to change the exact order in the future) + assert filenames[0] == "l2-f1" + assert filenames[1] == "l3-f3" diff --git a/lib/spack/spack/test/llnl/util/lang.py b/lib/spack/spack/test/llnl/util/lang.py index 52dcf3950a452b..0d5aaab81a2e25 100644 --- a/lib/spack/spack/test/llnl/util/lang.py +++ b/lib/spack/spack/test/llnl/util/lang.py @@ -373,3 +373,18 @@ class _SomeClass: _SomeClass.deprecated.error_lvl = 2 with pytest.raises(AttributeError): _ = s.deprecated + + +def test_fnmatch_multiple(): + regex, groups = llnl.util.lang.fnmatch_translate_multiple(["libf*o.so", "libb*r.so"]) + + a = regex.match("libfoo.so") + assert a and a.group(groups[0]) == "libfoo.so" + + b = regex.match("libbar.so") + assert b and b.group(groups[1]) == "libbar.so" + + assert not regex.match("libfoo.so.1") + assert not regex.match("libbar.so.1") + assert not regex.match("libfoo.solibbar.so") + assert not regex.match("libbaz.so") From 3665c5c01b71525102646d3c3b1d015a72753c4f Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Wed, 6 Nov 2024 16:25:47 -0800 Subject: [PATCH 106/208] slate %oneapi@2025: cxxflags: add -Wno-error=missing-template-arg-list-after-template-kw (#47476) --- var/spack/repos/builtin/packages/slate/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/slate/package.py b/var/spack/repos/builtin/packages/slate/package.py index 82829b2edcb5f3..879c773603fe07 100644 --- a/var/spack/repos/builtin/packages/slate/package.py +++ b/var/spack/repos/builtin/packages/slate/package.py @@ -127,6 +127,12 @@ class Slate(CMakePackage, CudaPackage, ROCmPackage): conflicts("+sycl", when="@:2022.07.00", msg="SYCL support requires SLATE version 2023.08.25") conflicts("^hip@5.6.0:", when="@:2023.08.25", msg="Incompatible version of HIP/ROCm") + def flag_handler(self, name, flags): + if name == "cxxflags": + if self.spec.satisfies("%oneapi@2025:"): + flags.append("-Wno-error=missing-template-arg-list-after-template-kw") + return (flags, None, None) + def cmake_args(self): spec = self.spec backend_config = "-Duse_cuda=%s" % ("+cuda" in spec) From dd26732897aa2a0d3f5e4897aa87cc200acece00 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Wed, 6 Nov 2024 16:26:04 -0800 Subject: [PATCH 107/208] legion%oneapi@2025: cxxflags add -Wno-error=missing-template-arg-list-after-template-kw (#47478) --- var/spack/repos/builtin/packages/legion/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/legion/package.py b/var/spack/repos/builtin/packages/legion/package.py index 74f46d380ae1f3..4e1a9cccf7ff1b 100644 --- a/var/spack/repos/builtin/packages/legion/package.py +++ b/var/spack/repos/builtin/packages/legion/package.py @@ -310,6 +310,12 @@ def validate_gasnet_root(value): "sysomp", default=False, description="Use system OpenMP implementation instead of Realm's" ) + def flag_handler(self, name, flags): + if name 
== "cxxflags": + if self.spec.satisfies("%oneapi@2025:"): + flags.append("-Wno-error=missing-template-arg-list-after-template-kw") + return (flags, None, None) + def cmake_args(self): spec = self.spec from_variant = self.define_from_variant From 074b845cd31b5b954163eee15e2a73243c3a3b12 Mon Sep 17 00:00:00 2001 From: "Marc T. Henry de Frahan" Date: Wed, 6 Nov 2024 17:30:29 -0700 Subject: [PATCH 108/208] Add amr-wind versions (#47479) --- var/spack/repos/builtin/packages/amr-wind/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/amr-wind/package.py b/var/spack/repos/builtin/packages/amr-wind/package.py index 47919489006e4e..c2b96b163bc81b 100644 --- a/var/spack/repos/builtin/packages/amr-wind/package.py +++ b/var/spack/repos/builtin/packages/amr-wind/package.py @@ -21,6 +21,8 @@ class AmrWind(CMakePackage, CudaPackage, ROCmPackage): license("BSD-3-Clause") version("main", branch="main", submodules=True) + version("3.2.0", tag="v3.2.0", submodules=True) + version("3.1.7", tag="v3.1.7", submodules=True) version("3.1.6", tag="v3.1.6", submodules=True) version("3.1.5", tag="v3.1.5", submodules=True) version("3.1.4", tag="v3.1.4", submodules=True) From bf11fb037b799d8643d096dfb0991c33b801a716 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:49:35 -0800 Subject: [PATCH 109/208] loki%oneapi@2025: -Wno-error=missing-template-arg-list-after-template-kw (#47475) --- var/spack/repos/builtin/packages/loki/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/loki/package.py b/var/spack/repos/builtin/packages/loki/package.py index 91bf32a1dec74e..360d2ad587d3ef 100644 --- a/var/spack/repos/builtin/packages/loki/package.py +++ b/var/spack/repos/builtin/packages/loki/package.py @@ -24,6 +24,8 @@ class Loki(MakefilePackage): def flag_handler(self, name, flags): if name == "cxxflags": + if self.spec.satisfies("%oneapi@2025:"): + flags.append("-Wno-error=missing-template-arg-list-after-template-kw") if self.spec.satisfies("%oneapi@2023.0.0:"): flags.append("-Wno-error=dynamic-exception-spec") if self.spec.satisfies("@0.1.7 %gcc@11:"): From 0d817878ea25ecd2e0c7a5754f2ac877a1da7c59 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 7 Nov 2024 20:29:37 +0100 Subject: [PATCH 110/208] spec.py: fix comparison with multivalued variants (#47485) --- lib/spack/spack/test/spec_semantics.py | 4 ++++ lib/spack/spack/variant.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 6342325364a3a0..38424e951c55cc 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -1975,3 +1975,7 @@ def test_equality_discriminate_on_propagation(lhs, rhs): s, t = Spec(lhs), Spec(rhs) assert s != t assert len({s, t}) == 2 + + +def test_comparison_multivalued_variants(): + assert Spec("x=a") < Spec("x=a,b") < Spec("x==a,b") < Spec("x==a,b,c") diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index bce2015c1207bc..e5a5ddfa3c904a 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -378,8 +378,8 @@ def _value_setter(self, value: ValueType) -> None: def _cmp_iter(self) -> Iterable: yield self.name - yield from (str(v) for v in self.value_as_tuple) yield self.propagate + yield from (str(v) for v in self.value_as_tuple) def copy(self) -> "AbstractVariant": """Returns an instance of a variant 
equivalent to self From 754408ca2b170e5c70980783900515d248bfd079 Mon Sep 17 00:00:00 2001 From: "Marc T. Henry de Frahan" Date: Thu, 7 Nov 2024 13:21:38 -0700 Subject: [PATCH 111/208] Add fast farm variant to openfast (#47486) --- var/spack/repos/builtin/packages/openfast/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/openfast/package.py b/var/spack/repos/builtin/packages/openfast/package.py index 3d768d3c6fd632..2fd434465054e3 100644 --- a/var/spack/repos/builtin/packages/openfast/package.py +++ b/var/spack/repos/builtin/packages/openfast/package.py @@ -50,6 +50,7 @@ class Openfast(CMakePackage): variant("openmp", default=False, description="Enable OpenMP support") variant("netcdf", default=False, description="Enable NetCDF support") variant("rosco", default=False, description="Build ROSCO controller") + variant("fastfarm", default=False, description="Enable FAST.Farm capabilities") depends_on("blas") depends_on("lapack") @@ -78,6 +79,7 @@ def cmake_args(self): self.define_from_variant("BUILD_OPENFAST_CPP_API", "cxx"), self.define_from_variant("BUILD_OPENFAST_CPP_DRIVER", "cxx"), self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"), + self.define_from_variant("BUILD_FASTFARM", "fastfarm"), ] ) From 0a4563fd0253c829fb14fd576ad0368954028d5d Mon Sep 17 00:00:00 2001 From: Chris White Date: Thu, 7 Nov 2024 12:49:26 -0800 Subject: [PATCH 112/208] silo package: update patch (#47457) Update patch based on LLNL/Silo#319 to fix build of 4.10.2 --- .../packages/silo/H5EPR_SEMI_COLON.patch | 30 ++++++------------- 1 file changed, 9 insertions(+), 21 deletions(-) diff --git a/var/spack/repos/builtin/packages/silo/H5EPR_SEMI_COLON.patch b/var/spack/repos/builtin/packages/silo/H5EPR_SEMI_COLON.patch index ae741450810e25..023f0bbe662a3e 100644 --- a/var/spack/repos/builtin/packages/silo/H5EPR_SEMI_COLON.patch +++ b/var/spack/repos/builtin/packages/silo/H5EPR_SEMI_COLON.patch @@ -1,50 +1,38 @@ diff --git a/src/hdf5_drv/H5FDsilo.c b/src/hdf5_drv/H5FDsilo.c +index 840dfd0..0153e18 100644 --- a/src/hdf5_drv/H5FDsilo.c +++ b/src/hdf5_drv/H5FDsilo.c -@@ -243,6 +243,12 @@ - return tmp; - } - -+#if HDF5_VERSION_GE(1,10,8) -+#define H5EPR_SEMI_COLON ; -+#else -+#define H5EPR_SEMI_COLON -+#endif -+ - - #ifdef H5_HAVE_SNPRINTF - #define H5E_PUSH_HELPER(Func,Cls,Maj,Min,Msg,Ret,Errno) \ -@@ -252,13 +258,13 @@ +@@ -255,13 +255,13 @@ static const char *flavors(H5F_mem_t m) snprintf(msg, sizeof(msg), Msg "(errno=%d, \"%s\")", \ Errno, strerror(Errno)); \ ret_value = Ret; \ - H5Epush_ret(Func, Cls, Maj, Min, msg, Ret) \ -+ H5Epush_ret(Func, Cls, Maj, Min, msg, Ret) H5EPR_SEMI_COLON \ ++ H5Epush_ret(Func, Cls, Maj, Min, msg, Ret) ; \ } #else #define H5E_PUSH_HELPER(Func,Cls,Maj,Min,Msg,Ret,Errno) \ { \ ret_value = Ret; \ - H5Epush_ret(Func, Cls, Maj, Min, Msg, Ret) \ -+ H5Epush_ret(Func, Cls, Maj, Min, Msg, Ret) H5EPR_SEMI_COLON \ ++ H5Epush_ret(Func, Cls, Maj, Min, Msg, Ret) ; \ } #endif -@@ -1355,7 +1368,7 @@ +@@ -1308,7 +1308,7 @@ H5FD_silo_sb_encode(H5FD_t *_file, char *name/*out*/, assert(sizeof(hsize_t)<=8); memcpy(p, &file->block_size, sizeof(hsize_t)); if (H5Tconvert(H5T_NATIVE_HSIZE, H5T_STD_U64LE, 1, buf+8, NULL, H5P_DEFAULT)<0) - H5Epush_ret(func, H5E_ERR_CLS, H5E_DATATYPE, H5E_CANTCONVERT, "can't convert superblock info", -1) -+ H5Epush_ret(func, H5E_ERR_CLS, H5E_DATATYPE, H5E_CANTCONVERT, "can't convert superblock info", -1) H5EPR_SEMI_COLON ++ H5Epush_ret(func, H5E_ERR_CLS, H5E_DATATYPE, H5E_CANTCONVERT, "can't convert superblock info", -1) ; 
return 0; } -@@ -1383,14 +1396,14 @@ +@@ -1336,14 +1336,14 @@ H5FD_silo_sb_decode(H5FD_t *_file, const char *name, const unsigned char *buf) /* Make sure the name/version number is correct */ if (strcmp(name, "LLNLsilo")) - H5Epush_ret(func, H5E_ERR_CLS, H5E_FILE, H5E_BADVALUE, "invalid silo superblock", -1) -+ H5Epush_ret(func, H5E_ERR_CLS, H5E_FILE, H5E_BADVALUE, "invalid silo superblock", -1) H5EPR_SEMI_COLON ++ H5Epush_ret(func, H5E_ERR_CLS, H5E_FILE, H5E_BADVALUE, "invalid silo superblock", -1) ; buf += 8; /* Decode block size */ @@ -52,7 +40,7 @@ diff --git a/src/hdf5_drv/H5FDsilo.c b/src/hdf5_drv/H5FDsilo.c memcpy(x, buf, 8); if (H5Tconvert(H5T_STD_U64LE, H5T_NATIVE_HSIZE, 1, x, NULL, H5P_DEFAULT)<0) - H5Epush_ret(func, H5E_ERR_CLS, H5E_DATATYPE, H5E_CANTCONVERT, "can't convert superblock info", -1) -+ H5Epush_ret(func, H5E_ERR_CLS, H5E_DATATYPE, H5E_CANTCONVERT, "can't convert superblock info", -1) H5EPR_SEMI_COLON ++ H5Epush_ret(func, H5E_ERR_CLS, H5E_DATATYPE, H5E_CANTCONVERT, "can't convert superblock info", -1) ; ap = (hsize_t*)x; /*file->block_size = *ap; ignore stored value for now */ From 60ba61f6b2b063d37ccbb5c39cf52e75d41d8333 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 7 Nov 2024 20:53:26 +0100 Subject: [PATCH 113/208] Revert "`llnl.util.filesystem.find`: multiple entrypoints (#47436)" This reverts commit 73219e4b02e6561bbeef379081f63efb0dc78817. --- lib/spack/llnl/util/filesystem.py | 169 ++++++++++--------- lib/spack/llnl/util/lang.py | 28 --- lib/spack/spack/test/llnl/util/filesystem.py | 96 +++++------ lib/spack/spack/test/llnl/util/lang.py | 15 -- 4 files changed, 130 insertions(+), 178 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index a8f07824c9660e..24055c902b6225 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -20,11 +20,11 @@ import tempfile from contextlib import contextmanager from itertools import accumulate -from typing import Callable, Deque, Dict, Iterable, List, Match, Optional, Set, Tuple, Union +from typing import Callable, Iterable, List, Match, Optional, Tuple, Union import llnl.util.symlink from llnl.util import tty -from llnl.util.lang import dedupe, fnmatch_translate_multiple, memoized +from llnl.util.lang import dedupe, memoized from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink from ..path import path_to_os_path, system_path_filter @@ -1673,40 +1673,32 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) return FindFirstFile(root, *files, bfs_depth=bfs_depth).find() -def find( - root: Union[str, List[str]], - files: Union[str, List[str]], - recursive: bool = True, - max_depth: Optional[int] = None, -) -> List[str]: - """Finds all non-directory files matching the filename patterns from ``files`` starting from - ``root``. This function returns a deterministic result for the same input and directory - structure when run multiple times. Symlinked directories are followed, and unique directories - are searched only once. Each matching file is returned only once at lowest depth in case - multiple paths exist due to symlinked directories. The function has similarities to the Unix - ``find`` utility. +def find(root, files, recursive=True, max_depth: Optional[int] = None): + """Search for ``files`` starting from the ``root`` directory. + + Like GNU/BSD find but written entirely in Python. 
+ + Specifically this behaves like `find -type f`: it only returns + results that are files. When searching recursively, this behaves + as `find` with the `-L` option (follows symlinks). Examples: .. code-block:: console - $ find -L /usr -name python3 -type f + $ find -L /usr -name python - is roughly equivalent to - - >>> find("/usr", "python3") + is equivalent to: - with the notable difference that this function only lists a single path to each file in case of - symlinked directories. + >>> find('/usr', 'python') .. code-block:: console - $ find -L /usr/local/bin /usr/local/sbin -maxdepth 1 '(' -name python3 -o -name getcap \\ - ')' -type f + $ find /usr/local/bin -maxdepth 1 -name python - is roughly equivalent to: + is equivalent to: - >>> find(["/usr/local/bin", "/usr/local/sbin"], ["python3", "getcap"], recursive=False) + >>> find('/usr/local/bin', 'python', recursive=False) Accepts any glob characters accepted by fnmatch: @@ -1720,17 +1712,17 @@ def find( ========== ==================================== Parameters: - root: One or more root directories to start searching from - files: One or more filename patterns to search for - recursive: if False search only root, if True descends from roots. Defaults to True. - max_depth: if set, don't search below this depth. Cannot be set if recursive is False + root (str): The root directory to start searching from + files (str or collections.abc.Sequence): Library name(s) to search for + recursive (bool): if False search only root folder, + if True descends top-down from the root. Defaults to True. + max_depth (int): if set, don't search below this depth. Cannot be set + if recursive is False - Returns a list of absolute, matching file paths. + Returns: + list: The files that have been found """ - if not isinstance(root, list): - root = [root] - - if not isinstance(files, list): + if isinstance(files, str): files = [files] # If recursive is false, max_depth can only be None or 0 @@ -1742,9 +1734,10 @@ def find( elif max_depth is None: max_depth = sys.maxsize - tty.debug(f"Find (max depth = {max_depth}): {root} {files}") - result = _find_max_depth(root, files, max_depth) - tty.debug(f"Find complete: {root} {files}") + tty.debug(f"Find (max depth = {max_depth}): {root} {str(files)}") + result = find_max_depth(root, files, max_depth) + + tty.debug(f"Find complete: {root} {str(files)}") return result @@ -1753,36 +1746,56 @@ def _log_file_access_issue(e: OSError, path: str) -> None: tty.debug(f"find must skip {path}: {errno_name} {e}") -def _dir_id(s: os.stat_result) -> Tuple[int, int]: - # Note: on windows, st_ino is the file index and st_dev is the volume serial number. See - # https://github.com/python/cpython/blob/3.9/Python/fileutils.c - return (s.st_ino, s.st_dev) +@system_path_filter(arg_slice=slice(1)) +def find_max_depth(root, globs, max_depth: Optional[int] = None): + """Given a set of non-recursive glob file patterns, finds all + files matching those patterns up to a maximum specified depth. + If a directory has a name which matches an input pattern, it will + not be included in the results. 
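+
+    For example, a minimal sketch (``/some/root`` is a hypothetical path; the
+    pattern and depth mirror the tests in this series), searching at most one
+    level below the root:
+
+    >>> find_max_depth("/some/root", "file_*", 1)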
-def _find_max_depth(roots: List[str], globs: List[str], max_depth: int = sys.maxsize) -> List[str]: - """See ``find`` for the public API.""" - # Apply normcase to file patterns and filenames to respect case insensitive filesystems - regex, groups = fnmatch_translate_multiple([os.path.normcase(x) for x in globs]) - # Ordered dictionary that keeps track of the files found for each pattern - capture_group_to_paths: Dict[str, List[str]] = {group: [] for group in groups} - # Ensure returned paths are always absolute - roots = [os.path.abspath(r) for r in roots] - # Breadth-first search queue. Each element is a tuple of (depth, directory) - dir_queue: Deque[Tuple[int, str]] = collections.deque() - # Set of visited directories. Each element is a tuple of (inode, device) - visited_dirs: Set[Tuple[int, int]] = set() + If ``max_depth`` is specified, does not search below that depth. - for root in roots: - try: - stat_root = os.stat(root) - except OSError as e: - _log_file_access_issue(e, root) - continue - dir_id = _dir_id(stat_root) - if dir_id not in visited_dirs: - dir_queue.appendleft((0, root)) - visited_dirs.add(dir_id) + If ``globs`` is a list, files matching earlier entries are placed + in the return value before files matching later entries. + """ + try: + stat_root = os.stat(root) + except OSError: + return [] + + if max_depth is None: + max_depth = sys.maxsize + if isinstance(globs, str): + globs = [globs] + # Apply normcase to regular expressions and to the filenames: + # this respects case-sensitivity semantics of different OSes + # (e.g. file search is typically case-insensitive on Windows) + regexes = [re.compile(fnmatch.translate(os.path.normcase(x))) for x in globs] + + # Note later calls to os.scandir etc. return abspaths if the + # input is absolute, see https://docs.python.org/3/library/os.html#os.DirEntry.path + root = os.path.abspath(root) + + found_files = collections.defaultdict(list) + + def _dir_id(stat_info): + # Note: on windows, st_ino is the file index and st_dev + # is the volume serial number. See + # https://github.com/python/cpython/blob/3.9/Python/fileutils.c + return (stat_info.st_ino, stat_info.st_dev) + + visited_dirs = set([_dir_id(stat_root)]) + + # Each queue item stores the depth and path + # This achieves a consistent traversal order by iterating through + # each directory in alphabetical order. + # This also traverses in BFS order to ensure finding the shortest + # path to any file (or one of the shortest paths, if there are + # several - the one returned will be consistent given the prior + # point). + dir_queue = collections.deque([(0, root)]) while dir_queue: depth, next_dir = dir_queue.pop() try: @@ -1797,18 +1810,20 @@ def _find_max_depth(roots: List[str], globs: List[str], max_depth: int = sys.max try: it_is_a_dir = dir_entry.is_dir(follow_symlinks=True) except OSError as e: - # Possible permission issue, or a symlink that cannot be resolved (ELOOP). + # Possible permission issue, or a symlink that cannot + # be resolved (ELOOP). _log_file_access_issue(e, dir_entry.path) continue - if it_is_a_dir and depth < max_depth: + if it_is_a_dir and (depth < max_depth): try: - # The stat should be performed in a try/except block. We repeat that here - # vs. moving to the above block because we only want to call `stat` if we - # haven't exceeded our max_depth + # The stat should be performed in a try/except block. + # We repeat that here vs. 
moving to the above block
+                    # because we only want to call `stat` if we haven't
+                    # exceeded our max_depth
                     if sys.platform == "win32":
-                        # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we
-                        # have to call os.stat
+                        # Note: st_ino/st_dev on DirEntry.stat are not set on
+                        # Windows, so we have to call os.stat
                         stat_info = os.stat(dir_entry.path, follow_symlinks=True)
                     else:
                         stat_info = dir_entry.stat(follow_symlinks=True)
@@ -1821,15 +1836,15 @@ def _find_max_depth(roots: List[str], globs: List[str], max_depth: int = sys.max
                     dir_queue.appendleft((depth + 1, dir_entry.path))
                     visited_dirs.add(dir_id)
             else:
-                m = regex.match(os.path.normcase(os.path.basename(dir_entry.path)))
-                if not m:
-                    continue
-                for group in capture_group_to_paths:
-                    if m.group(group):
-                        capture_group_to_paths[group].append(dir_entry.path)
-                        break
+                fname = os.path.basename(dir_entry.path)
+                for pattern in regexes:
+                    if pattern.match(os.path.normcase(fname)):
+                        found_files[pattern].append(os.path.join(next_dir, fname))
+
+    # TODO: for fully-recursive searches, we can print a warning after
+    # after having searched everything up to some fixed depth

-    return [path for paths in capture_group_to_paths.values() for path in paths]
+    return list(itertools.chain(*[found_files[x] for x in regexes]))


 # Utilities for libraries and headers
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index 6641a727dde7af..f43773346a948d 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -5,14 +5,12 @@
 import collections.abc
 import contextlib
-import fnmatch
 import functools
 import itertools
 import os
 import re
 import sys
 import traceback
-import typing
 import warnings
 from datetime import datetime, timedelta
 from typing import Callable, Iterable, List, Tuple, TypeVar
@@ -861,32 +859,6 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
     return line_list


-if sys.version_info >= (3, 9):
-    PatternStr = re.Pattern[str]
-else:
-    PatternStr = typing.Pattern[str]
-
-
-def fnmatch_translate_multiple(patterns: List[str]) -> Tuple[PatternStr, List[str]]:
-    """Same as fnmatch.translate, but creates a single regex of the form
-    ``(?P<pattern0>...)|(?P<pattern1>...)|...`` for each pattern in the iterable, where
-    ``patternN`` is a named capture group that matches the corresponding pattern translated by
-    ``fnmatch.translate``. This can be used to match multiple patterns in a single pass. No case
-    normalization is performed on the patterns.
-
-    Args:
-        patterns: list of fnmatch patterns
-
-    Returns:
-        Tuple of the combined regex and the list of named capture groups corresponding to each
-        pattern in the input list.
-    """
-    groups = [f"pattern{i}" for i in range(len(patterns))]
-    regexes = (fnmatch.translate(p) for p in patterns)
-    combined = re.compile("|".join(f"(?P<{g}>{r})" for g, r in zip(groups, regexes)))
-    return combined, groups
-
-
 @contextlib.contextmanager
 def nullcontext(*args, **kwargs):
     """Empty context manager.
diff --git a/lib/spack/spack/test/llnl/util/filesystem.py b/lib/spack/spack/test/llnl/util/filesystem.py index 03e1f30dd30996..01379be94c0614 100644 --- a/lib/spack/spack/test/llnl/util/filesystem.py +++ b/lib/spack/spack/test/llnl/util/filesystem.py @@ -1072,16 +1072,16 @@ def test_find_max_depth(dir_structure_with_things_to_find): # Make sure the paths we use to verify are absolute assert os.path.isabs(locations["file_one"]) - assert set(fs.find(root, "file_*", max_depth=0)) == {locations["file_four"]} - assert set(fs.find(root, "file_*", max_depth=1)) == { + assert set(fs.find_max_depth(root, "file_*", 0)) == {locations["file_four"]} + assert set(fs.find_max_depth(root, "file_*", 1)) == { locations["file_one"], locations["file_three"], locations["file_four"], } - assert set(fs.find(root, "file_two", max_depth=2)) == {locations["file_two"]} - assert not set(fs.find(root, "file_two", max_depth=1)) - assert set(fs.find(root, "file_two")) == {locations["file_two"]} - assert set(fs.find(root, "file_*")) == set(locations.values()) + assert set(fs.find_max_depth(root, "file_two", 2)) == {locations["file_two"]} + assert not set(fs.find_max_depth(root, "file_two", 1)) + assert set(fs.find_max_depth(root, "file_two")) == {locations["file_two"]} + assert set(fs.find_max_depth(root, "file_*")) == set(locations.values()) def test_find_max_depth_relative(dir_structure_with_things_to_find): @@ -1090,8 +1090,8 @@ def test_find_max_depth_relative(dir_structure_with_things_to_find): """ root, locations = dir_structure_with_things_to_find with fs.working_dir(root): - assert set(fs.find(".", "file_*", max_depth=0)) == {locations["file_four"]} - assert set(fs.find(".", "file_two", max_depth=2)) == {locations["file_two"]} + assert set(fs.find_max_depth(".", "file_*", 0)) == {locations["file_four"]} + assert set(fs.find_max_depth(".", "file_two", 2)) == {locations["file_two"]} @pytest.mark.parametrize("recursive,max_depth", [(False, -1), (False, 1)]) @@ -1105,8 +1105,7 @@ def test_max_depth_and_recursive_errors(tmpdir, recursive, max_depth): fs.find_libraries(["some_lib"], root, recursive=recursive, max_depth=max_depth) -@pytest.fixture(params=[True, False]) -def complex_dir_structure(request, tmpdir): +def dir_structure_with_things_to_find_links(tmpdir, use_junctions=False): """ "lx-dy" means "level x, directory y" "lx-fy" means "level x, file y" @@ -1129,11 +1128,8 @@ def complex_dir_structure(request, tmpdir): l1-s3 -> l3-d4 # a link that "skips" a directory level l1-s4 -> l2-s3 # a link to a link to a dir """ - use_junctions = request.param - if sys.platform == "win32" and not use_junctions and not _windows_can_symlink(): + if sys.platform == "win32" and (not use_junctions) and (not _windows_can_symlink()): pytest.skip("This Windows instance is not configured with symlink support") - elif sys.platform != "win32" and use_junctions: - pytest.skip("Junctions are a Windows-only feature") l1_d1 = tmpdir.join("l1-d1").ensure(dir=True) l2_d1 = l1_d1.join("l2-d1").ensure(dir=True) @@ -1154,60 +1150,44 @@ def complex_dir_structure(request, tmpdir): link_fn(l2_d2, l2_s3) link_fn(l2_s3, pathlib.Path(tmpdir) / "l1-s4") - locations = { - "l4-f1": str(l3_d2.join("l4-f1").ensure()), - "l4-f2-full": str(l3_d4.join("l4-f2").ensure()), - "l4-f2-link": str(pathlib.Path(tmpdir) / "l1-s3" / "l4-f2"), - "l2-f1": str(l1_d2.join("l2-f1").ensure()), - "l2-f1-link": str(pathlib.Path(tmpdir) / "l1-d1" / "l2-d1" / "l3-s1" / "l2-f1"), - "l3-f3-full": str(l2_d2.join("l3-f3").ensure()), - "l3-f3-link-l1": str(pathlib.Path(tmpdir) / 
"l1-s4" / "l3-f3"), - } + locations = {} + locations["l4-f1"] = str(l3_d2.join("l4-f1").ensure()) + locations["l4-f2-full"] = str(l3_d4.join("l4-f2").ensure()) + locations["l4-f2-link"] = str(pathlib.Path(tmpdir) / "l1-s3" / "l4-f2") + locations["l2-f1"] = str(l1_d2.join("l2-f1").ensure()) + locations["l2-f1-link"] = str(pathlib.Path(tmpdir) / "l1-d1" / "l2-d1" / "l3-s1" / "l2-f1") + locations["l3-f3-full"] = str(l2_d2.join("l3-f3").ensure()) + locations["l3-f3-link-l1"] = str(pathlib.Path(tmpdir) / "l1-s4" / "l3-f3") return str(tmpdir), locations -def test_find_max_depth_symlinks(complex_dir_structure): - root, locations = complex_dir_structure +def _check_find_links(root, locations): root = pathlib.Path(root) - assert set(fs.find(root, "l4-f1")) == {locations["l4-f1"]} - assert set(fs.find(root / "l1-s3", "l4-f2", max_depth=0)) == {locations["l4-f2-link"]} - assert set(fs.find(root / "l1-d1", "l2-f1")) == {locations["l2-f1-link"]} + assert set(fs.find_max_depth(root, "l4-f1")) == {locations["l4-f1"]} + assert set(fs.find_max_depth(root / "l1-s3", "l4-f2", 0)) == {locations["l4-f2-link"]} + assert set(fs.find_max_depth(root / "l1-d1", "l2-f1")) == {locations["l2-f1-link"]} # File is accessible via symlink and subdir, the link path will be # searched first, and the directory will not be searched again when # it is encountered the second time (via not-link) in the traversal - assert set(fs.find(root, "l4-f2")) == {locations["l4-f2-link"]} + assert set(fs.find_max_depth(root, "l4-f2")) == {locations["l4-f2-link"]} # File is accessible only via the dir, so the full file path should # be reported - assert set(fs.find(root / "l1-d1", "l4-f2")) == {locations["l4-f2-full"]} + assert set(fs.find_max_depth(root / "l1-d1", "l4-f2")) == {locations["l4-f2-full"]} # Check following links to links - assert set(fs.find(root, "l3-f3")) == {locations["l3-f3-link-l1"]} - - -def test_find_max_depth_multiple_and_repeated_entry_points(complex_dir_structure): - root, locations = complex_dir_structure - - fst = str(pathlib.Path(root) / "l1-d1" / "l2-d1") - snd = str(pathlib.Path(root) / "l1-d2") - nonexistent = str(pathlib.Path(root) / "nonexistent") - - assert set(fs.find([fst, snd, fst, snd, nonexistent], ["l*-f*"], max_depth=1)) == { - locations["l2-f1"], - locations["l4-f1"], - locations["l4-f2-full"], - locations["l3-f3-full"], - } + assert set(fs.find_max_depth(root, "l3-f3")) == {locations["l3-f3-link-l1"]} -def test_multiple_patterns(complex_dir_structure): - root, _ = complex_dir_structure - paths = fs.find(root, ["l2-f1", "l3-f3", "*"]) - # There shouldn't be duplicate results with multiple, overlapping patterns - assert len(set(paths)) == len(paths) - # All files should be found - filenames = [os.path.basename(p) for p in paths] - assert set(filenames) == {"l2-f1", "l3-f3", "l4-f1", "l4-f2"} - # They are ordered by first matching pattern (this is a bit of an implementation detail, - # and we could decide to change the exact order in the future) - assert filenames[0] == "l2-f1" - assert filenames[1] == "l3-f3" +@pytest.mark.parametrize( + "use_junctions", + [ + False, + pytest.param( + True, + marks=pytest.mark.skipif(sys.platform != "win32", reason="Only Windows has junctions"), + ), + ], +) +def test_find_max_depth_symlinks(tmpdir, use_junctions): + root, locations = dir_structure_with_things_to_find_links(tmpdir, use_junctions=use_junctions) + _check_find_links(root, locations) diff --git a/lib/spack/spack/test/llnl/util/lang.py b/lib/spack/spack/test/llnl/util/lang.py index 
0d5aaab81a2e25..52dcf3950a452b 100644 --- a/lib/spack/spack/test/llnl/util/lang.py +++ b/lib/spack/spack/test/llnl/util/lang.py @@ -373,18 +373,3 @@ class _SomeClass: _SomeClass.deprecated.error_lvl = 2 with pytest.raises(AttributeError): _ = s.deprecated - - -def test_fnmatch_multiple(): - regex, groups = llnl.util.lang.fnmatch_translate_multiple(["libf*o.so", "libb*r.so"]) - - a = regex.match("libfoo.so") - assert a and a.group(groups[0]) == "libfoo.so" - - b = regex.match("libbar.so") - assert b and b.group(groups[1]) == "libbar.so" - - assert not regex.match("libfoo.so.1") - assert not regex.match("libbar.so.1") - assert not regex.match("libfoo.solibbar.so") - assert not regex.match("libbaz.so") From 4fbdf2f2c08e52d354bc22e1838d9e5dc6d1d332 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 7 Nov 2024 20:53:28 +0100 Subject: [PATCH 114/208] Revert "llnl.util.filesystem.find: restore old error handling (#47463)" This reverts commit a31c525778773b8c6a6fc35617454d954a05d74d. --- lib/spack/llnl/util/filesystem.py | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 24055c902b6225..b63b6e94b39a2e 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -1741,11 +1741,6 @@ def find(root, files, recursive=True, max_depth: Optional[int] = None): return result -def _log_file_access_issue(e: OSError, path: str) -> None: - errno_name = errno.errorcode.get(e.errno, "UNKNOWN") - tty.debug(f"find must skip {path}: {errno_name} {e}") - - @system_path_filter(arg_slice=slice(1)) def find_max_depth(root, globs, max_depth: Optional[int] = None): """Given a set of non-recursive glob file patterns, finds all @@ -1759,10 +1754,19 @@ def find_max_depth(root, globs, max_depth: Optional[int] = None): If ``globs`` is a list, files matching earlier entries are placed in the return value before files matching later entries. """ + # If root doesn't exist, then we say we found nothing. If it + # exists but is not a dir, we assume the user would want to + # know; likewise if it exists but we do not have permission to + # access it. try: stat_root = os.stat(root) - except OSError: - return [] + except OSError as e: + if e.errno == errno.ENOENT: + return [] + else: + raise + if not stat.S_ISDIR(stat_root.st_mode): + raise ValueError(f"{root} is not a directory") if max_depth is None: max_depth = sys.maxsize @@ -1786,6 +1790,10 @@ def _dir_id(stat_info): # https://github.com/python/cpython/blob/3.9/Python/fileutils.c return (stat_info.st_ino, stat_info.st_dev) + def _log_file_access_issue(e): + errno_name = errno.errorcode.get(e.errno, "UNKNOWN") + tty.debug(f"find must skip {dir_entry.path}: {errno_name} {str(e)}") + visited_dirs = set([_dir_id(stat_root)]) # Each queue item stores the depth and path @@ -1800,8 +1808,9 @@ def _dir_id(stat_info): depth, next_dir = dir_queue.pop() try: dir_iter = os.scandir(next_dir) - except OSError as e: - _log_file_access_issue(e, next_dir) + except OSError: + # Most commonly, this would be a permissions issue, for + # example if we are scanning an external directory like /usr continue with dir_iter: @@ -1812,7 +1821,7 @@ def _dir_id(stat_info): except OSError as e: # Possible permission issue, or a symlink that cannot # be resolved (ELOOP). 
- _log_file_access_issue(e, dir_entry.path) + _log_file_access_issue(e) continue if it_is_a_dir and (depth < max_depth): @@ -1828,7 +1837,7 @@ def _dir_id(stat_info): else: stat_info = dir_entry.stat(follow_symlinks=True) except OSError as e: - _log_file_access_issue(e, dir_entry.path) + _log_file_access_issue(e) continue dir_id = _dir_id(stat_info) From ed916ffe6ce9bd6af94cbd4538b06f258a8d766c Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 7 Nov 2024 20:53:29 +0100 Subject: [PATCH 115/208] Revert "filesystem.py: add `max_depth` argument to `find` (#41945)" This reverts commit 38c8069ab42f44aa9f4779968937fc6842dc2109. --- lib/spack/llnl/util/filesystem.py | 175 +++++------------- lib/spack/spack/test/llnl/util/file_list.py | 32 +++- lib/spack/spack/test/llnl/util/filesystem.py | 158 +--------------- .../packages/attributes-foo/package.py | 4 +- 4 files changed, 77 insertions(+), 292 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index b63b6e94b39a2e..00bb270151908c 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -1673,20 +1673,16 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) return FindFirstFile(root, *files, bfs_depth=bfs_depth).find() -def find(root, files, recursive=True, max_depth: Optional[int] = None): +def find(root, files, recursive=True): """Search for ``files`` starting from the ``root`` directory. Like GNU/BSD find but written entirely in Python. - Specifically this behaves like `find -type f`: it only returns - results that are files. When searching recursively, this behaves - as `find` with the `-L` option (follows symlinks). - Examples: .. code-block:: console - $ find -L /usr -name python + $ find /usr -name python is equivalent to: @@ -1716,8 +1712,6 @@ def find(root, files, recursive=True, max_depth: Optional[int] = None): files (str or collections.abc.Sequence): Library name(s) to search for recursive (bool): if False search only root folder, if True descends top-down from the root. Defaults to True. - max_depth (int): if set, don't search below this depth. Cannot be set - if recursive is False Returns: list: The files that have been found @@ -1725,135 +1719,59 @@ def find(root, files, recursive=True, max_depth: Optional[int] = None): if isinstance(files, str): files = [files] - # If recursive is false, max_depth can only be None or 0 - if max_depth and not recursive: - raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False") - - if not recursive: - max_depth = 0 - elif max_depth is None: - max_depth = sys.maxsize - - tty.debug(f"Find (max depth = {max_depth}): {root} {str(files)}") - result = find_max_depth(root, files, max_depth) + if recursive: + tty.debug(f"Find (recursive): {root} {str(files)}") + result = _find_recursive(root, files) + else: + tty.debug(f"Find (not recursive): {root} {str(files)}") + result = _find_non_recursive(root, files) tty.debug(f"Find complete: {root} {str(files)}") return result -@system_path_filter(arg_slice=slice(1)) -def find_max_depth(root, globs, max_depth: Optional[int] = None): - """Given a set of non-recursive glob file patterns, finds all - files matching those patterns up to a maximum specified depth. - - If a directory has a name which matches an input pattern, it will - not be included in the results. - - If ``max_depth`` is specified, does not search below that depth. 
+@system_path_filter +def _find_recursive(root, search_files): + # The variable here is **on purpose** a defaultdict. The idea is that + # we want to poke the filesystem as little as possible, but still maintain + # stability in the order of the answer. Thus we are recording each library + # found in a key, and reconstructing the stable order later. + found_files = collections.defaultdict(list) - If ``globs`` is a list, files matching earlier entries are placed - in the return value before files matching later entries. - """ - # If root doesn't exist, then we say we found nothing. If it - # exists but is not a dir, we assume the user would want to - # know; likewise if it exists but we do not have permission to - # access it. - try: - stat_root = os.stat(root) - except OSError as e: - if e.errno == errno.ENOENT: - return [] - else: - raise - if not stat.S_ISDIR(stat_root.st_mode): - raise ValueError(f"{root} is not a directory") + # Make the path absolute to have os.walk also return an absolute path + root = os.path.abspath(root) + for path, _, list_files in os.walk(root): + for search_file in search_files: + matches = glob.glob(os.path.join(path, search_file)) + matches = [os.path.join(path, x) for x in matches] + found_files[search_file].extend(matches) - if max_depth is None: - max_depth = sys.maxsize + answer = [] + for search_file in search_files: + answer.extend(found_files[search_file]) - if isinstance(globs, str): - globs = [globs] - # Apply normcase to regular expressions and to the filenames: - # this respects case-sensitivity semantics of different OSes - # (e.g. file search is typically case-insensitive on Windows) - regexes = [re.compile(fnmatch.translate(os.path.normcase(x))) for x in globs] + return answer - # Note later calls to os.scandir etc. return abspaths if the - # input is absolute, see https://docs.python.org/3/library/os.html#os.DirEntry.path - root = os.path.abspath(root) +@system_path_filter +def _find_non_recursive(root, search_files): + # The variable here is **on purpose** a defaultdict as os.list_dir + # can return files in any order (does not preserve stability) found_files = collections.defaultdict(list) - def _dir_id(stat_info): - # Note: on windows, st_ino is the file index and st_dev - # is the volume serial number. See - # https://github.com/python/cpython/blob/3.9/Python/fileutils.c - return (stat_info.st_ino, stat_info.st_dev) - - def _log_file_access_issue(e): - errno_name = errno.errorcode.get(e.errno, "UNKNOWN") - tty.debug(f"find must skip {dir_entry.path}: {errno_name} {str(e)}") - - visited_dirs = set([_dir_id(stat_root)]) - - # Each queue item stores the depth and path - # This achieves a consistent traversal order by iterating through - # each directory in alphabetical order. - # This also traverses in BFS order to ensure finding the shortest - # path to any file (or one of the shortest paths, if there are - # several - the one returned will be consistent given the prior - # point). - dir_queue = collections.deque([(0, root)]) - while dir_queue: - depth, next_dir = dir_queue.pop() - try: - dir_iter = os.scandir(next_dir) - except OSError: - # Most commonly, this would be a permissions issue, for - # example if we are scanning an external directory like /usr - continue - - with dir_iter: - ordered_entries = sorted(dir_iter, key=lambda x: x.name) - for dir_entry in ordered_entries: - try: - it_is_a_dir = dir_entry.is_dir(follow_symlinks=True) - except OSError as e: - # Possible permission issue, or a symlink that cannot - # be resolved (ELOOP). 
- _log_file_access_issue(e) - continue + # Make the path absolute to have absolute path returned + root = os.path.abspath(root) - if it_is_a_dir and (depth < max_depth): - try: - # The stat should be performed in a try/except block. - # We repeat that here vs. moving to the above block - # because we only want to call `stat` if we haven't - # exceeded our max_depth - if sys.platform == "win32": - # Note: st_ino/st_dev on DirEntry.stat are not set on - # Windows, so we have to call os.stat - stat_info = os.stat(dir_entry.path, follow_symlinks=True) - else: - stat_info = dir_entry.stat(follow_symlinks=True) - except OSError as e: - _log_file_access_issue(e) - continue - - dir_id = _dir_id(stat_info) - if dir_id not in visited_dirs: - dir_queue.appendleft((depth + 1, dir_entry.path)) - visited_dirs.add(dir_id) - else: - fname = os.path.basename(dir_entry.path) - for pattern in regexes: - if pattern.match(os.path.normcase(fname)): - found_files[pattern].append(os.path.join(next_dir, fname)) + for search_file in search_files: + matches = glob.glob(os.path.join(root, search_file)) + matches = [os.path.join(root, x) for x in matches] + found_files[search_file].extend(matches) - # TODO: for fully-recursive searches, we can print a warning after - # after having searched everything up to some fixed depth + answer = [] + for search_file in search_files: + answer.extend(found_files[search_file]) - return list(itertools.chain(*[found_files[x] for x in regexes])) + return answer # Utilities for libraries and headers @@ -2292,9 +2210,7 @@ def find_system_libraries(libraries, shared=True): return libraries_found -def find_libraries( - libraries, root, shared=True, recursive=False, runtime=True, max_depth: Optional[int] = None -): +def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): """Returns an iterable of full paths to libraries found in a root dir. Accepts any glob characters accepted by fnmatch: @@ -2315,8 +2231,6 @@ def find_libraries( otherwise for static. Defaults to True. recursive (bool): if False search only root folder, if True descends top-down from the root. Defaults to False. - max_depth (int): if set, don't search below this depth. Cannot be set - if recursive is False runtime (bool): Windows only option, no-op elsewhere. If true, search for runtime shared libs (.DLL), otherwise, search for .Lib files. If shared is false, this has no meaning. @@ -2325,7 +2239,6 @@ def find_libraries( Returns: LibraryList: The libraries that have been found """ - if isinstance(libraries, str): libraries = [libraries] elif not isinstance(libraries, collections.abc.Sequence): @@ -2358,10 +2271,8 @@ def find_libraries( libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes] if not recursive: - if max_depth: - raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False") # If not recursive, look for the libraries directly in root - return LibraryList(find(root, libraries, recursive=False)) + return LibraryList(find(root, libraries, False)) # To speedup the search for external packages configured e.g. 
in /usr, # perform first non-recursive search in root/lib then in root/lib64 and @@ -2379,7 +2290,7 @@ def find_libraries( if found_libs: break else: - found_libs = find(root, libraries, recursive=True, max_depth=max_depth) + found_libs = find(root, libraries, True) return LibraryList(found_libs) diff --git a/lib/spack/spack/test/llnl/util/file_list.py b/lib/spack/spack/test/llnl/util/file_list.py index e2ff5a82109510..75ba3ae89d9aca 100644 --- a/lib/spack/spack/test/llnl/util/file_list.py +++ b/lib/spack/spack/test/llnl/util/file_list.py @@ -9,7 +9,7 @@ import pytest -from llnl.util.filesystem import HeaderList, LibraryList, find_headers, find_libraries +from llnl.util.filesystem import HeaderList, LibraryList, find, find_headers, find_libraries import spack.paths @@ -324,3 +324,33 @@ def test_searching_order(search_fn, search_list, root, kwargs): # List should be empty here assert len(rlist) == 0 + + +@pytest.mark.parametrize( + "root,search_list,kwargs,expected", + [ + ( + search_dir, + "*/*bar.tx?", + {"recursive": False}, + [ + os.path.join(search_dir, os.path.join("a", "foobar.txt")), + os.path.join(search_dir, os.path.join("b", "bar.txp")), + os.path.join(search_dir, os.path.join("c", "bar.txt")), + ], + ), + ( + search_dir, + "*/*bar.tx?", + {"recursive": True}, + [ + os.path.join(search_dir, os.path.join("a", "foobar.txt")), + os.path.join(search_dir, os.path.join("b", "bar.txp")), + os.path.join(search_dir, os.path.join("c", "bar.txt")), + ], + ), + ], +) +def test_find_with_globbing(root, search_list, kwargs, expected): + matches = find(root, search_list, **kwargs) + assert sorted(matches) == sorted(expected) diff --git a/lib/spack/spack/test/llnl/util/filesystem.py b/lib/spack/spack/test/llnl/util/filesystem.py index 01379be94c0614..a0c98747698b20 100644 --- a/lib/spack/spack/test/llnl/util/filesystem.py +++ b/lib/spack/spack/test/llnl/util/filesystem.py @@ -6,7 +6,6 @@ """Tests for ``llnl/util/filesystem.py``""" import filecmp import os -import pathlib import shutil import stat import sys @@ -15,8 +14,7 @@ import pytest import llnl.util.filesystem as fs -import llnl.util.symlink -from llnl.util.symlink import _windows_can_symlink, islink, readlink, symlink +from llnl.util.symlink import islink, readlink, symlink import spack.paths @@ -1037,157 +1035,3 @@ def test_windows_sfn(tmpdir): assert "d\\LONGER~1" in fs.windows_sfn(d) assert "d\\LONGER~2" in fs.windows_sfn(e) shutil.rmtree(tmpdir.join("d")) - - -@pytest.fixture -def dir_structure_with_things_to_find(tmpdir): - """ - / - dir_one/ - file_one - dir_two/ - dir_three/ - dir_four/ - file_two - file_three - file_four - """ - dir_one = tmpdir.join("dir_one").ensure(dir=True) - tmpdir.join("dir_two").ensure(dir=True) - dir_three = tmpdir.join("dir_three").ensure(dir=True) - dir_four = dir_three.join("dir_four").ensure(dir=True) - - locations = {} - locations["file_one"] = str(dir_one.join("file_one").ensure()) - locations["file_two"] = str(dir_four.join("file_two").ensure()) - locations["file_three"] = str(dir_three.join("file_three").ensure()) - locations["file_four"] = str(tmpdir.join("file_four").ensure()) - - return str(tmpdir), locations - - -def test_find_max_depth(dir_structure_with_things_to_find): - root, locations = dir_structure_with_things_to_find - - # Make sure the paths we use to verify are absolute - assert os.path.isabs(locations["file_one"]) - - assert set(fs.find_max_depth(root, "file_*", 0)) == {locations["file_four"]} - assert set(fs.find_max_depth(root, "file_*", 1)) == { - locations["file_one"], - 
locations["file_three"], - locations["file_four"], - } - assert set(fs.find_max_depth(root, "file_two", 2)) == {locations["file_two"]} - assert not set(fs.find_max_depth(root, "file_two", 1)) - assert set(fs.find_max_depth(root, "file_two")) == {locations["file_two"]} - assert set(fs.find_max_depth(root, "file_*")) == set(locations.values()) - - -def test_find_max_depth_relative(dir_structure_with_things_to_find): - """find_max_depth should return absolute paths even if - the provided path is relative. - """ - root, locations = dir_structure_with_things_to_find - with fs.working_dir(root): - assert set(fs.find_max_depth(".", "file_*", 0)) == {locations["file_four"]} - assert set(fs.find_max_depth(".", "file_two", 2)) == {locations["file_two"]} - - -@pytest.mark.parametrize("recursive,max_depth", [(False, -1), (False, 1)]) -def test_max_depth_and_recursive_errors(tmpdir, recursive, max_depth): - root = str(tmpdir) - error_str = "cannot be set if recursive is False" - with pytest.raises(ValueError, match=error_str): - fs.find(root, ["some_file"], recursive=recursive, max_depth=max_depth) - - with pytest.raises(ValueError, match=error_str): - fs.find_libraries(["some_lib"], root, recursive=recursive, max_depth=max_depth) - - -def dir_structure_with_things_to_find_links(tmpdir, use_junctions=False): - """ - "lx-dy" means "level x, directory y" - "lx-fy" means "level x, file y" - "lx-sy" means "level x, symlink y" - - / - l1-d1/ - l2-d1/ - l3-s1 -> l1-d2 # points to directory above l2-d1 - l3-d2/ - l4-f1 - l3-s3 -> l1-d1 # cyclic link - l3-d4/ - l4-f2 - l1-d2/ - l2-f1 - l2-d2/ - l3-f3 - l2-s3 -> l2-d2 - l1-s3 -> l3-d4 # a link that "skips" a directory level - l1-s4 -> l2-s3 # a link to a link to a dir - """ - if sys.platform == "win32" and (not use_junctions) and (not _windows_can_symlink()): - pytest.skip("This Windows instance is not configured with symlink support") - - l1_d1 = tmpdir.join("l1-d1").ensure(dir=True) - l2_d1 = l1_d1.join("l2-d1").ensure(dir=True) - l3_d2 = l2_d1.join("l3-d2").ensure(dir=True) - l3_d4 = l2_d1.join("l3-d4").ensure(dir=True) - l1_d2 = tmpdir.join("l1-d2").ensure(dir=True) - l2_d2 = l1_d2.join("l1-d2").ensure(dir=True) - - if use_junctions: - link_fn = llnl.util.symlink._windows_create_junction - else: - link_fn = os.symlink - - link_fn(l1_d2, pathlib.Path(l2_d1) / "l3-s1") - link_fn(l1_d1, pathlib.Path(l2_d1) / "l3-s3") - link_fn(l3_d4, pathlib.Path(tmpdir) / "l1-s3") - l2_s3 = pathlib.Path(l1_d2) / "l2-s3" - link_fn(l2_d2, l2_s3) - link_fn(l2_s3, pathlib.Path(tmpdir) / "l1-s4") - - locations = {} - locations["l4-f1"] = str(l3_d2.join("l4-f1").ensure()) - locations["l4-f2-full"] = str(l3_d4.join("l4-f2").ensure()) - locations["l4-f2-link"] = str(pathlib.Path(tmpdir) / "l1-s3" / "l4-f2") - locations["l2-f1"] = str(l1_d2.join("l2-f1").ensure()) - locations["l2-f1-link"] = str(pathlib.Path(tmpdir) / "l1-d1" / "l2-d1" / "l3-s1" / "l2-f1") - locations["l3-f3-full"] = str(l2_d2.join("l3-f3").ensure()) - locations["l3-f3-link-l1"] = str(pathlib.Path(tmpdir) / "l1-s4" / "l3-f3") - - return str(tmpdir), locations - - -def _check_find_links(root, locations): - root = pathlib.Path(root) - assert set(fs.find_max_depth(root, "l4-f1")) == {locations["l4-f1"]} - assert set(fs.find_max_depth(root / "l1-s3", "l4-f2", 0)) == {locations["l4-f2-link"]} - assert set(fs.find_max_depth(root / "l1-d1", "l2-f1")) == {locations["l2-f1-link"]} - # File is accessible via symlink and subdir, the link path will be - # searched first, and the directory will not be searched again when - # it is 
encountered the second time (via not-link) in the traversal - assert set(fs.find_max_depth(root, "l4-f2")) == {locations["l4-f2-link"]} - # File is accessible only via the dir, so the full file path should - # be reported - assert set(fs.find_max_depth(root / "l1-d1", "l4-f2")) == {locations["l4-f2-full"]} - # Check following links to links - assert set(fs.find_max_depth(root, "l3-f3")) == {locations["l3-f3-link-l1"]} - - -@pytest.mark.parametrize( - "use_junctions", - [ - False, - pytest.param( - True, - marks=pytest.mark.skipif(sys.platform != "win32", reason="Only Windows has junctions"), - ), - ], -) -def test_find_max_depth_symlinks(tmpdir, use_junctions): - root, locations = dir_structure_with_things_to_find_links(tmpdir, use_junctions=use_junctions) - _check_find_links(root, locations) diff --git a/var/spack/repos/builtin.mock/packages/attributes-foo/package.py b/var/spack/repos/builtin.mock/packages/attributes-foo/package.py index b882fc9b6595b6..31c88f4b08564a 100644 --- a/var/spack/repos/builtin.mock/packages/attributes-foo/package.py +++ b/var/spack/repos/builtin.mock/packages/attributes-foo/package.py @@ -44,7 +44,7 @@ def libs(self): # Header provided by the bar virutal package @property def bar_headers(self): - return find_headers("bar", root=self.home.include, recursive=True) + return find_headers("bar/bar", root=self.home.include, recursive=False) # Libary provided by the bar virtual package @property @@ -59,7 +59,7 @@ def baz_home(self): # Header provided by the baz virtual package @property def baz_headers(self): - return find_headers("baz", root=self.baz_home.include, recursive=True) + return find_headers("baz/baz", root=self.baz_home.include, recursive=False) # Library provided by the baz virtual package @property From ff26d2f8331c900123f6659f762e0cfa1aba47e2 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Fri, 8 Nov 2024 03:16:01 -0500 Subject: [PATCH 116/208] `spack env track` command (#41897) This PR adds a sub-command to `spack env` (`track`) which allows users to add/link anonymous environments into their installation as named environments. This allows users to more easily track their installed packages and the environments that require them. For example, with the addition of #41731 it's now easier to remove all packages not required by any environments with: ``` spack gc -bE ``` #### Usage ``` spack env track /path/to/env ==> Tracking environment in /path/to/env ==> You can activate this environment with: ==> spack env activate env ``` By default `track /path/to/env` will use the last directory in the path as the name of the environment. However, users may customize the name of the linked environment with `-n | --name`, as shown below. ``` spack env track /path/to/env --name foo ==> Tracking environment in /path/to/env ==> You can activate this environment with: ==> spack env activate foo ``` When removing a linked environment, Spack will remove the link to the environment but will keep the structure of the environment within the directory. This allows users to remove a linked environment from their installation without deleting it from a shared repository. There is a `spack env untrack` command that can be used to *only* untrack a tracked environment -- it will fail if it is used on a managed environment. Users can also use `spack env remove` to untrack an environment. This allows users to continue to share environments in git repositories while also having the dependencies of those environments be remembered by Spack.
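Untracking is non-destructive: only the link registered with Spack is removed, and the environment directory itself is left in place. As an illustrative sketch (output paraphrased from the messages added in this PR; the path is made up):

```
spack env untrack foo
==> Successfully untracked environment 'foo', but it can still be found at:

    /path/to/foo
```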
--------- Co-authored-by: Todd Gamblin --- lib/spack/spack/cmd/env.py | 237 +++++++++++++++++---- lib/spack/spack/environment/environment.py | 4 +- lib/spack/spack/test/cmd/env.py | 100 ++++++++- share/spack/spack-completion.bash | 20 +- share/spack/spack-completion.fish | 22 ++ 5 files changed, 331 insertions(+), 52 deletions(-) diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index 2136bb1305c253..5a80f0e1a8a8e3 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -10,11 +10,12 @@ import sys import tempfile from pathlib import Path -from typing import List, Optional +from typing import List, Optional, Set import llnl.string as string import llnl.util.filesystem as fs import llnl.util.tty as tty +from llnl.util.symlink import islink, symlink from llnl.util.tty.colify import colify from llnl.util.tty.color import cescape, colorize @@ -50,6 +51,8 @@ "update", "revert", "depfile", + "track", + "untrack", ] @@ -447,78 +450,220 @@ def env_deactivate(args): # -# env remove +# env track # -def env_remove_setup_parser(subparser): - """remove managed environment(s) +def env_track_setup_parser(subparser): + """track an environment from a directory in Spack""" + subparser.add_argument("-n", "--name", help="custom environment name") + subparser.add_argument("dir", help="path to environment") + arguments.add_common_arguments(subparser, ["yes_to_all"]) - remove existing environment(s) managed by Spack - directory environments and manifests embedded in repositories must be - removed manually - """ - subparser.add_argument( - "rm_env", metavar="env", nargs="+", help="name(s) of the environment(s) being removed" - ) - arguments.add_common_arguments(subparser, ["yes_to_all"]) - subparser.add_argument( - "-f", - "--force", - action="store_true", - help="force removal even when included in other environment(s)", +def env_track(args): + src_path = os.path.abspath(args.dir) + if not ev.is_env_dir(src_path): + tty.die("Cannot track environment. Path doesn't contain an environment") + + if args.name: + name = args.name + else: + name = os.path.basename(src_path) + + try: + dst_path = ev.environment_dir_from_name(name, exists_ok=False) + except ev.SpackEnvironmentError: + tty.die( + f"An environment named {name} already exists. Set a name with:" + "\n\n" + f" spack env track --name NAME {src_path}\n" + ) + + symlink(src_path, dst_path) + + tty.msg(f"Tracking environment in {src_path}") + tty.msg( + "You can now activate this environment with the following command:\n\n" + f" spack env activate {name}\n" ) -def env_remove(args): - """remove existing environment(s)""" - remove_envs = [] - valid_envs = [] - bad_envs = [] +# +# env remove & untrack helpers +# +def filter_managed_env_names(env_names: Set[str]) -> Set[str]: + tracked_env_names = {e for e in env_names if islink(ev.environment_dir_from_name(e))} + managed_env_names = env_names - set(tracked_env_names) + + num_managed_envs = len(managed_env_names) + managed_envs_str = " ".join(managed_env_names) + if num_managed_envs >= 2: + tty.error( + f"The following are not tracked environments. " + "To remove them completely, run:" + "\n\n" + f" spack env rm {managed_envs_str}\n" + ) + + elif num_managed_envs > 0: + tty.error( + f"'{managed_envs_str}' is not a tracked env. 
" + "To remove it completely run," + "\n\n" + f" spack env rm {managed_envs_str}\n" + ) + + return tracked_env_names + - for env_name in ev.all_environment_names(): +def get_valid_envs(env_names: Set[str]) -> Set[ev.Environment]: + valid_envs = set() + for env_name in env_names: try: env = ev.read(env_name) - valid_envs.append(env) + valid_envs.add(env) - if env_name in args.rm_env: - remove_envs.append(env) except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError): - if env_name in args.rm_env: - bad_envs.append(env_name) + pass - # Check if remove_env is included from another env before trying to remove - for env in valid_envs: - for remove_env in remove_envs: - # don't check if environment is included to itself + return valid_envs + + +def _env_untrack_or_remove( + env_names: List[str], remove: bool = False, force: bool = False, yes_to_all: bool = False +): + all_env_names = set(ev.all_environment_names()) + known_env_names = set(env_names).intersection(all_env_names) + unknown_env_names = set(env_names) - known_env_names + + # print error for unknown environments + for env_name in unknown_env_names: + tty.error(f"Environment '{env_name}' does not exist") + + # if only unlinking is allowed, remove all environments + # which do not point internally at symlinks + if not remove: + env_names_to_remove = filter_managed_env_names(known_env_names) + else: + env_names_to_remove = known_env_names + + # initalize all environments with valid spack.yaml configs + all_valid_envs = get_valid_envs(all_env_names) + + # build a task list of environments and bad env names to remove + envs_to_remove = [e for e in all_valid_envs if e.name in env_names_to_remove] + bad_env_names_to_remove = env_names_to_remove - {e.name for e in envs_to_remove} + for remove_env in envs_to_remove: + for env in all_valid_envs: + # don't check if an environment is included to itself if env.name == remove_env.name: continue + # check if an environment is included un another if remove_env.path in env.included_concrete_envs: - msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"' - if args.force: + msg = f"Environment '{remove_env.name}' is used by environment '{env.name}'" + if force: tty.warn(msg) else: - tty.die(msg) - - if not args.yes_to_all: - environments = string.plural(len(args.rm_env), "environment", show_n=False) - envs = string.comma_and(args.rm_env) - answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False) + tty.error(msg) + envs_to_remove.remove(remove_env) + + # ask the user if they really want to remove the known environments + # force should do the same as yes to all here following the symantics of rm + if not (yes_to_all or force) and (envs_to_remove or bad_env_names_to_remove): + environments = string.plural(len(env_names_to_remove), "environment", show_n=False) + envs = string.comma_and(list(env_names_to_remove)) + answer = tty.get_yes_or_no( + f"Really {'remove' if remove else 'untrack'} {environments} {envs}?", default=False + ) if not answer: tty.die("Will not remove any environments") - for env in remove_envs: + # keep track of the environments we remove for later printing the exit code + removed_env_names = [] + for env in envs_to_remove: name = env.name - if env.active: - tty.die(f"Environment {name} can't be removed while activated.") - env.destroy() - tty.msg(f"Successfully removed environment '{name}'") + if not force and env.active: + tty.error( + f"Environment '{name}' can't be " + f"{'removed' if remove else 'untracked'} while 
activated." + ) + continue + # Get path to check if environment is a tracked / symlinked environment + if islink(env.path): + real_env_path = os.path.realpath(env.path) + os.unlink(env.path) + tty.msg( + f"Successfully untracked environment '{name}', " + "but it can still be found at:\n\n" + f" {real_env_path}\n" + ) + else: + env.destroy() + tty.msg(f"Successfully removed environment '{name}'") + + removed_env_names.append(env.name) - for bad_env_name in bad_envs: + for bad_env_name in bad_env_names_to_remove: shutil.rmtree( spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True) ) tty.msg(f"Successfully removed environment '{bad_env_name}'") + removed_env_names.append(bad_env_name) + + # Following the design of Linux rm, we should exit with a status of 1 + # anytime we cannot delete every environment the user asks for. + # However, we should still process all the environments we know about + # and delete them instead of failing on the first unknown environment. + if len(removed_env_names) < len(known_env_names): + sys.exit(1) + + +# +# env untrack +# +def env_untrack_setup_parser(subparser): + """untrack environment(s) tracked by Spack""" + subparser.add_argument("env", nargs="+", help="tracked environment name") + subparser.add_argument( + "-f", "--force", action="store_true", help="force unlink even when environment is active" + ) + arguments.add_common_arguments(subparser, ["yes_to_all"]) + + +def env_untrack(args): + _env_untrack_or_remove( + env_names=args.env, force=args.force, yes_to_all=args.yes_to_all, remove=False + ) + + +# +# env remove +# +def env_remove_setup_parser(subparser): + """remove managed environment(s) + + remove existing environment(s) managed by Spack + + directory environments and manifests embedded in repositories must be + removed manually + """ + subparser.add_argument( + "rm_env", metavar="env", nargs="+", help="name(s) of the environment(s) being removed" + ) + arguments.add_common_arguments(subparser, ["yes_to_all"]) + subparser.add_argument( + "-f", + "--force", + action="store_true", + help="force removal even when included in other environment(s)", + ) + + +def env_remove(args): + """remove existing environment(s)""" + _env_untrack_or_remove( + env_names=args.rm_env, remove=True, force=args.force, yes_to_all=args.yes_to_all + ) # diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index de4bc851006e8c..9a3361c7347466 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -20,7 +20,7 @@ import llnl.util.tty as tty import llnl.util.tty.color as clr from llnl.util.link_tree import ConflictingSpecsError -from llnl.util.symlink import readlink, symlink +from llnl.util.symlink import islink, readlink, symlink import spack import spack.caches @@ -668,7 +668,7 @@ def from_dict(base_path, d): @property def _current_root(self): - if not os.path.islink(self.root): + if not islink(self.root): return None root = readlink(self.root) diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 87941de137ec4a..099e6306ac507f 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -117,6 +117,99 @@ def check_viewdir_removal(viewdir): ) == ["projections.yaml"] + +def test_env_track_nonexistent_path_fails(capfd): + with pytest.raises(spack.main.SpackCommandError): + env("track", "path/does/not/exist") + + out, _ = capfd.readouterr() + assert "doesn't contain an environment" in out + + +def 
test_env_track_existing_env_fails(capfd): + env("create", "track_test") + + with pytest.raises(spack.main.SpackCommandError): + env("track", "--name", "track_test", ev.environment_dir_from_name("track_test")) + + out, _ = capfd.readouterr() + assert "environment named track_test already exists" in out + + +def test_env_track_valid(tmp_path): + with fs.working_dir(str(tmp_path)): + # create an independent environment + env("create", "-d", ".") + + # test tracking an environment in known store + env("track", "--name", "test1", ".") + + # test removing environment to ensure independent isn't deleted + env("rm", "-y", "test1") + + assert os.path.isfile("spack.yaml") + + +def test_env_untrack_valid(tmp_path): + with fs.working_dir(str(tmp_path)): + # create an independent environment + env("create", "-d", ".") + + # test tracking an environment in known store + env("track", "--name", "test_untrack", ".") + env("untrack", "--yes-to-all", "test_untrack") + + # check that environment was successfully untracked + out = env("ls") + assert "test_untrack" not in out + + +def test_env_untrack_invalid_name(): + # test untracking an environment that doesn't exist + env_name = "invalid_environment_untrack" + + out = env("untrack", env_name) + + assert f"Environment '{env_name}' does not exist" in out + + +def test_env_untrack_when_active(tmp_path, capfd): + env_name = "test_untrack_active" + + with fs.working_dir(str(tmp_path)): + # create an independent environment + env("create", "-d", ".") + + # test tracking an environment in known store + env("track", "--name", env_name, ".") + + active_env = ev.read(env_name) + with active_env: + with pytest.raises(spack.main.SpackCommandError): + env("untrack", "--yes-to-all", env_name) + + # check that environment could not be untracked while active + out, _ = capfd.readouterr() + assert f"'{env_name}' can't be untracked while activated" in out + + env("untrack", "-f", env_name) + out = env("ls") + assert env_name not in out + + +def test_env_untrack_managed(tmp_path, capfd): + env_name = "test_untrack_managed" + + # create a managed environment + env("create", env_name) + + with pytest.raises(spack.main.SpackCommandError): + env("untrack", env_name) + + # check that a managed environment cannot be untracked + out, _ = capfd.readouterr() + assert f"'{env_name}' is not a tracked env" in out + + def test_add(): e = ev.create("test") e.add("mpileaks") @@ -128,6 +221,7 @@ def test_change_match_spec(): e = ev.read("test") with e: + add("mpileaks@2.1") add("mpileaks@2.2") @@ -688,7 +782,7 @@ def test_force_remove_included_env(): rm_output = env("remove", "-f", "-y", "test") list_output = env("list") - assert '"test" is being used by environment "combined_env"' in rm_output + assert "'test' is used by environment 'combined_env'" in rm_output assert "test" not in list_output @@ -4239,13 +4333,13 @@ def test_spack_package_ids_variable(tmpdir, mock_packages): # Include in Makefile and create target that depend on SPACK_PACKAGE_IDS with open(makefile_path, "w") as f: f.write( - r""" + """ all: post-install include include.mk example/post-install/%: example/install/% - $(info post-install: $(HASH)) # noqa: W191,E101 +\t$(info post-install: $(HASH)) # noqa: W191,E101 post-install: $(addprefix example/post-install/,$(example/SPACK_PACKAGE_IDS)) """ diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index d8c58143c97a34..8946bf1dcc888d 100644 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -1023,7 +1023,7 
@@ _spack_env() { then SPACK_COMPREPLY="-h --help" else - SPACK_COMPREPLY="activate deactivate create remove rm rename mv list ls status st loads view update revert depfile" + SPACK_COMPREPLY="activate deactivate create remove rm rename mv list ls status st loads view update revert depfile track untrack" fi } @@ -1141,6 +1141,24 @@ _spack_env_depfile() { fi } +_spack_env_track() { + if $list_options + then + SPACK_COMPREPLY="-h --help -n --name -y --yes-to-all" + else + SPACK_COMPREPLY="" + fi +} + +_spack_env_untrack() { + if $list_options + then + SPACK_COMPREPLY="-h --help -f --force -y --yes-to-all" + else + _environments + fi +} + _spack_extensions() { if $list_options then diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index afea0b1a57af6c..17b7cd42e46f34 100644 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -1488,6 +1488,8 @@ complete -c spack -n '__fish_spack_using_command_pos 0 env' -f -a view -d 'manag complete -c spack -n '__fish_spack_using_command_pos 0 env' -f -a update -d 'update the environment manifest to the latest schema format' complete -c spack -n '__fish_spack_using_command_pos 0 env' -f -a revert -d 'restore the environment manifest to its previous format' complete -c spack -n '__fish_spack_using_command_pos 0 env' -f -a depfile -d 'generate a depfile to exploit parallel builds across specs' +complete -c spack -n '__fish_spack_using_command_pos 0 env' -f -a track -d 'track an environment from a directory in Spack' +complete -c spack -n '__fish_spack_using_command_pos 0 env' -f -a untrack -d 'untrack environment(s) tracked by Spack' complete -c spack -n '__fish_spack_using_command env' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command env' -s h -l help -d 'show this help message and exit' @@ -1669,6 +1671,26 @@ complete -c spack -n '__fish_spack_using_command env depfile' -s o -l output -r complete -c spack -n '__fish_spack_using_command env depfile' -s G -l generator -r -f -a make complete -c spack -n '__fish_spack_using_command env depfile' -s G -l generator -r -d 'specify the depfile type (only supports `make`)' + +# spack env track +set -g __fish_spack_optspecs_spack_env_track h/help n/name= y/yes-to-all +complete -c spack -n '__fish_spack_using_command_pos 0 env track' -f -a '(__fish_spack_environments)' +complete -c spack -n '__fish_spack_using_command env track' -s h -l help -f -a help +complete -c spack -n '__fish_spack_using_command env track' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command env track' -s n -l name -r -f -a name +complete -c spack -n '__fish_spack_using_command env track' -s n -l name -r -d 'custom environment name' +complete -c spack -n '__fish_spack_using_command env track' -s y -l yes-to-all -f -a yes_to_all +complete -c spack -n '__fish_spack_using_command env track' -s y -l yes-to-all -d 'assume "yes" is the answer to every confirmation request' + +# spack env untrack +set -g __fish_spack_optspecs_spack_env_untrack h/help f/force y/yes-to-all +complete -c spack -n '__fish_spack_using_command_pos_remainder 0 env untrack' -f -a '(__fish_spack_environments)' +complete -c spack -n '__fish_spack_using_command env untrack' -s h -l help -f -a help +complete -c spack -n '__fish_spack_using_command env untrack' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command env untrack' -s f -l force -f -a force +complete -c spack -n '__fish_spack_using_command 
env untrack' -s f -l force -d 'force unlink even when environment is active' +complete -c spack -n '__fish_spack_using_command env untrack' -s y -l yes-to-all -f -a yes_to_all +complete -c spack -n '__fish_spack_using_command env untrack' -s y -l yes-to-all -d 'assume "yes" is the answer to every confirmation request' + # spack extensions set -g __fish_spack_optspecs_spack_extensions h/help l/long L/very-long d/deps p/paths s/show= complete -c spack -n '__fish_spack_using_command_pos_remainder 0 extensions' -f -a '(__fish_spack_extensions)' From eb256476d2b367ac02a0957f1de922e5a5564ac2 Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Fri, 8 Nov 2024 17:07:50 +0100 Subject: [PATCH 117/208] pika: add 0.30.0 (#47498) --- var/spack/repos/builtin/packages/pika/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index f5fe4b9727acfb..8a9b901a5c7fb8 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -19,6 +19,7 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): license("BSL-1.0") + version("0.30.0", sha256="1798bf7de2505bc707bf95716fda8de5630b2e2ae54a6c4ef59f9931394d31cc") version("0.29.0", sha256="2c61079f52f3e135a8d0845a993e6e4fb64031fbee9b5cef0ead57efb6175e3c") version("0.28.0", sha256="a64ebac04135c0c8d392ddcd8d683fe02e2c0782abfe130754244d58f27ae6cf") version("0.27.0", sha256="4a58dc4014edc2074399e4a6ecfa244537c89ce1319b3e14ff3dfe617fb9f9e8") From 4778d2d332d36c3db0054746d75531d6e357effb Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 8 Nov 2024 17:51:58 +0100 Subject: [PATCH 118/208] Add missing imports (#47496) --- lib/spack/spack/test/build_environment.py | 2 ++ lib/spack/spack/test/concretize_requirements.py | 2 ++ lib/spack/spack/test/flag_mixing.py | 1 + 3 files changed, 5 insertions(+) diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py index 281d79f8563e7c..dd42d5cb458306 100644 --- a/lib/spack/spack/test/build_environment.py +++ b/lib/spack/spack/test/build_environment.py @@ -15,6 +15,8 @@ from llnl.util.filesystem import HeaderList, LibraryList import spack.build_environment +import spack.compiler +import spack.compilers import spack.config import spack.deptypes as dt import spack.package_base diff --git a/lib/spack/spack/test/concretize_requirements.py b/lib/spack/spack/test/concretize_requirements.py index be66b2b0a82588..9e703be51fe08f 100644 --- a/lib/spack/spack/test/concretize_requirements.py +++ b/lib/spack/spack/test/concretize_requirements.py @@ -10,8 +10,10 @@ import spack.config import spack.error import spack.package_base +import spack.paths import spack.repo import spack.solver.asp +import spack.store import spack.util.spack_yaml as syaml import spack.version from spack.installer import PackageInstaller diff --git a/lib/spack/spack/test/flag_mixing.py b/lib/spack/spack/test/flag_mixing.py index 6009ade058e5bd..878c0c71efc85e 100644 --- a/lib/spack/spack/test/flag_mixing.py +++ b/lib/spack/spack/test/flag_mixing.py @@ -8,6 +8,7 @@ import spack.config import spack.environment as ev +import spack.paths import spack.repo import spack.util.spack_yaml as syaml from spack.spec import Spec From 907a37145f9434d36b238ef8f435f157a7fb129b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 8 Nov 2024 22:55:53 +0100 Subject: [PATCH 119/208] llnl.util.filesystem: multiple entrypoints and max_depth (#47495) If a package `foo` doesn't 
implement `libs`, the default was to search recursively for `libfoo` whenever asking for `spec[foo].libs` (this also happens automatically if a package includes `foo` as a link dependency). This can lead to some strange behavior: 1. A package that is normally used as a build dependency (e.g. `cmake` at one point) is referenced like `depends_on(cmake)` which leads to a fully-recursive search for `libcmake` (this can take "forever" when CMake is registered as an external with a prefix like `/usr`, particularly on NFS mounts). 2. A similar hang can occur if a package is registered as an external with an incorrect prefix - [x] Update the default library search to stop after a maximum depth (by default, search the root prefix and each directory in it, but no lower). - [x] The following is a list of known changes to `find` compared to `develop`: 1. Matching directories are no longer returned -- `find` consistently only finds non-dirs, even at `max_depth` 2. Symlinked directories are followed (needed to support max_depth) 3. `find(..., "dir/*.txt")` is allowed, for finding files inside certain dirs. These "complex" patterns are delegated to `glob`, like they are on `develop`. 4. `root` and `files` arguments both support generic sequences, and `root` allows both `str` and `path` types. This allows us to specify multiple entry points to `find`. --------- Co-authored-by: Peter Scheibel --- lib/spack/llnl/util/filesystem.py | 265 +++++++++++++----- lib/spack/llnl/util/lang.py | 17 +- lib/spack/spack/test/llnl/util/file_list.py | 32 +-- lib/spack/spack/test/llnl/util/filesystem.py | 228 ++++++++++++++- lib/spack/spack/test/llnl/util/lang.py | 16 ++ lib/spack/spack/test/test_suite.py | 8 +- .../packages/attributes-foo/package.py | 4 +- 7 files changed, 457 insertions(+), 113 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 00bb270151908c..83cbe45104377b 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -20,11 +20,23 @@ import tempfile from contextlib import contextmanager from itertools import accumulate -from typing import Callable, Iterable, List, Match, Optional, Tuple, Union +from typing import ( + Callable, + Deque, + Dict, + Iterable, + List, + Match, + Optional, + Sequence, + Set, + Tuple, + Union, +) import llnl.util.symlink from llnl.util import tty -from llnl.util.lang import dedupe, memoized +from llnl.util.lang import dedupe, fnmatch_translate_multiple, memoized from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink from ..path import path_to_os_path, system_path_filter @@ -85,6 +97,8 @@ "visit_directory_tree", ] +Path = Union[str, pathlib.Path] + if sys.version_info < (3, 7, 4): # monkeypatch shutil.copystat to fix PermissionError when copying read-only # files on Lustre when using Python < 3.7.4 @@ -1673,105 +1687,199 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) return FindFirstFile(root, *files, bfs_depth=bfs_depth).find() -def find(root, files, recursive=True): - """Search for ``files`` starting from the ``root`` directory. - - Like GNU/BSD find but written entirely in Python. - - Examples: - - .. code-block:: console - - $ find /usr -name python - - is equivalent to: - - >>> find('/usr', 'python') - - .. 
code-block:: console - - $ find /usr/local/bin -maxdepth 1 -name python - - is equivalent to: - - >>> find('/usr/local/bin', 'python', recursive=False) +def find( + root: Union[Path, Sequence[Path]], + files: Union[str, Sequence[str]], + recursive: bool = True, + max_depth: Optional[int] = None, +) -> List[str]: + """Finds all non-directory files matching the patterns from ``files`` starting from ``root``. + This function returns a deterministic result for the same input and directory structure when + run multiple times. Symlinked directories are followed, and unique directories are searched + only once. Each matching file is returned only once at the lowest depth in case multiple paths + exist due to symlinked directories. Accepts any glob characters accepted by fnmatch: ========== ==================================== Pattern Meaning ========== ==================================== ``*`` matches zero or more characters ``?`` matches any single character ``[seq]`` matches any character in ``seq`` ``[!seq]`` matches any character not in ``seq`` ========== ==================================== + Examples: + + >>> find("/usr", "*.txt", recursive=True, max_depth=2) + + finds all files with the extension ``.txt`` in the directory ``/usr`` and subdirectories up to + depth 2. + + >>> find(["/usr", "/var"], ["*.txt", "*.log"], recursive=True) + + finds all files with the extension ``.txt`` or ``.log`` in the directories ``/usr`` and + ``/var`` at any depth. + + >>> find("/usr", "GL/*.h", recursive=True) + + finds all header files in any directory named ``GL`` at any depth under ``/usr``. + Parameters: - root (str): The root directory to start searching from - files (str or collections.abc.Sequence): Library name(s) to search for - recursive (bool): if False search only root folder, - if True descends top-down from the root. Defaults to True. + root: One or more root directories to start searching from + files: One or more filename patterns to search for + recursive: if False search only root, if True descends from roots. Defaults to True. + max_depth: if set, don't search below this depth. Cannot be set if recursive is False - Returns: - list: The files that have been found + Returns a list of absolute, matching file paths. """ + if isinstance(root, (str, pathlib.Path)): + root = [root] + elif not isinstance(root, collections.abc.Sequence): + raise TypeError(f"'root' arg must be a path or a sequence of paths, not '{type(root)}'") + if isinstance(files, str): files = [files] + elif not isinstance(files, collections.abc.Sequence): + raise TypeError(f"'files' arg must be str or a sequence of str, not '{type(files)}'") - if recursive: - tty.debug(f"Find (recursive): {root} {str(files)}") - result = _find_recursive(root, files) - else: - tty.debug(f"Find (not recursive): {root} {str(files)}") - result = _find_non_recursive(root, files) + # If recursive is false, max_depth can only be None or 0 + if max_depth and not recursive: + raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False") - tty.debug(f"Find complete: {root} {str(files)}") + tty.debug(f"Find (max depth = {max_depth}): {root} {files}") + if not recursive: + max_depth = 0 + elif max_depth is None: + max_depth = sys.maxsize + result = _find_max_depth(root, files, max_depth) + tty.debug(f"Find complete: {root} {files}") return result -@system_path_filter -def _find_recursive(root, search_files): - # The variable here is **on purpose** a defaultdict.
The idea is that - # we want to poke the filesystem as little as possible, but still maintain - # stability in the order of the answer. Thus we are recording each library - # found in a key, and reconstructing the stable order later. - found_files = collections.defaultdict(list) - - # Make the path absolute to have os.walk also return an absolute path - root = os.path.abspath(root) - for path, _, list_files in os.walk(root): - for search_file in search_files: - matches = glob.glob(os.path.join(path, search_file)) - matches = [os.path.join(path, x) for x in matches] - found_files[search_file].extend(matches) +def _log_file_access_issue(e: OSError, path: str) -> None: + errno_name = errno.errorcode.get(e.errno, "UNKNOWN") + tty.debug(f"find must skip {path}: {errno_name} {e}") - answer = [] - for search_file in search_files: - answer.extend(found_files[search_file]) - return answer +def _file_id(s: os.stat_result) -> Tuple[int, int]: + # Note: on windows, st_ino is the file index and st_dev is the volume serial number. See + # https://github.com/python/cpython/blob/3.9/Python/fileutils.c + return (s.st_ino, s.st_dev) -@system_path_filter -def _find_non_recursive(root, search_files): - # The variable here is **on purpose** a defaultdict as os.list_dir - # can return files in any order (does not preserve stability) - found_files = collections.defaultdict(list) +def _dedupe_files(paths: List[str]) -> List[str]: + """Deduplicate files by inode and device, dropping files that cannot be accessed.""" + unique_files: List[str] = [] + # tuple of (inode, device) for each file without following symlinks + visited: Set[Tuple[int, int]] = set() + for path in paths: + try: + stat_info = os.lstat(path) + except OSError as e: + _log_file_access_issue(e, path) + continue + file_id = _file_id(stat_info) + if file_id not in visited: + unique_files.append(path) + visited.add(file_id) + return unique_files + + +def _find_max_depth( + roots: Sequence[Path], globs: Sequence[str], max_depth: int = sys.maxsize +) -> List[str]: + """See ``find`` for the public API.""" + # We optimize for the common case of simple filename only patterns: a single, combined regex + # is used. For complex patterns that include path components, we use a slower glob call from + # every directory we visit within max_depth. + filename_only_patterns = { + f"pattern_{i}": os.path.normcase(x) for i, x in enumerate(globs) if "/" not in x + } + complex_patterns = {f"pattern_{i}": x for i, x in enumerate(globs) if "/" in x} + regex = re.compile(fnmatch_translate_multiple(filename_only_patterns)) + # Ordered dictionary that keeps track of what pattern found which files + matched_paths: Dict[str, List[str]] = {f"pattern_{i}": [] for i, _ in enumerate(globs)} + # Ensure returned paths are always absolute + roots = [os.path.abspath(r) for r in roots] + # Breadth-first search queue. Each element is a tuple of (depth, dir) + dir_queue: Deque[Tuple[int, str]] = collections.deque() + # Set of visited directories. 
Each element is a tuple of (inode, device) + visited_dirs: Set[Tuple[int, int]] = set() + + for root in roots: + try: + stat_root = os.stat(root) + except OSError as e: + _log_file_access_issue(e, root) + continue + dir_id = _file_id(stat_root) + if dir_id not in visited_dirs: + dir_queue.appendleft((0, root)) + visited_dirs.add(dir_id) - # Make the path absolute to have absolute path returned - root = os.path.abspath(root) + while dir_queue: + depth, curr_dir = dir_queue.pop() + try: + dir_iter = os.scandir(curr_dir) + except OSError as e: + _log_file_access_issue(e, curr_dir) + continue - for search_file in search_files: - matches = glob.glob(os.path.join(root, search_file)) - matches = [os.path.join(root, x) for x in matches] - found_files[search_file].extend(matches) + # Use glob.glob for complex patterns. + for pattern_name, pattern in complex_patterns.items(): + matched_paths[pattern_name].extend( + path + for path in glob.glob(os.path.join(curr_dir, pattern)) + if not os.path.isdir(path) + ) - answer = [] - for search_file in search_files: - answer.extend(found_files[search_file]) + with dir_iter: + ordered_entries = sorted(dir_iter, key=lambda x: x.name) + for dir_entry in ordered_entries: + try: + it_is_a_dir = dir_entry.is_dir(follow_symlinks=True) + except OSError as e: + # Possible permission issue, or a symlink that cannot be resolved (ELOOP). + _log_file_access_issue(e, dir_entry.path) + continue - return answer + if it_is_a_dir: + if depth >= max_depth: + continue + try: + # The stat should be performed in a try/except block. We repeat that here + # vs. moving to the above block because we only want to call `stat` if we + # haven't exceeded our max_depth + if sys.platform == "win32": + # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we + # have to call os.stat + stat_info = os.stat(dir_entry.path, follow_symlinks=True) + else: + stat_info = dir_entry.stat(follow_symlinks=True) + except OSError as e: + _log_file_access_issue(e, dir_entry.path) + continue + + dir_id = _file_id(stat_info) + if dir_id not in visited_dirs: + dir_queue.appendleft((depth + 1, dir_entry.path)) + visited_dirs.add(dir_id) + elif filename_only_patterns: + m = regex.match(os.path.normcase(dir_entry.name)) + if not m: + continue + for pattern_name in filename_only_patterns: + if m.group(pattern_name): + matched_paths[pattern_name].append(dir_entry.path) + break + + all_matching_paths = [path for paths in matched_paths.values() for path in paths] + + # we only dedupe files if we have any complex patterns, since only they can match the same file + # multiple times + return _dedupe_files(all_matching_paths) if complex_patterns else all_matching_paths # Utilities for libraries and headers @@ -2210,7 +2318,9 @@ def find_system_libraries(libraries, shared=True): return libraries_found -def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): +def find_libraries( + libraries, root, shared=True, recursive=False, runtime=True, max_depth: Optional[int] = None +): """Returns an iterable of full paths to libraries found in a root dir. Accepts any glob characters accepted by fnmatch: @@ -2231,6 +2341,8 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): otherwise for static. Defaults to True. recursive (bool): if False search only root folder, if True descends top-down from the root. Defaults to False. + max_depth (int): if set, don't search below this depth. 
Cannot be set + if recursive is False runtime (bool): Windows only option, no-op elsewhere. If true, search for runtime shared libs (.DLL), otherwise, search for .Lib files. If shared is false, this has no meaning. @@ -2239,6 +2351,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): Returns: LibraryList: The libraries that have been found """ + if isinstance(libraries, str): libraries = [libraries] elif not isinstance(libraries, collections.abc.Sequence): @@ -2271,8 +2384,10 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes] if not recursive: + if max_depth: + raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False") # If not recursive, look for the libraries directly in root - return LibraryList(find(root, libraries, False)) + return LibraryList(find(root, libraries, recursive=False)) # To speedup the search for external packages configured e.g. in /usr, # perform first non-recursive search in root/lib then in root/lib64 and @@ -2290,7 +2405,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): if found_libs: break else: - found_libs = find(root, libraries, True) + found_libs = find(root, libraries, recursive=True, max_depth=max_depth) return LibraryList(found_libs) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index f43773346a948d..4913a50fad930e 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -5,15 +5,17 @@ import collections.abc import contextlib +import fnmatch import functools import itertools import os import re import sys import traceback +import typing import warnings from datetime import datetime, timedelta -from typing import Callable, Iterable, List, Tuple, TypeVar +from typing import Callable, Dict, Iterable, List, Tuple, TypeVar # Ignore emacs backups when listing modules ignore_modules = r"^\.#|~$" @@ -859,6 +861,19 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]: return line_list +if sys.version_info >= (3, 9): + PatternStr = re.Pattern[str] +else: + PatternStr = typing.Pattern[str] + + +def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str: + """Similar to ``fnmatch.translate``, but takes an ordered dictionary where keys are pattern + names, and values are filename patterns. The output is a regex that matches any of the + patterns in order, and named capture groups are used to identify which pattern matched.""" + return "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named_patterns.items()) + + @contextlib.contextmanager def nullcontext(*args, **kwargs): """Empty context manager. 
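As a small, runnable sketch of the reworked ``find`` (this relies only on behavior documented in the docstring above and exercised by the tests below; the tiny directory tree is invented for illustration):

```
import pathlib
import tempfile

import llnl.util.filesystem as fs

with tempfile.TemporaryDirectory() as tmp:
    root = pathlib.Path(tmp)
    (root / "a" / "b").mkdir(parents=True)
    (root / "a" / "one.txt").write_text("")
    (root / "a" / "b" / "two.log").write_text("")

    # max_depth=0 considers only entries directly inside the given root
    assert fs.find(root / "a", ["*.txt", "*.log"], max_depth=0) == [str(root / "a" / "one.txt")]

    # multiple roots (str or pathlib.Path) and multiple patterns are accepted;
    # results are absolute paths, grouped by the first pattern that matched
    found = fs.find([root / "a", root / "a" / "b"], ["*.txt", "*.log"])
    assert sorted(found) == sorted(
        [str(root / "a" / "one.txt"), str(root / "a" / "b" / "two.log")]
    )
```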
diff --git a/lib/spack/spack/test/llnl/util/file_list.py b/lib/spack/spack/test/llnl/util/file_list.py index 75ba3ae89d9aca..e2ff5a82109510 100644 --- a/lib/spack/spack/test/llnl/util/file_list.py +++ b/lib/spack/spack/test/llnl/util/file_list.py @@ -9,7 +9,7 @@ import pytest -from llnl.util.filesystem import HeaderList, LibraryList, find, find_headers, find_libraries +from llnl.util.filesystem import HeaderList, LibraryList, find_headers, find_libraries import spack.paths @@ -324,33 +324,3 @@ def test_searching_order(search_fn, search_list, root, kwargs): # List should be empty here assert len(rlist) == 0 - - -@pytest.mark.parametrize( - "root,search_list,kwargs,expected", - [ - ( - search_dir, - "*/*bar.tx?", - {"recursive": False}, - [ - os.path.join(search_dir, os.path.join("a", "foobar.txt")), - os.path.join(search_dir, os.path.join("b", "bar.txp")), - os.path.join(search_dir, os.path.join("c", "bar.txt")), - ], - ), - ( - search_dir, - "*/*bar.tx?", - {"recursive": True}, - [ - os.path.join(search_dir, os.path.join("a", "foobar.txt")), - os.path.join(search_dir, os.path.join("b", "bar.txp")), - os.path.join(search_dir, os.path.join("c", "bar.txt")), - ], - ), - ], -) -def test_find_with_globbing(root, search_list, kwargs, expected): - matches = find(root, search_list, **kwargs) - assert sorted(matches) == sorted(expected) diff --git a/lib/spack/spack/test/llnl/util/filesystem.py b/lib/spack/spack/test/llnl/util/filesystem.py index a0c98747698b20..fd801295f4c26b 100644 --- a/lib/spack/spack/test/llnl/util/filesystem.py +++ b/lib/spack/spack/test/llnl/util/filesystem.py @@ -6,6 +6,7 @@ """Tests for ``llnl/util/filesystem.py``""" import filecmp import os +import pathlib import shutil import stat import sys @@ -14,7 +15,8 @@ import pytest import llnl.util.filesystem as fs -from llnl.util.symlink import islink, readlink, symlink +import llnl.util.symlink +from llnl.util.symlink import _windows_can_symlink, islink, readlink, symlink import spack.paths @@ -1035,3 +1037,227 @@ def test_windows_sfn(tmpdir): assert "d\\LONGER~1" in fs.windows_sfn(d) assert "d\\LONGER~2" in fs.windows_sfn(e) shutil.rmtree(tmpdir.join("d")) + + +@pytest.fixture +def dir_structure_with_things_to_find(tmpdir): + """ + / + dir_one/ + file_one + dir_two/ + dir_three/ + dir_four/ + file_two + file_three + file_four + """ + dir_one = tmpdir.join("dir_one").ensure(dir=True) + tmpdir.join("dir_two").ensure(dir=True) + dir_three = tmpdir.join("dir_three").ensure(dir=True) + dir_four = dir_three.join("dir_four").ensure(dir=True) + + locations = {} + locations["file_one"] = str(dir_one.join("file_one").ensure()) + locations["file_two"] = str(dir_four.join("file_two").ensure()) + locations["file_three"] = str(dir_three.join("file_three").ensure()) + locations["file_four"] = str(tmpdir.join("file_four").ensure()) + + return str(tmpdir), locations + + +def test_find_path_glob_matches(dir_structure_with_things_to_find): + root, locations = dir_structure_with_things_to_find + # both file name and path match + assert ( + fs.find(root, "file_two") + == fs.find(root, "*/*/file_two") + == fs.find(root, "dir_t*/*/*two") + == [locations["file_two"]] + ) + # ensure that * does not match directory separators + assert fs.find(root, "dir*file_two") == [] + # ensure that file name matches after / are matched from the start of the file name + assert fs.find(root, "*/ile_two") == [] + # file name matches exist, but not with these paths + assert fs.find(root, "dir_one/*/*two") == fs.find(root, "*/*/*/*/file_two") == [] + + +def 
test_find_max_depth(dir_structure_with_things_to_find): + root, locations = dir_structure_with_things_to_find + + # Make sure the paths we use to verify are absolute + assert os.path.isabs(locations["file_one"]) + + assert set(fs.find(root, "file_*", max_depth=0)) == {locations["file_four"]} + assert set(fs.find(root, "file_*", max_depth=1)) == { + locations["file_one"], + locations["file_three"], + locations["file_four"], + } + assert set(fs.find(root, "file_two", max_depth=2)) == {locations["file_two"]} + assert not set(fs.find(root, "file_two", max_depth=1)) + assert set(fs.find(root, "file_two")) == {locations["file_two"]} + assert set(fs.find(root, "file_*")) == set(locations.values()) + + +def test_find_max_depth_relative(dir_structure_with_things_to_find): + """``find`` with max_depth should return absolute paths even if the provided path is relative.""" + root, locations = dir_structure_with_things_to_find + with fs.working_dir(root): + assert set(fs.find(".", "file_*", max_depth=0)) == {locations["file_four"]} + assert set(fs.find(".", "file_two", max_depth=2)) == {locations["file_two"]} + + +@pytest.mark.parametrize("recursive,max_depth", [(False, -1), (False, 1)]) +def test_max_depth_and_recursive_errors(tmpdir, recursive, max_depth): + root = str(tmpdir) + error_str = "cannot be set if recursive is False" + with pytest.raises(ValueError, match=error_str): + fs.find(root, ["some_file"], recursive=recursive, max_depth=max_depth) + + with pytest.raises(ValueError, match=error_str): + fs.find_libraries(["some_lib"], root, recursive=recursive, max_depth=max_depth) + + +@pytest.fixture(params=[True, False]) +def complex_dir_structure(request, tmpdir): + """ + "lx-dy" means "level x, directory y" + "lx-fy" means "level x, file y" + "lx-sy" means "level x, symlink y" + + / + l1-d1/ + l2-d1/ + l3-s1 -> l1-d2 # points to directory above l2-d1 + l3-d2/ + l4-f1 + l3-s3 -> l1-d1 # cyclic link + l3-d4/ + l4-f2 + l1-d2/ + l2-f1 + l2-d2/ + l3-f3 + l2-s3 -> l2-d2 + l1-s3 -> l3-d4 # a link that "skips" a directory level + l1-s4 -> l2-s3 # a link to a link to a dir + """ + use_junctions = request.param + if sys.platform == "win32" and not use_junctions and not _windows_can_symlink(): + pytest.skip("This Windows instance is not configured with symlink support") + elif sys.platform != "win32" and use_junctions: + pytest.skip("Junctions are a Windows-only feature") + + l1_d1 = tmpdir.join("l1-d1").ensure(dir=True) + l2_d1 = l1_d1.join("l2-d1").ensure(dir=True) + l3_d2 = l2_d1.join("l3-d2").ensure(dir=True) + l3_d4 = l2_d1.join("l3-d4").ensure(dir=True) + l1_d2 = tmpdir.join("l1-d2").ensure(dir=True) + l2_d2 = l1_d2.join("l2-d2").ensure(dir=True) + + if use_junctions: + link_fn = llnl.util.symlink._windows_create_junction + else: + link_fn = os.symlink + + link_fn(l1_d2, pathlib.Path(l2_d1) / "l3-s1") + link_fn(l1_d1, pathlib.Path(l2_d1) / "l3-s3") + link_fn(l3_d4, pathlib.Path(tmpdir) / "l1-s3") + l2_s3 = pathlib.Path(l1_d2) / "l2-s3" + link_fn(l2_d2, l2_s3) + link_fn(l2_s3, pathlib.Path(tmpdir) / "l1-s4") + + locations = { + "l4-f1": str(l3_d2.join("l4-f1").ensure()), + "l4-f2-full": str(l3_d4.join("l4-f2").ensure()), + "l4-f2-link": str(pathlib.Path(tmpdir) / "l1-s3" / "l4-f2"), + "l2-f1": str(l1_d2.join("l2-f1").ensure()), + "l2-f1-link": str(pathlib.Path(tmpdir) / "l1-d1" / "l2-d1" / "l3-s1" / "l2-f1"), + "l3-f3-full": str(l2_d2.join("l3-f3").ensure()), + "l3-f3-link-l1": str(pathlib.Path(tmpdir) / "l1-s4" / "l3-f3"), + } + + return str(tmpdir), locations + +
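+# The symlink-handling tests below pin down the traversal guarantees stated in
+# the ``find`` docstring: each unique directory (identified by inode and device)
+# is visited at most once, and a file reachable through more than one symlinked
+# path is reported only once, at the lowest depth at which it is encountered.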
+def test_find_max_depth_symlinks(complex_dir_structure): + root, locations = complex_dir_structure + root = pathlib.Path(root) + assert set(fs.find(root, "l4-f1")) == {locations["l4-f1"]} + assert set(fs.find(root / "l1-s3", "l4-f2", max_depth=0)) == {locations["l4-f2-link"]} + assert set(fs.find(root / "l1-d1", "l2-f1")) == {locations["l2-f1-link"]} + # The file is reachable both via a symlink and via its real subdirectory; the + # link path is searched first, and the directory is not searched again when it + # is encountered a second time (via the non-link path) in the traversal + assert set(fs.find(root, "l4-f2")) == {locations["l4-f2-link"]} + # File is accessible only via the dir, so the full file path should + # be reported + assert set(fs.find(root / "l1-d1", "l4-f2")) == {locations["l4-f2-full"]} + # Check following links to links + assert set(fs.find(root, "l3-f3")) == {locations["l3-f3-link-l1"]} + + +def test_find_max_depth_multiple_and_repeated_entry_points(complex_dir_structure): + root, locations = complex_dir_structure + + fst = str(pathlib.Path(root) / "l1-d1" / "l2-d1") + snd = str(pathlib.Path(root) / "l1-d2") + nonexistent = str(pathlib.Path(root) / "nonexistent") + + assert set(fs.find([fst, snd, fst, snd, nonexistent], ["l*-f*"], max_depth=1)) == { + locations["l2-f1"], + locations["l4-f1"], + locations["l4-f2-full"], + locations["l3-f3-full"], + } + + +def test_multiple_patterns(complex_dir_structure): + root, _ = complex_dir_structure + paths = fs.find(root, ["l2-f1", "l*-d*/l3-f3", "*", "*/*"]) + # There shouldn't be duplicate results with multiple, overlapping patterns + assert len(set(paths)) == len(paths) + # All files should be found + filenames = [os.path.basename(p) for p in paths] + assert set(filenames) == {"l2-f1", "l3-f3", "l4-f1", "l4-f2"} + # They are ordered by first matching pattern (this is a bit of an implementation detail, + # and we could decide to change the exact order in the future) + assert filenames[0] == "l2-f1" + assert filenames[1] == "l3-f3" + + +def test_find_input_types(tmp_path: pathlib.Path): + """test that find only accepts sequences and instances of pathlib.Path and str for root, and + only sequences and instances of str for patterns. In principle mypy catches these issues, but
In principle mypy catches these issues, but + it is not enabled on all call-sites.""" + (tmp_path / "file.txt").write_text("") + assert ( + fs.find(tmp_path, "file.txt") + == fs.find(str(tmp_path), "file.txt") + == fs.find([tmp_path, str(tmp_path)], "file.txt") + == fs.find((tmp_path, str(tmp_path)), "file.txt") + == fs.find(tmp_path, "file.txt") + == fs.find(tmp_path, ["file.txt"]) + == fs.find(tmp_path, ("file.txt",)) + == [str(tmp_path / "file.txt")] + ) + + with pytest.raises(TypeError): + fs.find(tmp_path, pathlib.Path("file.txt")) # type: ignore + + with pytest.raises(TypeError): + fs.find(1, "file.txt") # type: ignore + + +def test_find_only_finds_files(tmp_path: pathlib.Path): + """ensure that find only returns files even at max_depth""" + (tmp_path / "subdir").mkdir() + (tmp_path / "subdir" / "dir").mkdir() + (tmp_path / "subdir" / "file.txt").write_text("") + assert ( + fs.find(tmp_path, "*", max_depth=1) + == fs.find(tmp_path, "*/*", max_depth=1) + == [str(tmp_path / "subdir" / "file.txt")] + ) diff --git a/lib/spack/spack/test/llnl/util/lang.py b/lib/spack/spack/test/llnl/util/lang.py index 52dcf3950a452b..6926c50cd89e90 100644 --- a/lib/spack/spack/test/llnl/util/lang.py +++ b/lib/spack/spack/test/llnl/util/lang.py @@ -373,3 +373,19 @@ class _SomeClass: _SomeClass.deprecated.error_lvl = 2 with pytest.raises(AttributeError): _ = s.deprecated + + +def test_fnmatch_multiple(): + named_patterns = {"a": "libf*o.so", "b": "libb*r.so"} + regex = re.compile(llnl.util.lang.fnmatch_translate_multiple(named_patterns)) + + a = regex.match("libfoo.so") + assert a and a.group("a") == "libfoo.so" + + b = regex.match("libbar.so") + assert b and b.group("b") == "libbar.so" + + assert not regex.match("libfoo.so.1") + assert not regex.match("libbar.so.1") + assert not regex.match("libfoo.solibbar.so") + assert not regex.match("libbaz.so") diff --git a/lib/spack/spack/test/test_suite.py b/lib/spack/spack/test/test_suite.py index 60a54e7171bba2..3ed4e30d42c740 100644 --- a/lib/spack/spack/test/test_suite.py +++ b/lib/spack/spack/test/test_suite.py @@ -501,18 +501,20 @@ def test_find_required_file(tmpdir): # First just find a single path results = spack.install_test.find_required_file( - tmpdir.join("c"), filename, expected=1, recursive=True + str(tmpdir.join("c")), filename, expected=1, recursive=True ) assert isinstance(results, str) # Ensure none file if do not recursively search that directory with pytest.raises(spack.install_test.SkipTest, match="Expected 1"): spack.install_test.find_required_file( - tmpdir.join("c"), filename, expected=1, recursive=False + str(tmpdir.join("c")), filename, expected=1, recursive=False ) # Now make sure we get all of the files - results = spack.install_test.find_required_file(tmpdir, filename, expected=3, recursive=True) + results = spack.install_test.find_required_file( + str(tmpdir), filename, expected=3, recursive=True + ) assert isinstance(results, list) and len(results) == 3 diff --git a/var/spack/repos/builtin.mock/packages/attributes-foo/package.py b/var/spack/repos/builtin.mock/packages/attributes-foo/package.py index 31c88f4b08564a..b882fc9b6595b6 100644 --- a/var/spack/repos/builtin.mock/packages/attributes-foo/package.py +++ b/var/spack/repos/builtin.mock/packages/attributes-foo/package.py @@ -44,7 +44,7 @@ def libs(self): # Header provided by the bar virutal package @property def bar_headers(self): - return find_headers("bar/bar", root=self.home.include, recursive=False) + return find_headers("bar", root=self.home.include, recursive=True) # Libary 
provided by the bar virtual package @property @@ -59,7 +59,7 @@ def baz_home(self): # Header provided by the baz virtual package @property def baz_headers(self): - return find_headers("baz/baz", root=self.baz_home.include, recursive=False) + return find_headers("baz", root=self.baz_home.include, recursive=True) # Library provided by the baz virtual package @property From 4322cf56b15a0a088156f5702d44a04e8238b4b3 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Fri, 8 Nov 2024 14:12:50 -0800 Subject: [PATCH 120/208] upcxx %oneapi@2025: cxxflags add -Wno-error=missing-template-arg-list-after-template-kw (#47503) --- var/spack/repos/builtin/packages/upcxx/package.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/upcxx/package.py b/var/spack/repos/builtin/packages/upcxx/package.py index 91971025683297..4e23f42431a014 100644 --- a/var/spack/repos/builtin/packages/upcxx/package.py +++ b/var/spack/repos/builtin/packages/upcxx/package.py @@ -164,7 +164,11 @@ class Upcxx(Package, CudaPackage, ROCmPackage): depends_on("oneapi-level-zero@1.8.0:", when="+level_zero") # All flags should be passed to the build-env in autoconf-like vars - flag_handler = env_flags + def flag_handler(self, name, flags): + if name == "cxxflags": + if self.spec.satisfies("%oneapi@2025:"): + flags.append("-Wno-error=missing-template-arg-list-after-template-kw") + return (flags, None, None) def set_variables(self, env): env.set("UPCXX_INSTALL", self.prefix) From c6997e11a74e5dedbeabf93ea1df3f8d2a4601e8 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sat, 9 Nov 2024 01:25:02 +0100 Subject: [PATCH 121/208] `spack.compiler`/`spack.util.libc`: add caching (#47213) * spack.compiler: cache output * compute libc from the dynamic linker at most once per spack process * wrap compiler cache entry in class, add type hints * test compiler caching * ensure tests do not populate user cache, and fix 2 tests * avoid recursion: cache lookup -> compute key -> cflags -> real_version -> cache lookup * allow compiler execution in test that depends on get_real_version --- lib/spack/docs/conf.py | 1 + lib/spack/spack/compiler.py | 154 +++++++++++++++++++---- lib/spack/spack/compilers/aocc.py | 2 +- lib/spack/spack/test/compilers/basics.py | 80 ++++++++++-- lib/spack/spack/test/concretize.py | 1 + lib/spack/spack/test/conftest.py | 16 ++- lib/spack/spack/util/libc.py | 10 ++ 7 files changed, 227 insertions(+), 37 deletions(-) diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index 18495d4bca51f3..4d8592ffd930da 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -221,6 +221,7 @@ def setup(sphinx): ("py:class", "spack.filesystem_view.SimpleFilesystemView"), ("py:class", "spack.traverse.EdgeAndDepth"), ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"), + ("py:class", "spack.compiler.CompilerCache"), # TypeVar that is not handled correctly ("py:class", "llnl.util.lang.T"), ] diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 31067a14d9fb59..98c0c22f0ae7f3 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -4,20 +4,23 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import contextlib +import hashlib import itertools +import json import os import platform import re import shutil import sys import tempfile -from typing import List, Optional, Sequence +from typing import Dict, List, Optional, Sequence import llnl.path import llnl.util.lang import 
llnl.util.tty as tty from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs +import spack.caches import spack.error import spack.schema.environment import spack.spec @@ -34,7 +37,7 @@ @llnl.util.lang.memoized -def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()): +def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()) -> str: """Invokes the compiler at a given path passing a single version argument and returns the output. @@ -57,7 +60,7 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()): return output -def get_compiler_version_output(compiler_path, *args, **kwargs): +def get_compiler_version_output(compiler_path, *args, **kwargs) -> str: """Wrapper for _get_compiler_version_output().""" # This ensures that we memoize compiler output by *absolute path*, # not just executable name. If we don't do this, and the path changes @@ -290,6 +293,7 @@ def __init__( self.environment = environment or {} self.extra_rpaths = extra_rpaths or [] self.enable_implicit_rpaths = enable_implicit_rpaths + self.cache = COMPILER_CACHE self.cc = paths[0] self.cxx = paths[1] @@ -390,15 +394,11 @@ def real_version(self): E.g. C++11 flag checks. """ - if not self._real_version: - try: - real_version = spack.version.Version(self.get_real_version()) - if real_version == spack.version.Version("unknown"): - return self.version - self._real_version = real_version - except spack.util.executable.ProcessError: - self._real_version = self.version - return self._real_version + real_version_str = self.cache.get(self).real_version + if not real_version_str or real_version_str == "unknown": + return self.version + + return spack.version.StandardVersion.from_string(real_version_str) def implicit_rpaths(self) -> List[str]: if self.enable_implicit_rpaths is False: @@ -445,9 +445,7 @@ def required_libs(self): @property def compiler_verbose_output(self) -> Optional[str]: """Verbose output from compiling a dummy C source file. Output is cached.""" - if not hasattr(self, "_compile_c_source_output"): - self._compile_c_source_output = self._compile_dummy_c_source() - return self._compile_c_source_output + return self.cache.get(self).c_compiler_output def _compile_dummy_c_source(self) -> Optional[str]: cc = self.cc if self.cc else self.cxx @@ -559,7 +557,7 @@ def fc_pic_flag(self): # Note: This is not a class method. The class methods are used to detect # compilers on PATH based systems, and do not set up the run environment of # the compiler. This method can be called on `module` based systems as well - def get_real_version(self): + def get_real_version(self) -> str: """Query the compiler for its version. This is the "real" compiler version, regardless of what is in the @@ -569,14 +567,17 @@ def get_real_version(self): modifications) to enable the compiler to run properly on any platform. 
""" cc = spack.util.executable.Executable(self.cc) - with self.compiler_environment(): - output = cc( - self.version_argument, - output=str, - error=str, - ignore_errors=tuple(self.ignore_version_errors), - ) - return self.extract_version_from_output(output) + try: + with self.compiler_environment(): + output = cc( + self.version_argument, + output=str, + error=str, + ignore_errors=tuple(self.ignore_version_errors), + ) + return self.extract_version_from_output(output) + except spack.util.executable.ProcessError: + return "unknown" @property def prefix(self): @@ -603,7 +604,7 @@ def default_version(cls, cc): @classmethod @llnl.util.lang.memoized - def extract_version_from_output(cls, output): + def extract_version_from_output(cls, output: str) -> str: """Extracts the version from compiler's output.""" match = re.search(cls.version_regex, output) return match.group(1) if match else "unknown" @@ -732,3 +733,106 @@ def __init__(self, compiler, feature, flag_name, ver_string=None): ) + " implement the {0} property and submit a pull request or issue.".format(flag_name), ) + + +class CompilerCacheEntry: + """Deserialized cache entry for a compiler""" + + __slots__ = ["c_compiler_output", "real_version"] + + def __init__(self, c_compiler_output: Optional[str], real_version: str): + self.c_compiler_output = c_compiler_output + self.real_version = real_version + + @classmethod + def from_dict(cls, data: Dict[str, Optional[str]]): + if not isinstance(data, dict): + raise ValueError(f"Invalid {cls.__name__} data") + c_compiler_output = data.get("c_compiler_output") + real_version = data.get("real_version") + if not isinstance(real_version, str) or not isinstance( + c_compiler_output, (str, type(None)) + ): + raise ValueError(f"Invalid {cls.__name__} data") + return cls(c_compiler_output, real_version) + + +class CompilerCache: + """Base class for compiler output cache. 
Default implementation does not cache anything.""" + + def value(self, compiler: Compiler) -> Dict[str, Optional[str]]: + return { + "c_compiler_output": compiler._compile_dummy_c_source(), + "real_version": compiler.get_real_version(), + } + + def get(self, compiler: Compiler) -> CompilerCacheEntry: + return CompilerCacheEntry.from_dict(self.value(compiler)) + + +class FileCompilerCache(CompilerCache): + """Cache for compiler output, which is used to determine implicit link paths, the default libc + version, and the compiler version.""" + + name = os.path.join("compilers", "compilers.json") + + def __init__(self, cache: "spack.caches.FileCacheType") -> None: + self.cache = cache + self.cache.init_entry(self.name) + self._data: Dict[str, Dict[str, Optional[str]]] = {} + + def _get_entry(self, key: str) -> Optional[CompilerCacheEntry]: + try: + return CompilerCacheEntry.from_dict(self._data[key]) + except ValueError: + del self._data[key] + except KeyError: + pass + return None + + def get(self, compiler: Compiler) -> CompilerCacheEntry: + # Cache hit + try: + with self.cache.read_transaction(self.name) as f: + assert f is not None + self._data = json.loads(f.read()) + assert isinstance(self._data, dict) + except (json.JSONDecodeError, AssertionError): + self._data = {} + + key = self._key(compiler) + value = self._get_entry(key) + if value is not None: + return value + + # Cache miss + with self.cache.write_transaction(self.name) as (old, new): + try: + assert old is not None + self._data = json.loads(old.read()) + assert isinstance(self._data, dict) + except (json.JSONDecodeError, AssertionError): + self._data = {} + + # Use cache entry that may have been created by another process in the meantime. + entry = self._get_entry(key) + + # Finally compute the cache entry + if entry is None: + self._data[key] = self.value(compiler) + entry = CompilerCacheEntry.from_dict(self._data[key]) + + new.write(json.dumps(self._data, separators=(",", ":"))) + + return entry + + def _key(self, compiler: Compiler) -> str: + as_bytes = json.dumps(compiler.to_dict(), separators=(",", ":")).encode("utf-8") + return hashlib.sha256(as_bytes).hexdigest() + + +def _make_compiler_cache(): + return FileCompilerCache(spack.caches.MISC_CACHE) + + +COMPILER_CACHE: CompilerCache = llnl.util.lang.Singleton(_make_compiler_cache) # type: ignore diff --git a/lib/spack/spack/compilers/aocc.py b/lib/spack/spack/compilers/aocc.py index 7ac861c745733d..920e7d049263c2 100644 --- a/lib/spack/spack/compilers/aocc.py +++ b/lib/spack/spack/compilers/aocc.py @@ -116,5 +116,5 @@ def fflags(self): def _handle_default_flag_addtions(self): # This is a known issue for AOCC 3.0 see: # https://developer.amd.com/wp-content/resources/AOCC-3.0-Install-Guide.pdf - if self.real_version.satisfies(ver("3.0.0")): + if self.version.satisfies(ver("3.0.0")): return "-Wno-unused-command-line-argument " "-mllvm -eliminate-similar-expr=false" diff --git a/lib/spack/spack/test/compilers/basics.py b/lib/spack/spack/test/compilers/basics.py index ee31e50f53893e..75e79d497c6c65 100644 --- a/lib/spack/spack/test/compilers/basics.py +++ b/lib/spack/spack/test/compilers/basics.py @@ -3,8 +3,10 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) """Test basic behavior of compilers in Spack""" +import json import os from copy import copy +from typing import Optional import pytest @@ -17,6 +19,7 @@ import spack.util.module_cmd from spack.compiler import Compiler from spack.util.executable import Executable, ProcessError +from spack.util.file_cache import FileCache 
def test_multiple_conflicting_compiler_definitions(mutable_config):
@@ -101,11 +104,14 @@ def verbose_flag(self):
 
 
 @pytest.mark.not_on_windows("Not supported on Windows (yet)")
-def test_implicit_rpaths(dirs_with_libfiles):
+def test_implicit_rpaths(dirs_with_libfiles, monkeypatch):
     lib_to_dirs, all_dirs = dirs_with_libfiles
-    compiler = MockCompiler()
-    compiler._compile_c_source_output = "ld " + " ".join(f"-L{d}" for d in all_dirs)
-    retrieved_rpaths = compiler.implicit_rpaths()
+    monkeypatch.setattr(
+        MockCompiler,
+        "_compile_dummy_c_source",
+        lambda self: "ld " + " ".join(f"-L{d}" for d in all_dirs),
+    )
+    retrieved_rpaths = MockCompiler().implicit_rpaths()
     assert set(retrieved_rpaths) == set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])
 
 
@@ -647,6 +653,7 @@ def test_raising_if_compiler_target_is_over_specific(config):
 
 
 @pytest.mark.not_on_windows("Not supported on Windows (yet)")
+@pytest.mark.enable_compiler_execution
 def test_compiler_get_real_version(working_env, monkeypatch, tmpdir):
     # Test variables
     test_version = "2.2.2"
@@ -736,6 +743,7 @@ def test_get_compilers(config):
     ) == [spack.compilers._compiler_from_config_entry(without_suffix)]
 
 
+@pytest.mark.enable_compiler_execution
 def test_compiler_get_real_version_fails(working_env, monkeypatch, tmpdir):
     # Test variables
     test_version = "2.2.2"
@@ -784,15 +792,13 @@ def _call(*args, **kwargs):
     compilers = spack.compilers.get_compilers([compiler_dict])
     assert len(compilers) == 1
     compiler = compilers[0]
-    try:
-        _ = compiler.get_real_version()
-        assert False
-    except ProcessError:
-        # Confirm environment does not change after failed call
-        assert "SPACK_TEST_CMP_ON" not in os.environ
+    assert compiler.get_real_version() == "unknown"
+    # Confirm environment does not change after failed call
+    assert "SPACK_TEST_CMP_ON" not in os.environ
 
 
 @pytest.mark.not_on_windows("Bash scripting unsupported on Windows (for now)")
+@pytest.mark.enable_compiler_execution
 def test_compiler_flags_use_real_version(working_env, monkeypatch, tmpdir):
     # Create compiler
     gcc = str(tmpdir.join("gcc"))
@@ -895,3 +901,57 @@ def test_compiler_environment(working_env):
     )
     with compiler.compiler_environment():
         assert os.environ["TEST"] == "yes"
+
+
+class MockCompilerWithoutExecutables(MockCompiler):
+    def __init__(self):
+        super().__init__()
+        self._compile_dummy_c_source_count = 0
+        self._get_real_version_count = 0
+
+    def _compile_dummy_c_source(self) -> Optional[str]:
+        self._compile_dummy_c_source_count += 1
+        return "gcc helloworld.c -o helloworld"
+
+    def get_real_version(self) -> str:
+        self._get_real_version_count += 1
+        return "1.0.0"
+
+
+def test_compiler_output_caching(tmp_path):
+    """Test that compiler output is cached on the filesystem."""
+    # The first call should trigger the cache to be updated.
+    a = MockCompilerWithoutExecutables()
+    cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path)))
+    assert cache.get(a).c_compiler_output == "gcc helloworld.c -o helloworld"
+    assert cache.get(a).real_version == "1.0.0"
+    assert a._compile_dummy_c_source_count == 1
+    assert a._get_real_version_count == 1
+
+    # The second call on an equivalent but distinct object should not trigger compiler calls.
+ b = MockCompilerWithoutExecutables() + cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path))) + assert cache.get(b).c_compiler_output == "gcc helloworld.c -o helloworld" + assert cache.get(b).real_version == "1.0.0" + assert b._compile_dummy_c_source_count == 0 + assert b._get_real_version_count == 0 + + # Cache schema change should be handled gracefully. + with open(cache.cache.cache_path(cache.name), "w") as f: + for k in cache._data: + cache._data[k] = "corrupted entry" + f.write(json.dumps(cache._data)) + + c = MockCompilerWithoutExecutables() + cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path))) + assert cache.get(c).c_compiler_output == "gcc helloworld.c -o helloworld" + assert cache.get(c).real_version == "1.0.0" + + # Cache corruption should be handled gracefully. + with open(cache.cache.cache_path(cache.name), "w") as f: + f.write("corrupted cache") + + d = MockCompilerWithoutExecutables() + cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path))) + assert cache.get(d).c_compiler_output == "gcc helloworld.c -o helloworld" + assert cache.get(d).real_version == "1.0.0" diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index dd2444df0a7a1f..e33f9761dad728 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -2316,6 +2316,7 @@ def test_compiler_match_constraints_when_selected(self): @pytest.mark.regression("36339") @pytest.mark.not_on_windows("Not supported on Windows") + @pytest.mark.enable_compiler_execution def test_compiler_with_custom_non_numeric_version(self, mock_executable): """Test that, when a compiler has a completely made up version, we can use its 'real version' to detect targets and don't raise during concretization. diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 5f461d9d3588a4..f6670157edf9ff 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -973,12 +973,26 @@ def _return_none(*args): return None +def _compiler_output(self): + return "" + + +def _get_real_version(self): + return str(self.version) + + @pytest.fixture(scope="function", autouse=True) def disable_compiler_execution(monkeypatch, request): """Disable compiler execution to determine implicit link paths and libc flavor and version. 
To re-enable use `@pytest.mark.enable_compiler_execution`""" if "enable_compiler_execution" not in request.keywords: - monkeypatch.setattr(spack.compiler.Compiler, "_compile_dummy_c_source", _return_none) + monkeypatch.setattr(spack.compiler.Compiler, "_compile_dummy_c_source", _compiler_output) + monkeypatch.setattr(spack.compiler.Compiler, "get_real_version", _get_real_version) + + +@pytest.fixture(autouse=True) +def disable_compiler_output_cache(monkeypatch): + monkeypatch.setattr(spack.compiler, "COMPILER_CACHE", spack.compiler.CompilerCache()) @pytest.fixture(scope="function") diff --git a/lib/spack/spack/util/libc.py b/lib/spack/spack/util/libc.py index 148c4cb13a529a..d842cd102212d8 100644 --- a/lib/spack/spack/util/libc.py +++ b/lib/spack/spack/util/libc.py @@ -11,6 +11,8 @@ from subprocess import PIPE, run from typing import Dict, List, Optional +from llnl.util.lang import memoized + import spack.spec import spack.util.elf @@ -61,6 +63,14 @@ def default_search_paths_from_dynamic_linker(dynamic_linker: str) -> List[str]: def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"]: + maybe_spec = _libc_from_dynamic_linker(dynamic_linker) + if maybe_spec: + return maybe_spec.copy() + return None + + +@memoized +def _libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"]: if not os.path.exists(dynamic_linker): return None From da1d533877f90610571b72f070c01e13b9729108 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Fri, 8 Nov 2024 17:07:40 -0800 Subject: [PATCH 122/208] fix patched dependencies across repositories (#42463) Currently, if a package has a dependency from another repository and patches it, generation of the patch cache will fail. Concretization succeeds if a fixed patch cache is in place. - [x] don't assume that patched dependencies are in the same repo when indexing - [x] add some test fixtures to support multi-repo tests. 
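
As a minimal sketch (a hypothetical simplification; the real mock packages are in
this diff), the previously-failing scenario is a package in one repo patching a
dependency that lives in another repo:

    # in repo builtin.mock -- the dependency comes from builtin.mock2
    class PatchAForeignDependency(Package):
        depends_on(
            "mock2-patched-dependency",  # package provided by the second repo
            patches=[patch("mock2-package.patch")],
        )

Indexing such a patch used to look up the dependency's class in the dependent
package's own repository, which fails when the dependency is foreign; the fix
below resolves it through the global repo path using the spec's fully-qualified
name.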
--------- Signed-off-by: Todd Gamblin Co-authored-by: Todd Gamblin --- lib/spack/spack/main.py | 2 +- lib/spack/spack/patch.py | 9 +++------ lib/spack/spack/paths.py | 1 + lib/spack/spack/test/conftest.py | 16 +++++++++++----- lib/spack/spack/test/patch.py | 6 ++++++ var/spack/repos/builtin.mock/README.md | 7 +++++++ .../mock2-package.patch | 11 +++++++++++ .../patch-a-foreign-dependency/package.py | 17 +++++++++++++++++ var/spack/repos/builtin.mock2/README.md | 6 ++++++ .../mock2-patched-dependency/package.py | 15 +++++++++++++++ var/spack/repos/builtin.mock2/repo.yaml | 2 ++ 11 files changed, 80 insertions(+), 12 deletions(-) create mode 100644 var/spack/repos/builtin.mock/README.md create mode 100644 var/spack/repos/builtin.mock/packages/patch-a-foreign-dependency/mock2-package.patch create mode 100644 var/spack/repos/builtin.mock/packages/patch-a-foreign-dependency/package.py create mode 100644 var/spack/repos/builtin.mock2/README.md create mode 100644 var/spack/repos/builtin.mock2/packages/mock2-patched-dependency/package.py create mode 100644 var/spack/repos/builtin.mock2/repo.yaml diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 7cab47d77f7bf1..28567f26e81f42 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -547,7 +547,7 @@ def setup_main_options(args): key = syaml.syaml_str("repos") key.override = True spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict( - [(key, [spack.paths.mock_packages_path])] + [(key, [spack.paths.mock_packages_path, spack.paths.mock_packages_path2])] ) spack.repo.PATH = spack.repo.create(spack.config.CONFIG) diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index a0f4152317c1e9..d4bc9fb4f46efd 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -530,7 +530,7 @@ def update_package(self, pkg_fullname: str) -> None: # update the index with per-package patch indexes pkg_cls = self.repository.get_pkg_class(pkg_fullname) - partial_index = self._index_patches(pkg_cls, self.repository) + partial_index = self._index_patches(pkg_cls) for sha256, package_to_patch in partial_index.items(): p2p = self.index.setdefault(sha256, {}) p2p.update(package_to_patch) @@ -546,14 +546,11 @@ def update(self, other: "PatchCache") -> None: p2p.update(package_to_patch) @staticmethod - def _index_patches( - pkg_class: Type["spack.package_base.PackageBase"], repository: "spack.repo.RepoPath" - ) -> Dict[Any, Any]: + def _index_patches(pkg_class: Type["spack.package_base.PackageBase"]) -> Dict[Any, Any]: """Patch index for a specific patch. Args: pkg_class: package object to get patches for - repository: repository containing the package Returns: The patch index for that package. 
@@ -571,7 +568,7 @@ def _index_patches( for dependency in deps_by_name.values(): for patch_list in dependency.patches.values(): for patch in patch_list: - dspec_cls = repository.get_pkg_class(dependency.spec.name) + dspec_cls = spack.repo.PATH.get_pkg_class(dependency.spec.fullname) patch_dict = patch.to_dict() patch_dict.pop("sha256") # save some space index[patch.sha256] = {dspec_cls.fullname: patch_dict} diff --git a/lib/spack/spack/paths.py b/lib/spack/spack/paths.py index 84583cd552f531..aeca3a98996fbf 100644 --- a/lib/spack/spack/paths.py +++ b/lib/spack/spack/paths.py @@ -60,6 +60,7 @@ repos_path = os.path.join(var_path, "repos") packages_path = os.path.join(repos_path, "builtin") mock_packages_path = os.path.join(repos_path, "builtin.mock") +mock_packages_path2 = os.path.join(repos_path, "builtin.mock2") # # Writable things in $spack/var/spack diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index f6670157edf9ff..b66e1edb353e23 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -575,6 +575,11 @@ def mock_repo_path(): yield spack.repo.from_path(spack.paths.mock_packages_path) +@pytest.fixture(scope="session") +def mock_repo_path2(): + yield spack.repo.from_path(spack.paths.mock_packages_path2) + + def _pkg_install_fn(pkg, spec, prefix): # sanity_check_prefix requires something in the install directory mkdirp(prefix.bin) @@ -588,19 +593,20 @@ def mock_pkg_install(monkeypatch): @pytest.fixture(scope="function") -def mock_packages(mock_repo_path, mock_pkg_install, request): - """Use the 'builtin.mock' repository instead of 'builtin'""" +def mock_packages(mock_repo_path, mock_repo_path2, mock_pkg_install, request): + """Use the 'builtin.mock' and 'builtin.mock2' repositories instead of 'builtin'""" ensure_configuration_fixture_run_before(request) - with spack.repo.use_repositories(mock_repo_path) as mock_repo: + with spack.repo.use_repositories(mock_repo_path, mock_repo_path2) as mock_repo: yield mock_repo @pytest.fixture(scope="function") -def mutable_mock_repo(mock_repo_path, request): +def mutable_mock_repo(request): """Function-scoped mock packages, for tests that need to modify them.""" ensure_configuration_fixture_run_before(request) mock_repo = spack.repo.from_path(spack.paths.mock_packages_path) - with spack.repo.use_repositories(mock_repo) as mock_repo_path: + mock_repo2 = spack.repo.from_path(spack.paths.mock_packages_path2) + with spack.repo.use_repositories(mock_repo, mock_repo2) as mock_repo_path: yield mock_repo_path diff --git a/lib/spack/spack/test/patch.py b/lib/spack/spack/test/patch.py index 4b5f31b904a64b..1088b1f24b1873 100644 --- a/lib/spack/spack/test/patch.py +++ b/lib/spack/spack/test/patch.py @@ -499,3 +499,9 @@ def test_invalid_from_dict(mock_packages, config): } with pytest.raises(spack.fetch_strategy.ChecksumError, match="sha256 checksum failed for"): spack.patch.from_dict(dictionary) + + +@pytest.mark.regression("43097") +def test_cross_repo_patch(mock_packages, config): + cross_repo_patch = Spec("patch-a-foreign-dependency") + cross_repo_patch.concretize() diff --git a/var/spack/repos/builtin.mock/README.md b/var/spack/repos/builtin.mock/README.md new file mode 100644 index 00000000000000..5a5f6e747f5b57 --- /dev/null +++ b/var/spack/repos/builtin.mock/README.md @@ -0,0 +1,7 @@ +# `builtin.mock` + +This repository and the secondary mock repo `builtin.mock2` contain mock packages used +by Spack tests. 
+ +Most tests are in `builtin.mock`, but `builtin.mock2` is used for scenarios where we +need multiple repos for testing. diff --git a/var/spack/repos/builtin.mock/packages/patch-a-foreign-dependency/mock2-package.patch b/var/spack/repos/builtin.mock/packages/patch-a-foreign-dependency/mock2-package.patch new file mode 100644 index 00000000000000..02bfad9103c7f1 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/patch-a-foreign-dependency/mock2-package.patch @@ -0,0 +1,11 @@ +--- patch-a-dependency/configure 2018-08-13 23:13:51.000000000 -0700 ++++ patch-a-dependency/configure.patched 2018-08-13 23:14:15.000000000 -0700 +@@ -2,7 +2,7 @@ + prefix=$(echo $1 | sed 's/--prefix=//') + cat > Makefile < Date: Sat, 9 Nov 2024 08:50:37 +0100 Subject: [PATCH 123/208] Fix style checks on develop (#47518) `mypy` checks have been accidentally broken by #47213 --- lib/spack/spack/compiler.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 98c0c22f0ae7f3..46382a3d983107 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -29,6 +29,7 @@ import spack.util.module_cmd import spack.version from spack.util.environment import filter_system_paths +from spack.util.file_cache import FileCache __all__ = ["Compiler"] @@ -776,7 +777,7 @@ class FileCompilerCache(CompilerCache): name = os.path.join("compilers", "compilers.json") - def __init__(self, cache: "spack.caches.FileCacheType") -> None: + def __init__(self, cache: "FileCache") -> None: self.cache = cache self.cache.init_entry(self.name) self._data: Dict[str, Dict[str, Optional[str]]] = {} From 7fbfb0f6dc962a18efbaefdf199f3a8e3760194a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 9 Nov 2024 01:25:25 -0800 Subject: [PATCH 124/208] Revert "fix patched dependencies across repositories (#42463)" (#47519) This reverts commit da1d533877f90610571b72f070c01e13b9729108. 
--- lib/spack/spack/main.py | 2 +- lib/spack/spack/patch.py | 9 ++++++--- lib/spack/spack/paths.py | 1 - lib/spack/spack/test/conftest.py | 16 +++++----------- lib/spack/spack/test/patch.py | 6 ------ var/spack/repos/builtin.mock/README.md | 7 ------- .../mock2-package.patch | 11 ----------- .../patch-a-foreign-dependency/package.py | 17 ----------------- var/spack/repos/builtin.mock2/README.md | 6 ------ .../mock2-patched-dependency/package.py | 15 --------------- var/spack/repos/builtin.mock2/repo.yaml | 2 -- 11 files changed, 12 insertions(+), 80 deletions(-) delete mode 100644 var/spack/repos/builtin.mock/README.md delete mode 100644 var/spack/repos/builtin.mock/packages/patch-a-foreign-dependency/mock2-package.patch delete mode 100644 var/spack/repos/builtin.mock/packages/patch-a-foreign-dependency/package.py delete mode 100644 var/spack/repos/builtin.mock2/README.md delete mode 100644 var/spack/repos/builtin.mock2/packages/mock2-patched-dependency/package.py delete mode 100644 var/spack/repos/builtin.mock2/repo.yaml diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 28567f26e81f42..7cab47d77f7bf1 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -547,7 +547,7 @@ def setup_main_options(args): key = syaml.syaml_str("repos") key.override = True spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict( - [(key, [spack.paths.mock_packages_path, spack.paths.mock_packages_path2])] + [(key, [spack.paths.mock_packages_path])] ) spack.repo.PATH = spack.repo.create(spack.config.CONFIG) diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index d4bc9fb4f46efd..a0f4152317c1e9 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -530,7 +530,7 @@ def update_package(self, pkg_fullname: str) -> None: # update the index with per-package patch indexes pkg_cls = self.repository.get_pkg_class(pkg_fullname) - partial_index = self._index_patches(pkg_cls) + partial_index = self._index_patches(pkg_cls, self.repository) for sha256, package_to_patch in partial_index.items(): p2p = self.index.setdefault(sha256, {}) p2p.update(package_to_patch) @@ -546,11 +546,14 @@ def update(self, other: "PatchCache") -> None: p2p.update(package_to_patch) @staticmethod - def _index_patches(pkg_class: Type["spack.package_base.PackageBase"]) -> Dict[Any, Any]: + def _index_patches( + pkg_class: Type["spack.package_base.PackageBase"], repository: "spack.repo.RepoPath" + ) -> Dict[Any, Any]: """Patch index for a specific patch. Args: pkg_class: package object to get patches for + repository: repository containing the package Returns: The patch index for that package. 
@@ -568,7 +571,7 @@ def _index_patches(pkg_class: Type["spack.package_base.PackageBase"]) -> Dict[An for dependency in deps_by_name.values(): for patch_list in dependency.patches.values(): for patch in patch_list: - dspec_cls = spack.repo.PATH.get_pkg_class(dependency.spec.fullname) + dspec_cls = repository.get_pkg_class(dependency.spec.name) patch_dict = patch.to_dict() patch_dict.pop("sha256") # save some space index[patch.sha256] = {dspec_cls.fullname: patch_dict} diff --git a/lib/spack/spack/paths.py b/lib/spack/spack/paths.py index aeca3a98996fbf..84583cd552f531 100644 --- a/lib/spack/spack/paths.py +++ b/lib/spack/spack/paths.py @@ -60,7 +60,6 @@ repos_path = os.path.join(var_path, "repos") packages_path = os.path.join(repos_path, "builtin") mock_packages_path = os.path.join(repos_path, "builtin.mock") -mock_packages_path2 = os.path.join(repos_path, "builtin.mock2") # # Writable things in $spack/var/spack diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index b66e1edb353e23..f6670157edf9ff 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -575,11 +575,6 @@ def mock_repo_path(): yield spack.repo.from_path(spack.paths.mock_packages_path) -@pytest.fixture(scope="session") -def mock_repo_path2(): - yield spack.repo.from_path(spack.paths.mock_packages_path2) - - def _pkg_install_fn(pkg, spec, prefix): # sanity_check_prefix requires something in the install directory mkdirp(prefix.bin) @@ -593,20 +588,19 @@ def mock_pkg_install(monkeypatch): @pytest.fixture(scope="function") -def mock_packages(mock_repo_path, mock_repo_path2, mock_pkg_install, request): - """Use the 'builtin.mock' and 'builtin.mock2' repositories instead of 'builtin'""" +def mock_packages(mock_repo_path, mock_pkg_install, request): + """Use the 'builtin.mock' repository instead of 'builtin'""" ensure_configuration_fixture_run_before(request) - with spack.repo.use_repositories(mock_repo_path, mock_repo_path2) as mock_repo: + with spack.repo.use_repositories(mock_repo_path) as mock_repo: yield mock_repo @pytest.fixture(scope="function") -def mutable_mock_repo(request): +def mutable_mock_repo(mock_repo_path, request): """Function-scoped mock packages, for tests that need to modify them.""" ensure_configuration_fixture_run_before(request) mock_repo = spack.repo.from_path(spack.paths.mock_packages_path) - mock_repo2 = spack.repo.from_path(spack.paths.mock_packages_path2) - with spack.repo.use_repositories(mock_repo, mock_repo2) as mock_repo_path: + with spack.repo.use_repositories(mock_repo) as mock_repo_path: yield mock_repo_path diff --git a/lib/spack/spack/test/patch.py b/lib/spack/spack/test/patch.py index 1088b1f24b1873..4b5f31b904a64b 100644 --- a/lib/spack/spack/test/patch.py +++ b/lib/spack/spack/test/patch.py @@ -499,9 +499,3 @@ def test_invalid_from_dict(mock_packages, config): } with pytest.raises(spack.fetch_strategy.ChecksumError, match="sha256 checksum failed for"): spack.patch.from_dict(dictionary) - - -@pytest.mark.regression("43097") -def test_cross_repo_patch(mock_packages, config): - cross_repo_patch = Spec("patch-a-foreign-dependency") - cross_repo_patch.concretize() diff --git a/var/spack/repos/builtin.mock/README.md b/var/spack/repos/builtin.mock/README.md deleted file mode 100644 index 5a5f6e747f5b57..00000000000000 --- a/var/spack/repos/builtin.mock/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# `builtin.mock` - -This repository and the secondary mock repo `builtin.mock2` contain mock packages used -by Spack tests. 
- -Most tests are in `builtin.mock`, but `builtin.mock2` is used for scenarios where we -need multiple repos for testing. diff --git a/var/spack/repos/builtin.mock/packages/patch-a-foreign-dependency/mock2-package.patch b/var/spack/repos/builtin.mock/packages/patch-a-foreign-dependency/mock2-package.patch deleted file mode 100644 index 02bfad9103c7f1..00000000000000 --- a/var/spack/repos/builtin.mock/packages/patch-a-foreign-dependency/mock2-package.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- patch-a-dependency/configure 2018-08-13 23:13:51.000000000 -0700 -+++ patch-a-dependency/configure.patched 2018-08-13 23:14:15.000000000 -0700 -@@ -2,7 +2,7 @@ - prefix=$(echo $1 | sed 's/--prefix=//') - cat > Makefile < Date: Sat, 9 Nov 2024 08:30:38 -0500 Subject: [PATCH 125/208] root: fix macos build (#47483) No ROOT `builtin` should ever be set to true if possible, because that builds an existing library that spack may not know about. Furthermore, using `builtin_glew` forces the package to be on, even when not building x/gl/aqua on macos. This causes build failures. Caused by https://github.com/spack/spack/pull/45632#issuecomment-2276311748 . --- var/spack/repos/builtin/packages/root/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py index 106bde97757840..e6da973906b73b 100644 --- a/var/spack/repos/builtin/packages/root/package.py +++ b/var/spack/repos/builtin/packages/root/package.py @@ -634,7 +634,7 @@ def cmake_args(self): define("builtin_freetype", False), define("builtin_ftgl", False), define("builtin_gl2ps", False), - define("builtin_glew", self.spec.satisfies("platform=darwin")), + define("builtin_glew", False), define("builtin_gsl", False), define("builtin_llvm", True), define("builtin_lz4", self.spec.satisfies("@6.12.02:6.12")), From b97015b791ca021ae6a1719940823c42fd87eeac Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 9 Nov 2024 15:04:51 +0100 Subject: [PATCH 126/208] ci: ci/all must always run, and fail if any job has status "fail" or "canceled" (#47517) This means it succeeds when a both jobs have either status "success" or status "skipped" --- .github/workflows/ci.yaml | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2024014f1bb58d..a7ceb1bd8e445f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -83,10 +83,17 @@ jobs: all-prechecks: needs: [ prechecks ] + if: ${{ always() }} runs-on: ubuntu-latest steps: - name: Success - run: "true" + run: | + if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then + echo "Unit tests failed." + exit 1 + else + exit 0 + fi coverage: needs: [ unit-tests, prechecks ] @@ -94,8 +101,19 @@ jobs: secrets: inherit all: - needs: [ coverage, bootstrap ] + needs: [ unit-tests, coverage, bootstrap ] + if: ${{ always() }} runs-on: ubuntu-latest + # See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#needs-context steps: - - name: Success - run: "true" + - name: Status summary + run: | + if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then + echo "Unit tests failed." 
+ exit 1 + elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then + echo "Bootstrap tests failed." + exit 1 + else + exit 0 + fi From e99bf48d28163f202cb98080454dd8f28d229b77 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Sat, 9 Nov 2024 06:12:46 -0800 Subject: [PATCH 127/208] Revert "upcxx %oneapi@2025: cxxflags add -Wno-error=missing-template-arg-list-after-template-kw (#47503)" (#47512) This reverts commit 4322cf56b15a0a088156f5702d44a04e8238b4b3. --- var/spack/repos/builtin/packages/upcxx/package.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/upcxx/package.py b/var/spack/repos/builtin/packages/upcxx/package.py index 4e23f42431a014..91971025683297 100644 --- a/var/spack/repos/builtin/packages/upcxx/package.py +++ b/var/spack/repos/builtin/packages/upcxx/package.py @@ -164,11 +164,7 @@ class Upcxx(Package, CudaPackage, ROCmPackage): depends_on("oneapi-level-zero@1.8.0:", when="+level_zero") # All flags should be passed to the build-env in autoconf-like vars - def flag_handler(self, name, flags): - if name == "cxxflags": - if self.spec.satisfies("%oneapi@2025:"): - flags.append("-Wno-error=missing-template-arg-list-after-template-kw") - return (flags, None, None) + flag_handler = env_flags def set_variables(self, env): env.set("UPCXX_INSTALL", self.prefix) From 97acf2614a5493dba28060d804a08482c463e141 Mon Sep 17 00:00:00 2001 From: Dom Heinzeller Date: Sat, 9 Nov 2024 07:39:55 -0700 Subject: [PATCH 128/208] cprnc: set install rpath and add v1.0.8 (#47505) --- .../builtin/packages/cprnc/install_rpath.patch | 18 ++++++++++++++++++ .../repos/builtin/packages/cprnc/package.py | 3 +++ 2 files changed, 21 insertions(+) create mode 100644 var/spack/repos/builtin/packages/cprnc/install_rpath.patch diff --git a/var/spack/repos/builtin/packages/cprnc/install_rpath.patch b/var/spack/repos/builtin/packages/cprnc/install_rpath.patch new file mode 100644 index 00000000000000..92888468f8f7b2 --- /dev/null +++ b/var/spack/repos/builtin/packages/cprnc/install_rpath.patch @@ -0,0 +1,18 @@ +--- a/CMakeLists.txt 2023-12-04 07:01:57.000000000 -0700 ++++ b/CMakeLists.txt 2024-11-08 06:53:55.090900241 -0700 +@@ -21,6 +21,7 @@ + + set(CMAKE_POSITION_INDEPENDENT_CODE ON) + set(CMAKE_MACOSX_RPATH 1) ++SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) + + # Compiler-specific compile options + if ("${CMAKE_Fortran_COMPILER_ID}" STREQUAL "GNU") +@@ -79,6 +80,7 @@ + get_filename_component(netcdf_c_lib_location ${netcdf_c_lib} DIRECTORY) + #message (STATUS "netcdf_c_lib_location == ${netcdf_c_lib_location}") + ++SET(CMAKE_INSTALL_RPATH "${netcdf_fortran_lib_location};${netcdf_c_lib_location}") + list(APPEND CMAKE_BUILD_RPATH ${netcdf_fortran_lib_location} ${netcdf_c_lib_location}) + #message("CMAKE_BUILD_RPATH is ${CMAKE_BUILD_RPATH}") + add_executable (cprnc ${CPRNC_Fortran_SRCS} ${CPRNC_GenF90_SRCS}) diff --git a/var/spack/repos/builtin/packages/cprnc/package.py b/var/spack/repos/builtin/packages/cprnc/package.py index c719c107155cc5..c1bb841ad9fcaa 100644 --- a/var/spack/repos/builtin/packages/cprnc/package.py +++ b/var/spack/repos/builtin/packages/cprnc/package.py @@ -15,6 +15,7 @@ class Cprnc(CMakePackage): maintainers("jedwards4b", "billsacks") + version("1.0.8", sha256="94ee3b4e724bc06161e576d45f34401f1452acf738803528cb80726eed230cae") version("1.0.3", sha256="3e7400f9a13d5de01964d7dd95151d08e6e30818d2a1efa9a9c7896cf6646d69") version("1.0.2", 
sha256="02edfa8050135ac0dc4a74aea05d19b0823d769b22cafa88b9352e29723d4179") version("1.0.1", sha256="b8a8fd4ad7e2716968dfa60f677217c55636580807b1309276f4c062ee432ccd") @@ -25,6 +26,8 @@ class Cprnc(CMakePackage): depends_on("netcdf-fortran") depends_on("cmake@3:", type="build") + patch("install_rpath.patch", when="@:1.0.7") + resource( name="genf90", git="https://github.com/PARALLELIO/genf90", From fa6b8a4cebc299ccb92be004b0db938e16ca8ba4 Mon Sep 17 00:00:00 2001 From: JStewart28 <80227058+JStewart28@users.noreply.github.com> Date: Sat, 9 Nov 2024 13:43:55 -0700 Subject: [PATCH 129/208] beatnik: add v1.1 (#47361) Co-authored-by: Patrick Bridges --- var/spack/repos/builtin/packages/beatnik/package.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/beatnik/package.py b/var/spack/repos/builtin/packages/beatnik/package.py index ac817cca25359d..8d75a590db80f2 100644 --- a/var/spack/repos/builtin/packages/beatnik/package.py +++ b/var/spack/repos/builtin/packages/beatnik/package.py @@ -16,9 +16,10 @@ class Beatnik(CMakePackage, CudaPackage, ROCmPackage): license("BSD-3-Clause") + version("1.1", commit="7d5a6fa588bcb7065fc53c3e8ae52d4d7f13b6f1", submodules=True) version("1.0", commit="ae31ef9cb44678d5ace77994b45b0778defa3d2f") - version("develop", branch="develop") - version("main", branch="main") + version("develop", branch="develop", submodules=True) + version("main", branch="main", submodules=True) depends_on("cxx", type="build") # generated @@ -47,13 +48,17 @@ class Beatnik(CMakePackage, CudaPackage, ROCmPackage): depends_on("kokkos +wrapper", when="%gcc+cuda") # Cabana dependencies - depends_on("cabana @0.6.0 +grid +heffte +silo +hdf5 +mpi") + depends_on("cabana @0.7.0 +grid +heffte +silo +hdf5 +mpi +arborx", when="@1.1") + depends_on("cabana @0.7.0 +grid +heffte +silo +hdf5 +mpi +arborx", when="@1.0") + depends_on("cabana @master +grid +heffte +silo +hdf5 +mpi +arborx", when="@develop") + depends_on("cabana @0.7.0 +grid +heffte +silo +hdf5 +mpi +arborx", when="@main") depends_on("cabana +cuda", when="+cuda") depends_on("cabana +rocm", when="+rocm") # Silo dependencies depends_on("silo @4.11:") - depends_on("silo @4.11.1:", when="%cce") # Eariler silo versions have trouble cce + depends_on("silo @4.11.1 +fpzip+hzip~python", when="%cce") + # Eariler silo versions have trouble with cce # Heffte dependencies - We always require FFTW so that there's a host # backend even when we're compiling for GPUs From fb5910d139317d423ea850bd95c496e8481c648b Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Sun, 10 Nov 2024 17:53:15 +0100 Subject: [PATCH 130/208] py-torchmetrics: add v1.5.2 (#47497) --- var/spack/repos/builtin/packages/py-torchmetrics/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-torchmetrics/package.py b/var/spack/repos/builtin/packages/py-torchmetrics/package.py index 16b272e7a992f9..f3cf233cbfa769 100644 --- a/var/spack/repos/builtin/packages/py-torchmetrics/package.py +++ b/var/spack/repos/builtin/packages/py-torchmetrics/package.py @@ -15,6 +15,7 @@ class PyTorchmetrics(PythonPackage): license("Apache-2.0") maintainers("adamjstewart") + version("1.5.2", sha256="2d0e4957af0ea76438d2779fe1a626d8cba6cda8607eadb54267598153e7ea63") version("1.5.1", sha256="9701632cf811bc460abf07bd7b971b79c1ae9c8231e03d495b53a0975e43fe07") version("1.5.0", sha256="c18e68bab4104ad7d2285af601ddc6dc04f9f3b7cafaa8ad13fa1dcc539e33b6") version("1.4.3", sha256="5554a19167e91f543afe82ff58a01059c8eec854359ad22896449c2c8fb0ad89") From 33109ce9b9468d75d82567b5ba0f851cef6b46e1 Mon Sep 17 00:00:00 2001 From: Matthieu Dorier Date: Sun, 10 Nov 2024 17:11:13 +0000 Subject: [PATCH 131/208] lksctp-tools: added version 1.0.21 (#47493) Adds version 1.0.21 of lksctp-tools --- var/spack/repos/builtin/packages/lksctp-tools/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/lksctp-tools/package.py b/var/spack/repos/builtin/packages/lksctp-tools/package.py index 1222dddffb42f3..e690b5a9be127d 100644 --- a/var/spack/repos/builtin/packages/lksctp-tools/package.py +++ b/var/spack/repos/builtin/packages/lksctp-tools/package.py @@ -14,6 +14,7 @@ class LksctpTools(AutotoolsPackage): license("GPL-2.0-or-later AND LGPL-2.1-or-later") + version("1.0.21", sha256="8738bf17ecffbbe2440a6e2ffaf1cbcebb633fc99d63d88761af35c02a571893") version("1.0.18", sha256="3e9ab5b3844a8b65fc8152633aafe85f406e6da463e53921583dfc4a443ff03a") depends_on("c", type="build") # generated From 825fd1ccf6e5b3142ecc01cfb0eb81ff8d58dd68 Mon Sep 17 00:00:00 2001 From: Chris Marsh Date: Sun, 10 Nov 2024 11:47:12 -0600 Subject: [PATCH 132/208] Disable the optional flexblas support as system flexiblas is possibly used as flexiblas is not a depends and the entire build chain to support using flexibls is not setup. As this does not seem to be needed with the spack blas and lapack, it is easier to disable (#47514) --- var/spack/repos/builtin/packages/armadillo/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/armadillo/package.py b/var/spack/repos/builtin/packages/armadillo/package.py index 0f723296caf920..a2b562518ab33e 100644 --- a/var/spack/repos/builtin/packages/armadillo/package.py +++ b/var/spack/repos/builtin/packages/armadillo/package.py @@ -87,4 +87,9 @@ def cmake_args(self): self.define("SuperLU_LIBRARY", spec["superlu"].libs.joined(";")), # HDF5 support self.define("DETECT_HDF5", "ON" if spec.satisfies("+hdf5") else "OFF"), + # disable flexiblas support because armadillo will possibly detect system + # flexiblas which causes problems. If this is removed, then SuperLU and ARPACK must + # also link with Flexiblas. 
As this does not seem to be needed with the spack + # blas and lapack, it is easier to disable + self.define("ALLOW_FLEXIBLAS_LINUX", "OFF"), ] From 4c9bc8d879a2eee8c38eeaee209fc0308fc68a37 Mon Sep 17 00:00:00 2001 From: Dave Keeshan <96727608+davekeeshan@users.noreply.github.com> Date: Sun, 10 Nov 2024 17:51:07 +0000 Subject: [PATCH 133/208] Add v0.47 (#47456) --- var/spack/repos/builtin/packages/yosys/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/yosys/package.py b/var/spack/repos/builtin/packages/yosys/package.py index d37712955a6536..2da4aa007a11d4 100644 --- a/var/spack/repos/builtin/packages/yosys/package.py +++ b/var/spack/repos/builtin/packages/yosys/package.py @@ -29,6 +29,7 @@ class Yosys(MakefilePackage): version("master", branch="master") + version("0.47", commit="647d61dd9212365a3cd44db219660b8f90b95cbd", submodules=True) version("0.46", commit="e97731b9dda91fa5fa53ed87df7c34163ba59a41", submodules=True) version("0.45", commit="9ed031ddd588442f22be13ce608547a5809b62f0", submodules=True) version("0.44", commit="80ba43d26264738c93900129dc0aab7fab36c53f", submodules=True) From 97b5ec6e4fc5c77885c9535e921bc03449e3e604 Mon Sep 17 00:00:00 2001 From: Kaan <61908449+kaanolgu@users.noreply.github.com> Date: Sun, 10 Nov 2024 18:51:39 +0000 Subject: [PATCH 134/208] Add support for Codeplay AMD Plugin for Intel OneAPI Compilers (#46749) * Added support for Codeplay AMD Plugin for Intel OneAPI Compilers * [@spackbot] updating style on behalf of kaanolgu * Adding 2025.0.0 * removed HOME and XDG_RUNTIME_DIR * [@spackbot] updating style on behalf of kaanolgu --------- Co-authored-by: Kaan Olgu --- .../intel-oneapi-compilers/package.py | 38 +++++++++++++++++-- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index 3fc63be6522921..d7cb974a56e185 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -20,6 +20,14 @@ "url": "https://registrationcenter-download.intel.com/akdlm/IRC_NAS/69f79888-2d6c-4b20-999e-e99d72af68d4/intel-fortran-compiler-2025.0.0.723_offline.sh", "sha256": "2be6d607ce84f35921228595b118fbc516d28587cbc4e6dcf6b7219e5cd1a9a9", }, + "nvidia-plugin": { + "url": "https://developer.codeplay.com/api/v1/products/download?product=oneapi&variant=nvidia&version=2025.0.0&filters[]=12.0&filters[]=linux", + "sha256": "264a43d2e07c08eb31d6483fb1c289a6b148709e48e9a250efc1b1e9a527feb6", + }, + "amd-plugin": { + "url": "https://developer.codeplay.com/api/v1/products/download?product=oneapi&variant=amd&version=2025.0.0&filters[]=6.1.0&filters[]=linux", + "sha256": "2c5a147e82f0e995b9c0457b53967cc066d5741d675cb64cb9eba8e3c791a064", + }, }, { "version": "2024.2.1", @@ -35,6 +43,10 @@ "url": "https://developer.codeplay.com/api/v1/products/download?product=oneapi&variant=nvidia&version=2024.2.1&filters[]=12.0&filters[]=linux", "sha256": "2c377027c650291ccd8267cbf75bd3d00c7b11998cc59d5668a02a0cbc2c015f", }, + "amd-plugin": { + "url": "https://developer.codeplay.com/api/v1/products/download?product=oneapi&variant=amd&version=2024.2.1&filters[]=6.1.0&filters[]=linux", + "sha256": "fbeb64f959f907cbf3469f4e154b2af6d8ff46fe4fc667c811e04f3872a13823", + }, }, { "version": "2024.2.0", @@ -50,6 +62,10 @@ "url": 
"https://developer.codeplay.com/api/v1/products/download?product=oneapi&variant=nvidia&version=2024.2.0&filters[]=12.0&filters[]=linux", "sha256": "0622df0054364b01e91e7ed72a33cb3281e281db5b0e86579f516b1cc5336b0f", }, + "amd-plugin": { + "url": "https://developer.codeplay.com/api/v1/products/download?product=oneapi&variant=amd&version=2024.2.0&filters[]=6.1.0&filters[]=linux", + "sha256": "d1e9d30fa92f3ef606f054d8cbd7c338b3e46f6a9f8472736e29e8ccd9e50688", + }, }, { "version": "2024.1.0", @@ -286,6 +302,9 @@ class IntelOneapiCompilers(IntelOneApiPackage, CompilerPackage): # Add the nvidia variant variant("nvidia", default=False, description="Install NVIDIA plugin for OneAPI") conflicts("@:2022.2.1", when="+nvidia", msg="Codeplay NVIDIA plugin requires newer release") + # Add the amd variant + variant("amd", default=False, description="Install AMD plugin for OneAPI") + conflicts("@:2022.2.1", when="+amd", msg="Codeplay AMD plugin requires newer release") # TODO: effectively gcc is a direct dependency of intel-oneapi-compilers, but we # cannot express that properly. For now, add conflicts for non-gcc compilers # instead. @@ -309,6 +328,14 @@ class IntelOneapiCompilers(IntelOneApiPackage, CompilerPackage): expand=False, **v["nvidia-plugin"], ) + if "amd-plugin" in v: + resource( + name="amd-plugin-installer", + placement="amd-plugin-installer", + when="@{0}".format(v["version"]), + expand=False, + **v["amd-plugin"], + ) @property def v2_layout_versions(self): @@ -374,12 +401,15 @@ def install(self, spec, prefix): if nvidia_script: if platform.system() == "Linux": bash = Executable("bash") - # Installer writes files in ~/intel set HOME so it goes to prefix - bash.add_default_env("HOME", prefix) - # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well - bash.add_default_env("XDG_RUNTIME_DIR", join_path(self.stage.path, "runtime")) # For NVIDIA plugin installer bash(nvidia_script[0], "-y", "--install-dir", self.prefix) + if self.spec.satisfies("+amd"): + amd_script = find("amd-plugin-installer", "*") + if amd_script: + if platform.system() == "Linux": + bash = Executable("bash") + # For AMD plugin installer + bash(amd_script[0], "-y", "--install-dir", self.prefix) @run_after("install") def inject_rpaths(self): From ebd4ef934c754f4c8cb5b19a7bde0c5b3d09fe7b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 13:03:37 -0600 Subject: [PATCH 135/208] build(deps): bump types-six in /.github/workflows/requirements/style (#47454) Bumps [types-six](https://github.com/python/typeshed) from 1.16.21.20241009 to 1.16.21.20241105. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-six dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 .github/workflows/requirements/style/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/requirements/style/requirements.txt b/.github/workflows/requirements/style/requirements.txt
index be2bfefc80bb11..93ab7eaa7f78cf 100644
--- a/.github/workflows/requirements/style/requirements.txt
+++ b/.github/workflows/requirements/style/requirements.txt
@@ -3,5 +3,5 @@ clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.20241009
+types-six==1.16.21.20241105
 vermin==1.6.0

From 16b01c5661c53736bf0a4a274bf40d4f176966e1 Mon Sep 17 00:00:00 2001
From: Matthieu Dorier
Date: Sun, 10 Nov 2024 19:06:41 +0000
Subject: [PATCH 136/208] librdkafka: added missing dependency on curl (#47500)

* librdkafka: added missing dependency on curl

This PR adds a missing dependency on curl in librdkafka.

* librdkafka: added dependency on openssl and zlib
---
 var/spack/repos/builtin/packages/librdkafka/package.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/var/spack/repos/builtin/packages/librdkafka/package.py b/var/spack/repos/builtin/packages/librdkafka/package.py
index 9d1a506dc19762..df90516c227e8d 100644
--- a/var/spack/repos/builtin/packages/librdkafka/package.py
+++ b/var/spack/repos/builtin/packages/librdkafka/package.py
@@ -31,3 +31,6 @@ class Librdkafka(AutotoolsPackage):
 
     depends_on("zstd")
     depends_on("lz4")
+    depends_on("curl")
+    depends_on("openssl")
+    depends_on("zlib")

From 2713b0c216cbd4483009e22a5916d843308984fb Mon Sep 17 00:00:00 2001
From: "Adam J. Stewart"
Date: Sun, 10 Nov 2024 20:21:01 +0100
Subject: [PATCH 137/208] py-kornia: add v0.7.4 (#47435)

---
 var/spack/repos/builtin/packages/py-kornia/package.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/var/spack/repos/builtin/packages/py-kornia/package.py b/var/spack/repos/builtin/packages/py-kornia/package.py
index 4789984a2ecd78..06a0c262988946 100644
--- a/var/spack/repos/builtin/packages/py-kornia/package.py
+++ b/var/spack/repos/builtin/packages/py-kornia/package.py
@@ -23,6 +23,7 @@ class PyKornia(PythonPackage):
         "adamjstewart",
     )
 
+    version("0.7.4", sha256="1f8dd6268ca5a2f2ec04b13c48da4dfb90ba2cfae7e31e0cc80d37f6520fa3f1")
     version("0.7.3", sha256="0eb861ea5d7e6c3891ae699a8b7103a5783af0a7c41888ca482420dd3d055306")
     version("0.7.2", sha256="f834ccd51188d071ed286a6727471c94344ea2a718903cc6f0e56a92f9c66ac5")
     version("0.7.1", sha256="65b54a50f70c1f88240b557fda3fdcc1ab866982a5d062e52213130f5a48465c")

From 2da4366ba6b180007976e00ee20d87d19fa0e78d Mon Sep 17 00:00:00 2001
From: Stephen Nicholas Swatman
Date: Sun, 10 Nov 2024 21:12:23 +0100
Subject: [PATCH 138/208] benchmark: enable shared libraries by default
 (#47368)

* benchmark: enable shared libraries by default

The existing behaviour of Google Benchmark yields static objects which
are of little use for most projects. This PR changes the spec to use
dynamic libraries instead.
* Add shared variant --- var/spack/repos/builtin/packages/benchmark/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/benchmark/package.py b/var/spack/repos/builtin/packages/benchmark/package.py index 8e398aefa3c722..488a7509518904 100644 --- a/var/spack/repos/builtin/packages/benchmark/package.py +++ b/var/spack/repos/builtin/packages/benchmark/package.py @@ -14,6 +14,7 @@ class Benchmark(CMakePackage): git = "https://github.com/google/benchmark.git" license("Apache-2.0") + maintainers("stephenswat") # first properly installed CMake config packages in # 1.2.0 release: https://github.com/google/benchmark/issues/363 @@ -54,6 +55,9 @@ class Benchmark(CMakePackage): when="@1.5.4:", description="Enable performance counters provided by libpfm", ) + variant( + "shared", default=True, sticky=True, description="Build the libraries as shared objects" + ) depends_on("cmake@2.8.11:", type="build", when="@:1.1.0") depends_on("cmake@2.8.12:", type="build", when="@1.2.0:1.4") @@ -64,6 +68,7 @@ def cmake_args(self): # No need for testing for the install args = [ self.define("BENCHMARK_ENABLE_TESTING", False), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), self.define_from_variant("BENCHMARK_ENABLE_LIBPFM", "performance_counters"), ] return args From 68570b75874c1312d463d1d43907a2b95db8c76f Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Sun, 10 Nov 2024 21:34:20 +0100 Subject: [PATCH 139/208] GDAL: add v3.10.0 (#47472) --- var/spack/repos/builtin/packages/gdal/package.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py index 1253219e8bf091..67e8e209875e7a 100644 --- a/var/spack/repos/builtin/packages/gdal/package.py +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -28,10 +28,10 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): list_url = "https://download.osgeo.org/gdal/" list_depth = 1 - maintainers("adamjstewart") - license("MIT") + maintainers("adamjstewart") + version("3.10.0", sha256="af821a3bcf68cf085724c21c9b53605fd451d83af3c8854d8bf194638eb734a8") version("3.9.3", sha256="34a037852ffe6d2163f1b8948a1aa7019ff767148aea55876c1339b22ad751f1") version("3.9.2", sha256="bfbcc9f087f012c36151c20c79f8eac9529e1e5298fbded79cd5a1365f0b113a") version("3.9.1", sha256="aff3086fee75f5773e33a5598df98d8a4d10be411f777d3ce23584b21d8171ca") @@ -115,6 +115,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): variant( "arrow", default=False, when="build_system=cmake", description="Required for Arrow driver" ) + variant("avif", default=False, when="@3.10:", description="Required for AVIF driver") variant( "basisu", default=False, when="@3.6:", description="Required for BASISU and KTX2 drivers" ) @@ -197,6 +198,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): "opencad", default=False, when="build_system=cmake", description="Required for CAD driver" ) variant("opencl", default=False, description="Required to accelerate warping computations") + variant("opendrive", default=False, when="@3.10:", description="Required for XODR driver") variant("openexr", default=False, when="@3.1:", description="Required for EXR driver") variant("openjpeg", default=False, description="Required for JP2OpenJPEG driver") variant("openssl", default=False, when="@2.3:", description="Required for EEDAI driver") @@ -289,6 +291,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): 
depends_on("blas", when="+armadillo") depends_on("lapack", when="+armadillo") depends_on("arrow", when="+arrow") + depends_on("libavif", when="+avif") # depends_on("basis-universal", when="+basisu") depends_on("c-blosc", when="+blosc") depends_on("brunsli", when="+brunsli") @@ -354,6 +357,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): # depends_on('ogdi', when='+ogdi') # depends_on('lib-opencad', when='+opencad') depends_on("opencl", when="+opencl") + # depends_on("libopendrive@0.6:", when="+opendrive") depends_on("openexr@2.2:", when="+openexr") depends_on("openjpeg@2.3.1:", when="@3.9:+openjpeg") depends_on("openjpeg", when="+openjpeg") @@ -549,6 +553,7 @@ def cmake_args(self): self.define_from_variant("GDAL_USE_ARCHIVE", "archive"), self.define_from_variant("GDAL_USE_ARMADILLO", "armadillo"), self.define_from_variant("GDAL_USE_ARROW", "arrow"), + self.define_from_variant("GDAL_USE_AVIF", "avif"), self.define_from_variant("GDAL_USE_BASISU", "basisu"), self.define_from_variant("GDAL_USE_BLOSC", "blosc"), self.define_from_variant("GDAL_USE_BRUNSLI", "brunsli"), @@ -595,6 +600,7 @@ def cmake_args(self): self.define_from_variant("GDAL_USE_OGDI", "ogdi"), self.define_from_variant("GDAL_USE_OPENCAD", "opencad"), self.define_from_variant("GDAL_USE_OPENCL", "opencl"), + self.define_from_variant("GDAL_USE_OPENDRIVE", "opendrive"), self.define_from_variant("GDAL_USE_OPENEXR", "openexr"), self.define_from_variant("GDAL_USE_OPENJPEG", "openjpeg"), self.define_from_variant("GDAL_USE_OPENSSL", "openssl"), From 913dcd97bcbcf3b6d15e32f8385705a585fb2159 Mon Sep 17 00:00:00 2001 From: Dave Keeshan <96727608+davekeeshan@users.noreply.github.com> Date: Sun, 10 Nov 2024 23:07:12 +0000 Subject: [PATCH 140/208] verilator: add v5.030 (#47455) * Add 5.030 and remove the requirement to patch verilator, the problem has be fixed in this rev * Update var/spack/repos/builtin/packages/verilator/package.py Co-authored-by: Wouter Deconinck --------- Co-authored-by: Wouter Deconinck --- var/spack/repos/builtin/packages/verilator/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/verilator/package.py b/var/spack/repos/builtin/packages/verilator/package.py index 0399839d0492c3..1422c28f1701f1 100644 --- a/var/spack/repos/builtin/packages/verilator/package.py +++ b/var/spack/repos/builtin/packages/verilator/package.py @@ -42,6 +42,7 @@ class Verilator(AutotoolsPackage): version("master", branch="master") + version("5.030", sha256="b9e7e97257ca3825fcc75acbed792b03c3ec411d6808ad209d20917705407eac") version("5.028", sha256="02d4b6f34754b46a97cfd70f5fcbc9b730bd1f0a24c3fc37223397778fcb142c") version("5.026", sha256="87fdecf3967007d9ee8c30191ff2476f2a33635d0e0c6e3dbf345cc2f0c50b78") version("5.024", sha256="88b04c953e7165c670d6a700f202cef99c746a0867b4e2efe1d7ea789dee35f3") @@ -87,7 +88,7 @@ class Verilator(AutotoolsPackage): conflicts("%gcc@:6", msg="C++14 support required") - patch("fix_compile_gch.patch", level=1, when="@5.0.18:") + patch("fix_compile_gch.patch", level=1, when="@5.018:5.028") # we need to fix the CXX and LINK paths, as they point to the spack # wrapper scripts which aren't usable without spack From f5b8b0ac5dfab0fa5e9baa3b2a32fc500d9a93a6 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Sun, 10 Nov 2024 17:09:06 -0600 Subject: [PATCH 141/208] mbedtls: add v2.28.9, v3.6.2 (fix CVEs) (#46637) * mbedtls: add v2.28.9, v3.6.1 (fix CVEs) * mbedtls: add v3.6.2 --- .../repos/builtin/packages/mbedtls/package.py | 16 ++++++++++++---- 1 
file changed, 12 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/mbedtls/package.py b/var/spack/repos/builtin/packages/mbedtls/package.py index 73f463239d026c..2478192bc48f66 100644 --- a/var/spack/repos/builtin/packages/mbedtls/package.py +++ b/var/spack/repos/builtin/packages/mbedtls/package.py @@ -18,13 +18,18 @@ class Mbedtls(MakefilePackage): maintainers("haampie") - license("Apache-2.0 OR GPL-2.0-or-later") + license("Apache-2.0 OR GPL-2.0-or-later", checked_by="wdconinc") # version 3.x - version("3.6.0", sha256="3ecf94fcfdaacafb757786a01b7538a61750ebd85c4b024f56ff8ba1490fcd38") - version("3.3.0", sha256="a22ff38512697b9cd8472faa2ea2d35e320657f6d268def3a64765548b81c3ec") + version("3.6.2", sha256="8b54fb9bcf4d5a7078028e0520acddefb7900b3e66fec7f7175bb5b7d85ccdca") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2024-45159 + version("3.6.1", sha256="fc8bef0991b43629b7e5319de6f34f13359011105e08e3e16eed3a9fe6ffd3a3") + version("3.6.0", sha256="3ecf94fcfdaacafb757786a01b7538a61750ebd85c4b024f56ff8ba1490fcd38") + version("3.3.0", sha256="a22ff38512697b9cd8472faa2ea2d35e320657f6d268def3a64765548b81c3ec") # version 2.x + version("2.28.9", sha256="e85ea97aaf78dd6c0a5ba2e54dd5932ffa15f39abfc189c26beef7684630c02b") version("2.28.8", sha256="241c68402cef653e586be3ce28d57da24598eb0df13fcdea9d99bfce58717132") version("2.28.2", sha256="1db6d4196178fa9f8264bef5940611cd9febcd5d54ec05f52f1e8400f792b5a4") version("2.7.19", sha256="3da12b1cebe1a25da8365d5349f67db514aefcaa75e26082d7cb2fa3ce9608aa") @@ -79,7 +84,10 @@ class Mbedtls(MakefilePackage): def url_for_version(self, version): if self.spec.satisfies("@:2.28.7,3:3.5"): return f"https://github.com/Mbed-TLS/mbedtls/archive/refs/tags/v{version}.tar.gz" - return f"https://github.com/Mbed-TLS/mbedtls/releases/download/v{version}/mbedtls-{version}.tar.bz2" + if self.spec.satisfies("@2.28.8,3.6.0"): + return f"https://github.com/Mbed-TLS/mbedtls/releases/download/v{version}/mbedtls-{version}.tar.bz2" + # release tags for @2.28.9:2,3.6.1: + return f"https://github.com/Mbed-TLS/mbedtls/releases/download/mbedtls-{version}/mbedtls-{version}.tar.bz2" def flag_handler(self, name, flags): # Compile with PIC, if requested. 
From 30db7644495c8a52f8bbf81af564a33f1373fb31 Mon Sep 17 00:00:00 2001 From: Giuncan <38205328+Giuncan@users.noreply.github.com> Date: Mon, 11 Nov 2024 03:12:15 +0100 Subject: [PATCH 142/208] lua: always generate pcfile without patch and remove +pcfile variant (#47353) * lua: add +pcfile support for @5.4: versions, without using a version-dependent patch * lua: always generate pcfile, remove +pcfile variant from all packages * lua: minor fixes * rpm: minor fix --- .../repos/builtin/packages/lua/package.py | 38 ++++++++++++------- .../repos/builtin/packages/rpm/package.py | 4 +- 2 files changed, 26 insertions(+), 16 deletions(-) diff --git a/var/spack/repos/builtin/packages/lua/package.py b/var/spack/repos/builtin/packages/lua/package.py index 683657882359f0..8f791b5cd867b7 100644 --- a/var/spack/repos/builtin/packages/lua/package.py +++ b/var/spack/repos/builtin/packages/lua/package.py @@ -12,6 +12,23 @@ from spack.package import * from spack.util.executable import Executable +# This is the template for a pkgconfig file for rpm +# https://github.com/guix-mirror/guix/raw/dcaf70897a0bad38a4638a2905aaa3c46b1f1402/gnu/packages/patches/lua-pkgconfig.patch +_LUA_PC_TEMPLATE = """prefix={0} +libdir={0}/lib +includedir={0}/include +bindir={0}/bin +INSTALL_LMOD={0}/share/lua/{1} +INSTALL_CMOD={0}/lib/lua/{1} +INTERPRETER=${{bindir}}/lua +COMPILER=${{bindir}}/luac +Name: Lua +Description: A powerful, fast, lightweight, embeddable scripting language +Version: {2} +Libs: -L${{libdir}} -llua -lm +Cflags: -I${{includedir}} +""" + class LuaImplPackage(MakefilePackage): """Specialized class for lua *implementations* @@ -226,7 +243,6 @@ class Lua(LuaImplPackage): depends_on("c", type="build") # generated depends_on("cxx", type="build") # generated - variant("pcfile", default=False, description="Add patch for lua.pc generation") variant("shared", default=True, description="Builds a shared version of the library") provides("lua-lang@5.1", when="@5.1:5.1.99") @@ -237,12 +253,6 @@ class Lua(LuaImplPackage): depends_on("ncurses+termlib") depends_on("readline") - patch( - "http://lua.2524044.n2.nabble.com/attachment/7666421/0/pkg-config.patch", - sha256="208316c2564bdd5343fa522f3b230d84bd164058957059838df7df56876cb4ae", - when="+pcfile @:5.3.9999", - ) - def build(self, spec, prefix): if spec.satisfies("platform=darwin"): target = "macosx" @@ -289,10 +299,10 @@ def install(self, spec, prefix): os.symlink(src_path, dest_path) @run_after("install") - def link_pkg_config(self): - if self.spec.satisfies("+pcfile"): - versioned_pc_file_name = "lua{0}.pc".format(self.version.up_to(2)) - symlink( - join_path(self.prefix.lib, "pkgconfig", versioned_pc_file_name), - join_path(self.prefix.lib, "pkgconfig", "lua.pc"), - ) + def generate_pkg_config(self): + mkdirp(self.prefix.lib.pkgconfig) + versioned_pc_file_name = "lua{0}.pc".format(self.version.up_to(2)) + versioned_pc_file_path = join_path(self.prefix.lib.pkgconfig, versioned_pc_file_name) + with open(versioned_pc_file_path, "w") as pcfile: + pcfile.write(_LUA_PC_TEMPLATE.format(self.prefix, self.version.up_to(2), self.version)) + symlink(versioned_pc_file_path, join_path(self.prefix.lib.pkgconfig, "lua.pc")) diff --git a/var/spack/repos/builtin/packages/rpm/package.py b/var/spack/repos/builtin/packages/rpm/package.py index 18d373f299e9ec..39d2d44af4d848 100644 --- a/var/spack/repos/builtin/packages/rpm/package.py +++ b/var/spack/repos/builtin/packages/rpm/package.py @@ -60,8 +60,8 @@ class Rpm(AutotoolsPackage): # Always required depends_on("popt") - # Without this 
file patch, we don't detect lua - depends_on("lua+pcfile@5.3.5:", when="+lua") + # support for embedded Lua interpreter + depends_on("lua@5.3.5:", when="+lua") # Enable POSIX.1e draft 15 file capabilities support depends_on("libcap", when="+posix") From a9e60749964e976382fc87bf5e8b9a0d40b816d2 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 11 Nov 2024 10:43:23 +0100 Subject: [PATCH 143/208] filesystem.py find: return directories and improve performance (#47537) --- lib/spack/llnl/util/filesystem.py | 84 ++++++++++---------- lib/spack/spack/test/llnl/util/filesystem.py | 22 ++--- 2 files changed, 49 insertions(+), 57 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 83cbe45104377b..a876a76c270266 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -1693,11 +1693,11 @@ def find( recursive: bool = True, max_depth: Optional[int] = None, ) -> List[str]: - """Finds all non-directory files matching the patterns from ``files`` starting from ``root``. - This function returns a deterministic result for the same input and directory structure when - run multiple times. Symlinked directories are followed, and unique directories are searched - only once. Each matching file is returned only once at lowest depth in case multiple paths - exist due to symlinked directories. + """Finds all files matching the patterns from ``files`` starting from ``root``. This function + returns a deterministic result for the same input and directory structure when run multiple + times. Symlinked directories are followed, and unique directories are searched only once. Each + matching file is returned only once at lowest depth in case multiple paths exist due to + symlinked directories. Accepts any glob characters accepted by fnmatch: @@ -1830,54 +1830,58 @@ def _find_max_depth( # Use glob.glob for complex patterns. for pattern_name, pattern in complex_patterns.items(): matched_paths[pattern_name].extend( - path - for path in glob.glob(os.path.join(curr_dir, pattern)) - if not os.path.isdir(path) + path for path in glob.glob(os.path.join(curr_dir, pattern)) ) + # List of subdirectories by path and (inode, device) tuple + subdirs: List[Tuple[str, Tuple[int, int]]] = [] + with dir_iter: - ordered_entries = sorted(dir_iter, key=lambda x: x.name) - for dir_entry in ordered_entries: + for dir_entry in dir_iter: + + # Match filename only patterns + if filename_only_patterns: + m = regex.match(os.path.normcase(dir_entry.name)) + if m: + for pattern_name in filename_only_patterns: + if m.group(pattern_name): + matched_paths[pattern_name].append(dir_entry.path) + break + + # Collect subdirectories + if depth >= max_depth: + continue + try: - it_is_a_dir = dir_entry.is_dir(follow_symlinks=True) + if not dir_entry.is_dir(follow_symlinks=True): + continue + if sys.platform == "win32": + # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we have + # to call os.stat + stat_info = os.stat(dir_entry.path, follow_symlinks=True) + else: + stat_info = dir_entry.stat(follow_symlinks=True) except OSError as e: # Possible permission issue, or a symlink that cannot be resolved (ELOOP). _log_file_access_issue(e, dir_entry.path) continue - if it_is_a_dir: - if depth >= max_depth: - continue - try: - # The stat should be performed in a try/except block. We repeat that here - # vs. 
moving to the above block because we only want to call `stat` if we - # haven't exceeded our max_depth - if sys.platform == "win32": - # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we - # have to call os.stat - stat_info = os.stat(dir_entry.path, follow_symlinks=True) - else: - stat_info = dir_entry.stat(follow_symlinks=True) - except OSError as e: - _log_file_access_issue(e, dir_entry.path) - continue + subdirs.append((dir_entry.path, _file_id(stat_info))) - dir_id = _file_id(stat_info) - if dir_id not in visited_dirs: - dir_queue.appendleft((depth + 1, dir_entry.path)) - visited_dirs.add(dir_id) - elif filename_only_patterns: - m = regex.match(os.path.normcase(dir_entry.name)) - if not m: - continue - for pattern_name in filename_only_patterns: - if m.group(pattern_name): - matched_paths[pattern_name].append(dir_entry.path) - break + # Enqueue subdirectories in a deterministic order + if subdirs: + subdirs.sort(key=lambda s: os.path.basename(s[0])) + for subdir, subdir_id in subdirs: + if subdir_id not in visited_dirs: + dir_queue.appendleft((depth + 1, subdir)) + visited_dirs.add(subdir_id) + # Sort the matched paths for deterministic output + for paths in matched_paths.values(): + paths.sort() all_matching_paths = [path for paths in matched_paths.values() for path in paths] - # we only dedupe files if we have any complex patterns, since only they can match the same file + # We only dedupe files if we have any complex patterns, since only they can match the same file # multiple times return _dedupe_files(all_matching_paths) if complex_patterns else all_matching_paths diff --git a/lib/spack/spack/test/llnl/util/filesystem.py b/lib/spack/spack/test/llnl/util/filesystem.py index fd801295f4c26b..1a32e5707c735b 100644 --- a/lib/spack/spack/test/llnl/util/filesystem.py +++ b/lib/spack/spack/test/llnl/util/filesystem.py @@ -1130,16 +1130,16 @@ def complex_dir_structure(request, tmpdir): / l1-d1/ l2-d1/ - l3-s1 -> l1-d2 # points to directory above l2-d1 l3-d2/ l4-f1 - l3-s3 -> l1-d1 # cyclic link l3-d4/ l4-f2 + l3-s1 -> l1-d2 # points to directory above l2-d1 + l3-s3 -> l1-d1 # cyclic link l1-d2/ - l2-f1 l2-d2/ l3-f3 + l2-f1 l2-s3 -> l2-d2 l1-s3 -> l3-d4 # a link that "skips" a directory level l1-s4 -> l2-s3 # a link to a link to a dir @@ -1155,7 +1155,7 @@ def complex_dir_structure(request, tmpdir): l3_d2 = l2_d1.join("l3-d2").ensure(dir=True) l3_d4 = l2_d1.join("l3-d4").ensure(dir=True) l1_d2 = tmpdir.join("l1-d2").ensure(dir=True) - l2_d2 = l1_d2.join("l1-d2").ensure(dir=True) + l2_d2 = l1_d2.join("l2-d2").ensure(dir=True) if use_junctions: link_fn = llnl.util.symlink._windows_create_junction @@ -1216,7 +1216,7 @@ def test_find_max_depth_multiple_and_repeated_entry_points(complex_dir_structure def test_multiple_patterns(complex_dir_structure): root, _ = complex_dir_structure - paths = fs.find(root, ["l2-f1", "l*-d*/l3-f3", "*", "*/*"]) + paths = fs.find(root, ["l2-f1", "l*-d*/l3-f3", "*-f*", "*/*-f*"]) # There shouldn't be duplicate results with multiple, overlapping patterns assert len(set(paths)) == len(paths) # All files should be found @@ -1249,15 +1249,3 @@ def test_find_input_types(tmp_path: pathlib.Path): with pytest.raises(TypeError): fs.find(1, "file.txt") # type: ignore - - -def test_find_only_finds_files(tmp_path: pathlib.Path): - """ensure that find only returns files even at max_depth""" - (tmp_path / "subdir").mkdir() - (tmp_path / "subdir" / "dir").mkdir() - (tmp_path / "subdir" / "file.txt").write_text("") - assert ( - fs.find(tmp_path, "*", max_depth=1) - 
== fs.find(tmp_path, "*/*", max_depth=1) - == [str(tmp_path / "subdir" / "file.txt")] - ) From 6961514122e04f3509c5e4a095060b056c38f5e9 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 10 Nov 2024 16:00:34 -0800 Subject: [PATCH 144/208] imports: move `conditional` to `directives.py` `conditional()`, which defines conditional variant values, and the other ways to declare variant values should probably be in a layer above `spack.variant`. This does the simple thing and moves *just* `conditional()` to `spack.directives` to avoid a circular import. We can revisit the public variant interface later, when we split packages from core. Co-authored-by: Harmen Stoppels Signed-off-by: Todd Gamblin --- lib/spack/spack/directives.py | 10 ++++++++++ lib/spack/spack/package.py | 7 +------ lib/spack/spack/variant.py | 11 ++--------- var/spack/repos/builtin/packages/geant4/package.py | 4 ++-- var/spack/repos/builtin/packages/vecgeom/package.py | 4 ++-- 5 files changed, 17 insertions(+), 19 deletions(-) diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 8f9e43bf8bfbba..7a3657e2225f5c 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -64,6 +64,7 @@ class OpenMpi(Package): "DirectiveMeta", "DisableRedistribute", "version", + "conditional", "conflicts", "depends_on", "extends", @@ -577,6 +578,15 @@ def _execute_patch(pkg_or_dep: Union["spack.package_base.PackageBase", Dependenc return _execute_patch +def conditional(*values: List[Any], when: Optional[WhenType] = None): + """Conditional values that can be used in variant declarations.""" + # _make_when_spec returns None when the condition is statically false. + when = _make_when_spec(when) + return spack.variant.ConditionalVariantValues( + spack.variant.Value(x, when=when) for x in values + ) + + @directive("variants") def variant( name: str, diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index bf8538032ae462..f8028d9ecaf10c 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -103,12 +103,7 @@ from spack.spec import InvalidSpecDetected, Spec from spack.util.executable import * from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type -from spack.variant import ( - any_combination_of, - auto_or_any_combination_of, - conditional, - disjoint_sets, -) +from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets from spack.version import Version, ver # These are just here for editor support; they will be replaced when the build env diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index e5a5ddfa3c904a..e5dcf72f36f94f 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -266,7 +266,7 @@ def _flatten(values) -> Collection: flattened: List = [] for item in values: - if isinstance(item, _ConditionalVariantValues): + if isinstance(item, ConditionalVariantValues): flattened.extend(item) else: flattened.append(item) @@ -884,17 +884,10 @@ def prevalidate_variant_value( ) -class _ConditionalVariantValues(lang.TypedMutableSequence): +class ConditionalVariantValues(lang.TypedMutableSequence): """A list, just with a different type""" -def conditional(*values: List[Any], when: Optional["spack.directives.WhenType"] = None): - """Conditional values that can be used in variant declarations.""" - # _make_when_spec returns None when the condition is statically false. 
- when = spack.directives._make_when_spec(when) - return _ConditionalVariantValues([Value(x, when=when) for x in values]) - - class DuplicateVariantError(error.SpecError): """Raised when the same variant occurs in a spec twice.""" diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index 8dd3f18b5d92b5..38ebb060fb6340 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -4,7 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * -from spack.variant import _ConditionalVariantValues +from spack.variant import ConditionalVariantValues class Geant4(CMakePackage): @@ -180,7 +180,7 @@ class Geant4(CMakePackage): def std_when(values): for v in values: - if isinstance(v, _ConditionalVariantValues): + if isinstance(v, ConditionalVariantValues): for c in v: yield (c.value, c.when) else: diff --git a/var/spack/repos/builtin/packages/vecgeom/package.py b/var/spack/repos/builtin/packages/vecgeom/package.py index d48585ad13e990..ff0fdd145dc7da 100644 --- a/var/spack/repos/builtin/packages/vecgeom/package.py +++ b/var/spack/repos/builtin/packages/vecgeom/package.py @@ -5,7 +5,7 @@ from spack.package import * -from spack.variant import _ConditionalVariantValues +from spack.variant import ConditionalVariantValues class Vecgeom(CMakePackage, CudaPackage): @@ -196,7 +196,7 @@ class Vecgeom(CMakePackage, CudaPackage): def std_when(values): for v in values: - if isinstance(v, _ConditionalVariantValues): + if isinstance(v, ConditionalVariantValues): for c in v: yield (c.value, c.when) else: From c4a5a996a54b0e9385c9311aea0efbe6df00de28 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 10 Nov 2024 17:37:36 -0800 Subject: [PATCH 145/208] solver: avoid parsing specs in setup - [x] Get rid of a call to `parser.quote_if_needed()` during solver setup, which introduces a circular import and also isn't necessary. - [x] Rename `spack.variant.Value` to `spack.variant.ConditionalValue`, as it is *only* used for conditional values. This makes it much easier to understand some of the logic for variant definitions. 
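As a minimal sketch of the rename (adapted from the unit test updated in
this patch), a conditional variant value is now constructed as:

    value = spack.variant.ConditionalValue("98", when=Spec("@1.0"))

where the same value was previously spelled
`spack.variant.Value("98", when="@1.0")`.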
Co-authored-by: Harmen Stoppels Signed-off-by: Todd Gamblin --- lib/spack/spack/audit.py | 4 ++-- lib/spack/spack/directives.py | 2 +- lib/spack/spack/solver/asp.py | 17 +++++++++-------- lib/spack/spack/test/variant.py | 2 +- lib/spack/spack/variant.py | 11 +++++++---- 5 files changed, 20 insertions(+), 16 deletions(-) diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py index dc988ac90edd97..273e335ade03cf 100644 --- a/lib/spack/spack/audit.py +++ b/lib/spack/spack/audit.py @@ -714,9 +714,9 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls): for pkg_name in pkgs: pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) - # values are either Value objects (for conditional values) or the values themselves + # values are either ConditionalValue objects or the values themselves build_system_names = set( - v.value if isinstance(v, spack.variant.Value) else v + v.value if isinstance(v, spack.variant.ConditionalValue) else v for _, variant in pkg_cls.variant_definitions("build_system") for v in variant.values ) diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 7a3657e2225f5c..0d6b66780c8921 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -583,7 +583,7 @@ def conditional(*values: List[Any], when: Optional[WhenType] = None): # _make_when_spec returns None when the condition is statically false. when = _make_when_spec(when) return spack.variant.ConditionalVariantValues( - spack.variant.Value(x, when=when) for x in values + spack.variant.ConditionalValue(x, when=when) for x in values ) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index b723b6bbb22023..24b7aeb4ff17a5 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1436,14 +1436,13 @@ def define_variant( for value in sorted(values): pkg_fact(fn.variant_possible_value(vid, value)) - # when=True means unconditional, so no need for conditional values - if getattr(value, "when", True) is True: + # we're done here for unconditional values + if not isinstance(value, vt.ConditionalValue): continue - # now we have to handle conditional values - quoted_value = spack.parser.quote_if_needed(str(value)) - vstring = f"{name}={quoted_value}" - variant_has_value = spack.spec.Spec(vstring) + # make a spec indicating whether the variant has this conditional value + variant_has_value = spack.spec.Spec() + variant_has_value.variants[name] = spack.variant.AbstractVariant(name, value.value) if value.when: # the conditional value is always "possible", but it imposes its when condition as @@ -1454,10 +1453,12 @@ def define_variant( imposed_spec=value.when, required_name=pkg.name, imposed_name=pkg.name, - msg=f"{pkg.name} variant {name} has value '{quoted_value}' when {value.when}", + msg=f"{pkg.name} variant {name} has value '{value.value}' when {value.when}", ) else: - # We know the value is never allowed statically (when was false), but we can't just + vstring = f"{name}='{value.value}'" + + # We know the value is never allowed statically (when was None), but we can't just # ignore it b/c it could come in as a possible value and we need a good error msg. # So, it's a conflict -- if the value is somehow used, it'll trigger an error. 
trigger_id = self.condition( diff --git a/lib/spack/spack/test/variant.py b/lib/spack/spack/test/variant.py index c4c439b86f8991..518110d52534d0 100644 --- a/lib/spack/spack/test/variant.py +++ b/lib/spack/spack/test/variant.py @@ -762,7 +762,7 @@ def test_disjoint_set_fluent_methods(): @pytest.mark.regression("32694") @pytest.mark.parametrize("other", [True, False]) def test_conditional_value_comparable_to_bool(other): - value = spack.variant.Value("98", when="@1.0") + value = spack.variant.ConditionalValue("98", when=Spec("@1.0")) comparison = value == other assert comparison is False diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index e5dcf72f36f94f..0dc82b2ff7b381 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -775,18 +775,21 @@ def disjoint_sets(*sets): @functools.total_ordering -class Value: - """Conditional value that might be used in variants.""" +class ConditionalValue: + """Conditional value for a variant.""" value: Any - when: Optional["spack.spec.Spec"] # optional b/c we need to know about disabled values + + # optional because statically disabled values (when=False) are set to None + # when=True results in spack.spec.Spec() + when: Optional["spack.spec.Spec"] def __init__(self, value: Any, when: Optional["spack.spec.Spec"]): self.value = value self.when = when def __repr__(self): - return f"Value({self.value}, when={self.when})" + return f"ConditionalValue({self.value}, when={self.when})" def __str__(self): return str(self.value) From 84d33fccce5d4395091b63a2aa8a1ee61ecbc5af Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 11 Nov 2024 13:05:01 +0100 Subject: [PATCH 146/208] llvm: filter clang-ocl from the executables being probed (#47536) This filters any selected executable ending with `-ocl` from the list of executables being probed as candidate for external `llvm` installations. I couldn't reproduce the entire issue, but with a simple script: ``` #!/bin/bash touch foo.o echo "clang version 10.0.0-4ubuntu1 " echo "Target: x86_64-pc-linux-gnu" echo "Thread model: posix" echo "InstalledDir: /usr/bin" exit 0 ``` I noticed the executable was still probed: ``` $ spack -d compiler find /tmp/ocl [ ... ] ==> [2024-11-11-08:38:41.933618] '/tmp/ocl/bin/clang-ocl' '--version' ``` and `foo.o` was left in the working directory. With this change, instead the executable is filtered out of the list on which we run `--version`, so `clang-ocl --version` is not run by Spack. --- var/spack/repos/builtin/packages/llvm/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 208d73f0940533..0dc66b950357cc 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -28,7 +28,7 @@ def filter_detected_exes(cls, prefix, exes_in_prefix): # Executables like lldb-vscode-X are daemon listening on some port and would hang Spack # during detection. clang-cl, clang-cpp, etc. 
are dev tools that we don't need to test reject = re.compile( - r"-(vscode|cpp|cl|gpu|tidy|rename|scan-deps|format|refactor|offload|" + r"-(vscode|cpp|cl|ocl|gpu|tidy|rename|scan-deps|format|refactor|offload|" r"check|query|doc|move|extdef|apply|reorder|change-namespace|" r"include-fixer|import-test|dap|server)" ) From 01edde35beefb57c44c87eeea5afc759db683c01 Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Mon, 11 Nov 2024 14:07:08 +0100 Subject: [PATCH 147/208] ut: add 2.1.0 and 2.1.1 (#47538) --- var/spack/repos/builtin/packages/ut/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/ut/package.py b/var/spack/repos/builtin/packages/ut/package.py index fc4add539ccfc8..2cdd96dc99eca6 100644 --- a/var/spack/repos/builtin/packages/ut/package.py +++ b/var/spack/repos/builtin/packages/ut/package.py @@ -18,6 +18,8 @@ class Ut(CMakePackage): license("BSL-1.0") version("master", branch="master") + version("2.1.1", sha256="016ac5ece1808cd1100be72f90da4fa59ea41de487587a3283c6c981381cc216") + version("2.1.0", sha256="1c9c35c039ad3a9795a278447db6da0a4ec1a1d223bf7d64687ad28f673b7ae8") version("2.0.1", sha256="1e43be17045a881c95cedc843d72fe9c1e53239b02ed179c1e39e041ebcd7dad") version("2.0.0", sha256="8b5b11197d1308dfc1fe20efd6a656e0c833dbec2807e2292967f6e2f7c0420f") version("1.1.9", sha256="1a666513157905aa0e53a13fac602b5673dcafb04a869100a85cd3f000c2ed0d") From 8b165c2cfec43659fa33847b538fb8f0c94e3a42 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 11 Nov 2024 07:28:52 -0600 Subject: [PATCH 148/208] py-gosam: add v2.1.2 (#47533) --- var/spack/repos/builtin/packages/py-gosam/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-gosam/package.py b/var/spack/repos/builtin/packages/py-gosam/package.py index fc6eff141da774..c5f5433383e915 100644 --- a/var/spack/repos/builtin/packages/py-gosam/package.py +++ b/var/spack/repos/builtin/packages/py-gosam/package.py @@ -20,6 +20,11 @@ class PyGosam(Package): license("GPL-3.0-only") + version( + "2.1.2", + url="https://github.com/gudrunhe/gosam/releases/download/2.1.2/gosam-2.1.2+c307997.tar.gz", + sha256="53601ab203c3d572764439018f976baff9c83b87abe1fcbbe15c07caf174680c", + ) version( "2.1.1", url="https://github.com/gudrunhe/gosam/releases/download/2.1.1/gosam-2.1.1-4b98559.tar.gz", From 8c962a94b0c2d5ebe8371774966d32fea4a5bbcc Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 11 Nov 2024 08:24:44 -0600 Subject: [PATCH 149/208] vbfnlo: add v3.0; depends on tcsh (build) (#47532) * vbfnlo: depends on tcsh (build) * vbfnlo: add v3.0 * vbfnlo: comment Co-authored-by: Valentin Volkl --------- Co-authored-by: Valentin Volkl --- var/spack/repos/builtin/packages/vbfnlo/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/vbfnlo/package.py b/var/spack/repos/builtin/packages/vbfnlo/package.py index 77648aef6148a0..42abc3245cfcd4 100644 --- a/var/spack/repos/builtin/packages/vbfnlo/package.py +++ b/var/spack/repos/builtin/packages/vbfnlo/package.py @@ -20,6 +20,7 @@ class Vbfnlo(AutotoolsPackage): license("GPL-2.0-only") # The commented out versions exist, but are not tested + version("3.0", sha256="b9df02603e4f801f866360c720191a29afdb958d0bd4369ea7d810e761503e51") version( "3.0.0beta5", sha256="777a3dedb365ea9abc38848a60f30d325da3799cbad69fa308664b94a8c31a90" ) @@ -41,7 +42,6 @@ class Vbfnlo(AutotoolsPackage): depends_on("cxx", type="build") # generated depends_on("fortran", type="build") # generated 
- # version('2.7.0', sha256='0e96c0912599e3000fffec5305700b947b604a7b06c7975851503f445311e4ef') # Documentation is broken on some systems: # See https://github.com/vbfnlo/vbfnlo/issues/2 @@ -55,6 +55,8 @@ class Vbfnlo(AutotoolsPackage): depends_on("autoconf", type="build") depends_on("m4", type="build") depends_on("libtool", type="build") + # needed as tcsh is hardcoded in m4/vbfnlo.m4, could be patched out in the future + depends_on("tcsh", type="build") @when("@2.7.1") def setup_build_environment(self, env): From f458392c1b223d628bffe1ce72d7a86118f3fc75 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Mon, 11 Nov 2024 08:52:20 -0600 Subject: [PATCH 150/208] petsc: use --with-exodusii-dir [as exodus does not have 'libs()' to provide value for --with-exodusii-lib] (#47506) --- var/spack/repos/builtin/packages/petsc/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 0a0d61da856919..918b2e83679969 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -514,6 +514,9 @@ def configure_options(self): else: hdf5libs = ":hl" + if "+exodusii+fortran" in spec and "+fortran" in spec: + options.append("--with-exodusii-fortran-bindings") + # tuple format (spacklibname, petsclibname, useinc, uselib) # default: 'gmp', => ('gmp', 'gmp', True, True) # any other combination needs a full tuple @@ -553,7 +556,7 @@ def configure_options(self): ("parallel-netcdf", "pnetcdf", True, True), ("moab", "moab", False, False), ("random123", "random123", False, False), - "exodusii", + ("exodusii", "exodusii", False, False), "cgns", "memkind", "p4est", From 33dd894eff169b78103fb7bb5e2b6ebe62cd3916 Mon Sep 17 00:00:00 2001 From: v <39996356+vhewes@users.noreply.github.com> Date: Mon, 11 Nov 2024 09:10:09 -0600 Subject: [PATCH 151/208] py-oracledb: add v1.4.2, v2.3.0, v2.4.1 (#47313) the py-oracledb package only has a single outdated version available in its recipe. this PR adds a much broader range of versions and their corresponding checksums. 
* add more versions of py-oracledb
* update py-oracledb recipe
* add py-cython version dependencies
* tweak py-cython version dependencies
* remove older versions of py-oracledb
---
 .../repos/builtin/packages/py-oracledb/package.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/var/spack/repos/builtin/packages/py-oracledb/package.py b/var/spack/repos/builtin/packages/py-oracledb/package.py
index 3e50ac8da0c8d6..2e690e36be70a4 100644
--- a/var/spack/repos/builtin/packages/py-oracledb/package.py
+++ b/var/spack/repos/builtin/packages/py-oracledb/package.py
@@ -12,17 +12,23 @@ class PyOracledb(PythonPackage):
     Python programs to access Oracle Database."""

     homepage = "https://oracle.github.io/python-oracledb/"
-    pypi = "oracledb/oracledb-1.2.2.tar.gz"
+    pypi = "oracledb/oracledb-1.4.2.tar.gz"

     license("Apache-2.0")

-    version("1.2.2", sha256="dd9f63084e44642b484a46b2fcfb4fc921f39facf494a1bab00628fa6409f4fc")
+    version("2.4.1", sha256="bd5976bef0e466e0f9d1b9f6531fb5b8171dc8534717ccb04b26e680b6c7571d")
+    version("2.3.0", sha256="b9b0c4ec280b10063e6789bed23ddc2435ae98569ebe64e0b9a270780b9103d5")
+    version("1.4.2", sha256="e28ed9046f2735dc2dd5bbcdf3667f284e384e0ec7eed3eeb3798fa8a7d47e36")

-    depends_on("c", type="build")  # generated
+    depends_on("python@3.8:3.13", when="@2.4:")
+    depends_on("python@3.8:3.12", when="@2.0:2.3")
+    depends_on("python@3.8:3.11", when="@:1.4")
+
+    depends_on("c", type="build")

     depends_on("py-setuptools@40.6.0:", type="build")
     depends_on("py-cryptography@3.2.1:", type=("build", "run"))
-    depends_on("py-cython", type="build")
+    depends_on("py-cython@3:", type="build")
     depends_on("python@3.6:", type=("build", "run"))

     depends_on("oracle-instant-client", type="run", when="impl=thick")

From b803dabb2c28e93c41f62977c65aad385594b8f9 Mon Sep 17 00:00:00 2001
From: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com>
Date: Mon, 11 Nov 2024 09:34:39 -0600
Subject: [PATCH 152/208] mirrors: allow username/password as environment
 variables (#46549)

`spack mirror add` and `set` now have flags `--oci-username-variable`,
`--oci-password-variable`, `--s3-access-key-id-variable`,
`--s3-access-key-secret-variable`, `--s3-access-token-variable`, which
allow users to specify an environment variable in which a username or
password is stored.

Storing plain text passwords in config files is considered deprecated.

The schema for mirrors.yaml has changed, notably the `access_pair` list is
generally replaced with a dictionary of `{id: ..., secret_variable: ...}`
or `{id_variable: ..., secret_variable: ...}`.
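For example, a mirror whose S3 credentials are read from the environment
could now be configured as below (mirror name, bucket, and variable names
are illustrative):

    mirrors:
      my-mirror:
        url: s3://my-bucket
        access_pair:
          id_variable: MY_MIRROR_KEY_ID
          secret_variable: MY_MIRROR_SECRET

A plain-text id can still be paired with an environment-provided secret
(`{id: ..., secret_variable: ...}`), and access tokens can likewise be
supplied via `access_token_variable`.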
--- lib/spack/spack/binary_distribution.py | 3 + lib/spack/spack/cmd/common/arguments.py | 48 +++++++-- lib/spack/spack/cmd/mirror.py | 137 ++++++++++++++++++++++-- lib/spack/spack/mirror.py | 112 +++++++++++++++++-- lib/spack/spack/oci/opener.py | 5 +- lib/spack/spack/schema/mirrors.py | 61 ++++++++++- lib/spack/spack/test/cmd/mirror.py | 126 ++++++++++++++++++++-- lib/spack/spack/test/mirror.py | 60 ++++++++++- lib/spack/spack/util/s3.py | 25 ++--- share/spack/spack-completion.bash | 6 +- share/spack/spack-completion.fish | 36 ++++++- 11 files changed, 556 insertions(+), 63 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 2f74f38dac3200..dcc82130e77441 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -1182,6 +1182,9 @@ def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool) self.tmpdir: str self.executor: concurrent.futures.Executor + # Verify if the mirror meets the requirements to push + self.mirror.ensure_mirror_usable("push") + def __enter__(self): self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) self._executor = spack.util.parallel.make_concurrent_executor() diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py index 6a4a43e9e93746..7ddafd4d14c566 100644 --- a/lib/spack/spack/cmd/common/arguments.py +++ b/lib/spack/spack/cmd/common/arguments.py @@ -581,23 +581,51 @@ def add_concretizer_args(subparser): def add_connection_args(subparser, add_help): - subparser.add_argument( - "--s3-access-key-id", help="ID string to use to connect to this S3 mirror" + def add_argument_string_or_variable(parser, arg: str, *, deprecate_str: bool = True, **kwargs): + group = parser.add_mutually_exclusive_group() + group.add_argument(arg, **kwargs) + # Update help string + if "help" in kwargs: + kwargs["help"] = "environment variable containing " + kwargs["help"] + group.add_argument(arg + "-variable", **kwargs) + + s3_connection_parser = subparser.add_argument_group("S3 Connection") + + add_argument_string_or_variable( + s3_connection_parser, + "--s3-access-key-id", + help="ID string to use to connect to this S3 mirror", ) - subparser.add_argument( - "--s3-access-key-secret", help="secret string to use to connect to this S3 mirror" + add_argument_string_or_variable( + s3_connection_parser, + "--s3-access-key-secret", + help="secret string to use to connect to this S3 mirror", ) - subparser.add_argument( - "--s3-access-token", help="access token to use to connect to this S3 mirror" + add_argument_string_or_variable( + s3_connection_parser, + "--s3-access-token", + help="access token to use to connect to this S3 mirror", ) - subparser.add_argument( + s3_connection_parser.add_argument( "--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None ) - subparser.add_argument( + s3_connection_parser.add_argument( "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror" ) - subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror") - subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror") + + oci_connection_parser = subparser.add_argument_group("OCI Connection") + + add_argument_string_or_variable( + oci_connection_parser, + "--oci-username", + deprecate_str=False, + help="username to use to connect to this OCI mirror", + ) + add_argument_string_or_variable( + oci_connection_parser, + 
"--oci-password", + help="password to use to connect to this OCI mirror", + ) def use_buildcache(cli_arg_value): diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index af6a45e3990752..ede042949796c0 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -231,31 +231,133 @@ def setup_parser(subparser): ) +def _configure_access_pair( + args, id_tok, id_variable_tok, secret_tok, secret_variable_tok, default=None +): + """Configure the access_pair options""" + + # Check if any of the arguments are set to update this access_pair. + # If none are set, then skip computing the new access pair + args_id = getattr(args, id_tok) + args_id_variable = getattr(args, id_variable_tok) + args_secret = getattr(args, secret_tok) + args_secret_variable = getattr(args, secret_variable_tok) + if not any([args_id, args_id_variable, args_secret, args_secret_variable]): + return None + + def _default_value(id_): + if isinstance(default, list): + return default[0] if id_ == "id" else default[1] + elif isinstance(default, dict): + return default.get(id_) + else: + return None + + def _default_variable(id_): + if isinstance(default, dict): + return default.get(id_ + "_variable") + else: + return None + + id_ = None + id_variable = None + secret = None + secret_variable = None + + # Get the value/default value if the argument of the inverse + if not args_id_variable: + id_ = getattr(args, id_tok) or _default_value("id") + if not args_id: + id_variable = getattr(args, id_variable_tok) or _default_variable("id") + if not args_secret_variable: + secret = getattr(args, secret_tok) or _default_value("secret") + if not args_secret: + secret_variable = getattr(args, secret_variable_tok) or _default_variable("secret") + + if (id_ or id_variable) and (secret or secret_variable): + if secret: + if not id_: + raise SpackError("Cannot add mirror with a variable id and text secret") + + return [id_, secret] + else: + return dict( + [ + (("id", id_) if id_ else ("id_variable", id_variable)), + ("secret_variable", secret_variable), + ] + ) + else: + if id_ or id_variable or secret or secret_variable is not None: + id_arg_tok = id_tok.replace("_", "-") + secret_arg_tok = secret_tok.replace("_", "-") + tty.warn( + "Expected both parts of the access pair to be specified. " + f"(i.e. --{id_arg_tok} and --{secret_arg_tok})" + ) + + return None + + def mirror_add(args): """add a mirror to Spack""" if ( args.s3_access_key_id or args.s3_access_key_secret or args.s3_access_token + or args.s3_access_key_id_variable + or args.s3_access_key_secret_variable + or args.s3_access_token_variable or args.s3_profile or args.s3_endpoint_url or args.type or args.oci_username or args.oci_password + or args.oci_username_variable + or args.oci_password_variable or args.autopush or args.signed is not None ): connection = {"url": args.url} - if args.s3_access_key_id and args.s3_access_key_secret: - connection["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret] + # S3 Connection + if args.s3_access_key_secret: + tty.warn( + "Configuring mirror secrets as plain text with --s3-access-key-secret is " + "deprecated. Use --s3-access-key-secret-variable instead" + ) + if args.oci_password: + tty.warn( + "Configuring mirror secrets as plain text with --oci-password is deprecated. 
" + "Use --oci-password-variable instead" + ) + access_pair = _configure_access_pair( + args, + "s3_access_key_id", + "s3_access_key_id_variable", + "s3_access_key_secret", + "s3_access_key_secret_variable", + ) + if access_pair: + connection["access_pair"] = access_pair + if args.s3_access_token: connection["access_token"] = args.s3_access_token + elif args.s3_access_token_variable: + connection["access_token_variable"] = args.s3_access_token_variable + if args.s3_profile: connection["profile"] = args.s3_profile + if args.s3_endpoint_url: connection["endpoint_url"] = args.s3_endpoint_url - if args.oci_username and args.oci_password: - connection["access_pair"] = [args.oci_username, args.oci_password] + + # OCI Connection + access_pair = _configure_access_pair( + args, "oci_username", "oci_username_variable", "oci_password", "oci_password_variable" + ) + if access_pair: + connection["access_pair"] = access_pair + if args.type: connection["binary"] = "binary" in args.type connection["source"] = "source" in args.type @@ -285,16 +387,35 @@ def _configure_mirror(args): changes = {} if args.url: changes["url"] = args.url - if args.s3_access_key_id and args.s3_access_key_secret: - changes["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret] + + default_access_pair = entry._get_value("access_pair", direction or "fetch") + # TODO: Init access_pair args with the fetch/push/base values in the current mirror state + access_pair = _configure_access_pair( + args, + "s3_access_key_id", + "s3_access_key_id_variable", + "s3_access_key_secret", + "s3_access_key_secret_variable", + default=default_access_pair, + ) + if access_pair: + changes["access_pair"] = access_pair if args.s3_access_token: changes["access_token"] = args.s3_access_token if args.s3_profile: changes["profile"] = args.s3_profile if args.s3_endpoint_url: changes["endpoint_url"] = args.s3_endpoint_url - if args.oci_username and args.oci_password: - changes["access_pair"] = [args.oci_username, args.oci_password] + access_pair = _configure_access_pair( + args, + "oci_username", + "oci_username_variable", + "oci_password", + "oci_password_variable", + default=default_access_pair, + ) + if access_pair: + changes["access_pair"] = access_pair if getattr(args, "signed", None) is not None: changes["signed"] = args.signed if getattr(args, "autopush", None) is not None: diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index b320671361e1b1..328a456fc3cb17 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -18,7 +18,7 @@ import sys import traceback import urllib.parse -from typing import List, Optional, Union +from typing import Any, Dict, Optional, Tuple, Union import llnl.url import llnl.util.symlink @@ -153,8 +153,66 @@ def push_url(self): """Get the valid, canonicalized fetch URL""" return self.get_url("push") + def ensure_mirror_usable(self, direction: str = "push"): + access_pair = self._get_value("access_pair", direction) + access_token_variable = self._get_value("access_token_variable", direction) + + errors = [] + + # Verify that the credentials that are variables expand + if access_pair and isinstance(access_pair, dict): + if "id_variable" in access_pair and access_pair["id_variable"] not in os.environ: + errors.append(f"id_variable {access_pair['id_variable']} not set in environment") + if "secret_variable" in access_pair: + if access_pair["secret_variable"] not in os.environ: + errors.append( + f"environment variable `{access_pair['secret_variable']}` " + "(secret_variable) not set" + ) 
+ + if access_token_variable: + if access_token_variable not in os.environ: + errors.append( + f"environment variable `{access_pair['access_token_variable']}` " + "(access_token_variable) not set" + ) + + if errors: + msg = f"invalid {direction} configuration for mirror {self.name}: " + msg += "\n ".join(errors) + raise spack.mirror.MirrorError(msg) + def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool): - keys = ["url", "access_pair", "access_token", "profile", "endpoint_url"] + # Only allow one to exist in the config + if "access_token" in current_data and "access_token_variable" in new_data: + current_data.pop("access_token") + elif "access_token_variable" in current_data and "access_token" in new_data: + current_data.pop("access_token_variable") + + # If updating to a new access_pair that is the deprecated list, warn + warn_deprecated_access_pair = False + if "access_pair" in new_data: + warn_deprecated_access_pair = isinstance(new_data["access_pair"], list) + # If the not updating the current access_pair, and it is the deprecated list, warn + elif "access_pair" in current_data: + warn_deprecated_access_pair = isinstance(current_data["access_pair"], list) + + if warn_deprecated_access_pair: + tty.warn( + f"in mirror {self.name}: support for plain text secrets in config files " + "(access_pair: [id, secret]) is deprecated and will be removed in a future Spack " + "version. Use environment variables instead (access_pair: " + "{id: ..., secret_variable: ...})" + ) + + keys = [ + "url", + "access_pair", + "access_token", + "access_token_variable", + "profile", + "endpoint_url", + ] if top_level: keys += ["binary", "source", "signed", "autopush"] changed = False @@ -270,11 +328,53 @@ def get_url(self, direction: str) -> str: return _url_or_path_to_url(url) - def get_access_token(self, direction: str) -> Optional[str]: - return self._get_value("access_token", direction) + def get_credentials(self, direction: str) -> Dict[str, Any]: + """Get the mirror credentials from the mirror config + + Args: + direction: fetch or push mirror config + + Returns: + Dictionary from credential type string to value - def get_access_pair(self, direction: str) -> Optional[List]: - return self._get_value("access_pair", direction) + Credential Type Map: + access_token -> str + access_pair -> tuple(str,str) + profile -> str + """ + creddict: Dict[str, Any] = {} + access_token = self.get_access_token(direction) + if access_token: + creddict["access_token"] = access_token + + access_pair = self.get_access_pair(direction) + if access_pair: + creddict.update({"access_pair": access_pair}) + + profile = self.get_profile(direction) + if profile: + creddict["profile"] = profile + + return creddict + + def get_access_token(self, direction: str) -> Optional[str]: + tok = self._get_value("access_token_variable", direction) + if tok: + return os.environ.get(tok) + else: + return self._get_value("access_token", direction) + return None + + def get_access_pair(self, direction: str) -> Optional[Tuple[str, str]]: + pair = self._get_value("access_pair", direction) + if isinstance(pair, (tuple, list)) and len(pair) == 2: + return (pair[0], pair[1]) if all(pair) else None + elif isinstance(pair, dict): + id_ = os.environ.get(pair["id_variable"]) if "id_variable" in pair else pair["id"] + secret = os.environ.get(pair["secret_variable"]) + return (id_, secret) if id_ and secret else None + else: + return None def get_profile(self, direction: str) -> Optional[str]: return self._get_value("profile", 
direction) diff --git a/lib/spack/spack/oci/opener.py b/lib/spack/spack/oci/opener.py index 906d5d2b92c60b..2f9e83f5be1619 100644 --- a/lib/spack/spack/oci/opener.py +++ b/lib/spack/spack/oci/opener.py @@ -377,9 +377,10 @@ def credentials_from_mirrors( # Prefer push credentials over fetch. Unlikely that those are different # but our config format allows it. for direction in ("push", "fetch"): - pair = mirror.get_access_pair(direction) - if pair is None: + pair = mirror.get_credentials(direction).get("access_pair") + if not pair: continue + url = mirror.get_url(direction) if not url.startswith("oci://"): continue diff --git a/lib/spack/spack/schema/mirrors.py b/lib/spack/spack/schema/mirrors.py index 9a56f1a7e28415..8a61fd5b5344fe 100644 --- a/lib/spack/spack/schema/mirrors.py +++ b/lib/spack/spack/schema/mirrors.py @@ -15,14 +15,42 @@ "url": {"type": "string"}, # todo: replace this with named keys "username" / "password" or "id" / "secret" "access_pair": { - "type": "array", - "items": {"type": ["string", "null"], "minItems": 2, "maxItems": 2}, + "oneOf": [ + { + "type": "array", + "items": {"minItems": 2, "maxItems": 2, "type": ["string", "null"]}, + }, # deprecated + { + "type": "object", + "required": ["secret_variable"], + # Only allow id or id_variable to be set, not both + "oneOf": [{"required": ["id"]}, {"required": ["id_variable"]}], + "properties": { + "id": {"type": "string"}, + "id_variable": {"type": "string"}, + "secret_variable": {"type": "string"}, + }, + }, + ] }, - "access_token": {"type": ["string", "null"]}, "profile": {"type": ["string", "null"]}, "endpoint_url": {"type": ["string", "null"]}, + "access_token": {"type": ["string", "null"]}, # deprecated + "access_token_variable": {"type": ["string", "null"]}, } +connection_ext = { + "deprecatedProperties": [ + { + "names": ["access_token"], + "message": "Use of plain text `access_token` in mirror config is deprecated, use " + "environment variables instead (access_token_variable)", + "error": False, + } + ] +} + + #: Mirror connection inside pull/push keys fetch_and_push = { "anyOf": [ @@ -31,6 +59,7 @@ "type": "object", "additionalProperties": False, "properties": {**connection}, # type: ignore + **connection_ext, # type: ignore }, ] } @@ -49,6 +78,7 @@ "autopush": {"type": "boolean"}, **connection, # type: ignore }, + **connection_ext, # type: ignore } #: Properties for inclusion in other schemas @@ -70,3 +100,28 @@ "additionalProperties": False, "properties": properties, } + + +def update(data): + import jsonschema + + errors = [] + + def check_access_pair(name, section): + if not section or not isinstance(section, dict): + return + + if "access_token" in section and "access_token_variable" in section: + errors.append( + f'{name}: mirror credential "access_token" conflicts with "access_token_variable"' + ) + + # Check all of the sections + for name, section in data.items(): + check_access_pair(name, section) + if isinstance(section, dict): + check_access_pair(name, section.get("fetch")) + check_access_pair(name, section.get("push")) + + if errors: + raise jsonschema.ValidationError("\n".join(errors)) diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py index 2a67bc2e1432ed..ee827c05547e8d 100644 --- a/lib/spack/spack/test/cmd/mirror.py +++ b/lib/spack/spack/test/cmd/mirror.py @@ -17,6 +17,7 @@ import spack.version from spack.main import SpackCommand, SpackCommandError +config = SpackCommand("config") mirror = SpackCommand("mirror") env = SpackCommand("env") add = SpackCommand("add") @@ 
-181,20 +182,122 @@ def test_mirror_crud(mutable_config, capsys): output = mirror("remove", "mirror") assert "Removed mirror" in output - # Test S3 connection info id/key - mirror( - "add", - "--s3-access-key-id", - "foo", - "--s3-access-key-secret", - "bar", - "mirror", - "s3://spack-public", - ) + # Test S3 connection info token as variable + mirror("add", "--s3-access-token-variable", "aaaaaazzzzz", "mirror", "s3://spack-public") output = mirror("remove", "mirror") assert "Removed mirror" in output + def do_add_set_seturl_access_pair( + id_arg, secret_arg, mirror_name="mirror", mirror_url="s3://spack-public" + ): + # Test S3 connection info id/key + output = mirror("add", id_arg, "foo", secret_arg, "bar", mirror_name, mirror_url) + if "variable" not in secret_arg: + assert ( + f"Configuring mirror secrets as plain text with {secret_arg} is deprecated. " + in output + ) + + output = config("blame", "mirrors") + assert all([x in output for x in ("foo", "bar", mirror_name, mirror_url)]) + # Mirror access_pair deprecation warning should not be in blame output + assert "support for plain text secrets" not in output + + output = mirror("set", id_arg, "foo_set", secret_arg, "bar_set", mirror_name) + if "variable" not in secret_arg: + assert "support for plain text secrets" in output + output = config("blame", "mirrors") + assert all([x in output for x in ("foo_set", "bar_set", mirror_name, mirror_url)]) + if "variable" not in secret_arg: + output = mirror( + "set", id_arg, "foo_set", secret_arg + "-variable", "bar_set_var", mirror_name + ) + assert "support for plain text secrets" not in output + output = config("blame", "mirrors") + assert all( + [x in output for x in ("foo_set", "bar_set_var", mirror_name, mirror_url)] + ) + + output = mirror( + "set-url", + id_arg, + "foo_set_url", + secret_arg, + "bar_set_url", + "--push", + mirror_name, + mirror_url + "-push", + ) + output = config("blame", "mirrors") + assert all( + [ + x in output + for x in ("foo_set_url", "bar_set_url", mirror_name, mirror_url + "-push") + ] + ) + + output = mirror("set", id_arg, "a", mirror_name) + assert "No changes made to mirror" not in output + + output = mirror("set", secret_arg, "b", mirror_name) + assert "No changes made to mirror" not in output + + output = mirror("set-url", id_arg, "c", mirror_name, mirror_url) + assert "No changes made to mirror" not in output + + output = mirror("set-url", secret_arg, "d", mirror_name, mirror_url) + assert "No changes made to mirror" not in output + + output = mirror("remove", mirror_name) + assert "Removed mirror" in output + + output = mirror("add", id_arg, "foo", mirror_name, mirror_url) + assert "Expected both parts of the access pair to be specified. " in output + + output = mirror("set-url", id_arg, "bar", mirror_name, mirror_url) + assert "Expected both parts of the access pair to be specified. " in output + + output = mirror("set", id_arg, "bar", mirror_name) + assert "Expected both parts of the access pair to be specified. " in output + + output = mirror("remove", mirror_name) + assert "Removed mirror" in output + + output = mirror("add", secret_arg, "bar", mirror_name, mirror_url) + assert "Expected both parts of the access pair to be specified. " in output + + output = mirror("set-url", secret_arg, "bar", mirror_name, mirror_url) + assert "Expected both parts of the access pair to be specified. " in output + + output = mirror("set", secret_arg, "bar", mirror_name) + assert "Expected both parts of the access pair to be specified. 
" in output + + output = mirror("remove", mirror_name) + assert "Removed mirror" in output + + output = mirror("list") + assert "No mirrors configured" in output + + do_add_set_seturl_access_pair("--s3-access-key-id", "--s3-access-key-secret") + do_add_set_seturl_access_pair("--s3-access-key-id", "--s3-access-key-secret-variable") + do_add_set_seturl_access_pair( + "--s3-access-key-id-variable", "--s3-access-key-secret-variable" + ) + with pytest.raises( + spack.error.SpackError, match="Cannot add mirror with a variable id and text secret" + ): + do_add_set_seturl_access_pair("--s3-access-key-id-variable", "--s3-access-key-secret") + + # Test OCI connection info user/password + do_add_set_seturl_access_pair("--oci-username", "--oci-password") + do_add_set_seturl_access_pair("--oci-username", "--oci-password-variable") + do_add_set_seturl_access_pair("--oci-username-variable", "--oci-password-variable") + with pytest.raises( + spack.error.SpackError, match="Cannot add mirror with a variable id and text secret" + ): + do_add_set_seturl_access_pair("--s3-access-key-id-variable", "--s3-access-key-secret") + # Test S3 connection info with endpoint URL mirror( "add", @@ -218,6 +321,9 @@ def test_mirror_crud(mutable_config, capsys): output = mirror("remove", "mirror") assert "Removed mirror" in output + output = mirror("list") + assert "No mirrors configured" in output + def test_mirror_nonexisting(mutable_config): with pytest.raises(SpackCommandError): diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index b62d5a3e41c787..ce104259fc76ca 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -329,9 +329,9 @@ def test_update_4(): @pytest.mark.parametrize("direction", ["fetch", "push"]) -def test_update_connection_params(direction): +def test_update_connection_params(direction, tmpdir, monkeypatch): """Test whether new connection params expand the mirror config to a dict.""" - m = spack.mirror.Mirror("https://example.com") + m = spack.mirror.Mirror("https://example.com", "example") assert m.update( { @@ -354,12 +354,64 @@ def test_update_connection_params(direction): "endpoint_url": "https://example.com", }, } - - assert m.get_access_pair(direction) == ["username", "password"] + assert m.get_access_pair(direction) == ("username", "password") assert m.get_access_token(direction) == "token" assert m.get_profile(direction) == "profile" assert m.get_endpoint_url(direction) == "https://example.com" + # Expand environment variables + os.environ["_SPACK_TEST_PAIR_USERNAME"] = "expanded_username" + os.environ["_SPACK_TEST_PAIR_PASSWORD"] = "expanded_password" + os.environ["_SPACK_TEST_TOKEN"] = "expanded_token" + + assert m.update( + { + "access_pair": { + "id_variable": "_SPACK_TEST_PAIR_USERNAME", + "secret_variable": "_SPACK_TEST_PAIR_PASSWORD", + } + }, + direction, + ) + + assert m.to_dict() == { + "url": "https://example.com", + direction: { + "url": "http://example.org", + "access_pair": { + "id_variable": "_SPACK_TEST_PAIR_USERNAME", + "secret_variable": "_SPACK_TEST_PAIR_PASSWORD", + }, + "access_token": "token", + "profile": "profile", + "endpoint_url": "https://example.com", + }, + } + + assert m.get_access_pair(direction) == ("expanded_username", "expanded_password") + + assert m.update( + { + "access_pair": {"id": "username", "secret_variable": "_SPACK_TEST_PAIR_PASSWORD"}, + "access_token_variable": "_SPACK_TEST_TOKEN", + }, + direction, + ) + + assert m.to_dict() == { + "url": "https://example.com", + direction: { + "url": 
"http://example.org", + "access_pair": {"id": "username", "secret_variable": "_SPACK_TEST_PAIR_PASSWORD"}, + "access_token_variable": "_SPACK_TEST_TOKEN", + "profile": "profile", + "endpoint_url": "https://example.com", + }, + } + + assert m.get_access_pair(direction) == ("username", "expanded_password") + assert m.get_access_token(direction) == "expanded_token" + def test_mirror_name_or_url_dir_parsing(tmp_path): curdir = tmp_path / "mirror" diff --git a/lib/spack/spack/util/s3.py b/lib/spack/spack/util/s3.py index 5457abdca58453..700db07135704c 100644 --- a/lib/spack/spack/util/s3.py +++ b/lib/spack/spack/util/s3.py @@ -94,20 +94,17 @@ def get_mirror_s3_connection_info(mirror, method): # access token if isinstance(mirror, Mirror): - access_token = mirror.get_access_token(method) - if access_token: - s3_connection["aws_session_token"] = access_token - - # access pair - access_pair = mirror.get_access_pair(method) - if access_pair and access_pair[0] and access_pair[1]: - s3_connection["aws_access_key_id"] = access_pair[0] - s3_connection["aws_secret_access_key"] = access_pair[1] - - # profile - profile = mirror.get_profile(method) - if profile: - s3_connection["profile_name"] = profile + credentials = mirror.get_credentials(method) + if credentials: + if "access_token" in credentials: + s3_connection["aws_session_token"] = credentials["access_token"] + + if "access_pair" in credentials: + s3_connection["aws_access_key_id"] = credentials["access_pair"][0] + s3_connection["aws_secret_access_key"] = credentials["access_pair"][1] + + if "profile" in credentials: + s3_connection["profile_name"] = credentials["profile"] # endpoint url endpoint_url = mirror.get_endpoint_url(method) or os.environ.get("S3_ENDPOINT_URL") diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 8946bf1dcc888d..706fb7e2d59348 100644 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -1455,7 +1455,7 @@ _spack_mirror_destroy() { _spack_mirror_add() { if $list_options then - SPACK_COMPREPLY="-h --help --scope --type --autopush --unsigned --signed --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password" + SPACK_COMPREPLY="-h --help --scope --type --autopush --unsigned --signed --s3-access-key-id --s3-access-key-id-variable --s3-access-key-secret --s3-access-key-secret-variable --s3-access-token --s3-access-token-variable --s3-profile --s3-endpoint-url --oci-username --oci-username-variable --oci-password --oci-password-variable" else _mirrors fi @@ -1482,7 +1482,7 @@ _spack_mirror_rm() { _spack_mirror_set_url() { if $list_options then - SPACK_COMPREPLY="-h --help --push --fetch --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password" + SPACK_COMPREPLY="-h --help --push --fetch --scope --s3-access-key-id --s3-access-key-id-variable --s3-access-key-secret --s3-access-key-secret-variable --s3-access-token --s3-access-token-variable --s3-profile --s3-endpoint-url --oci-username --oci-username-variable --oci-password --oci-password-variable" else _mirrors fi @@ -1491,7 +1491,7 @@ _spack_mirror_set_url() { _spack_mirror_set() { if $list_options then - SPACK_COMPREPLY="-h --help --push --fetch --type --url --autopush --no-autopush --unsigned --signed --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password" + SPACK_COMPREPLY="-h --help --push --fetch --type 
--url --autopush --no-autopush --unsigned --signed --scope --s3-access-key-id --s3-access-key-id-variable --s3-access-key-secret --s3-access-key-secret-variable --s3-access-token --s3-access-token-variable --s3-profile --s3-endpoint-url --oci-username --oci-username-variable --oci-password --oci-password-variable" else _mirrors fi diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index 17b7cd42e46f34..52153f01c0e05e 100644 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -2295,7 +2295,7 @@ complete -c spack -n '__fish_spack_using_command mirror destroy' -l mirror-url - complete -c spack -n '__fish_spack_using_command mirror destroy' -l mirror-url -r -d 'find mirror to destroy by url' # spack mirror add -set -g __fish_spack_optspecs_spack_mirror_add h/help scope= type= autopush unsigned signed s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password= +set -g __fish_spack_optspecs_spack_mirror_add h/help scope= type= autopush unsigned signed s3-access-key-id= s3-access-key-id-variable= s3-access-key-secret= s3-access-key-secret-variable= s3-access-token= s3-access-token-variable= s3-profile= s3-endpoint-url= oci-username= oci-username-variable= oci-password= oci-password-variable= complete -c spack -n '__fish_spack_using_command_pos 0 mirror add' -f complete -c spack -n '__fish_spack_using_command mirror add' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command mirror add' -s h -l help -d 'show this help message and exit' @@ -2311,18 +2311,28 @@ complete -c spack -n '__fish_spack_using_command mirror add' -l signed -f -a sig complete -c spack -n '__fish_spack_using_command mirror add' -l signed -d 'require signing and signature verification when pushing and installing from this build cache' complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-key-id -r -f -a s3_access_key_id complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-key-id -r -d 'ID string to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-key-id-variable -r -f -a s3_access_key_id_variable +complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-key-id-variable -r -d 'environment variable containing ID string to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-key-secret -r -f -a s3_access_key_secret complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-key-secret -r -d 'secret string to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-key-secret-variable -r -f -a s3_access_key_secret_variable +complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-key-secret-variable -r -d 'environment variable containing secret string to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-token -r -f -a s3_access_token complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-token -r -d 'access token to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-token-variable -r -f -a s3_access_token_variable +complete -c spack -n '__fish_spack_using_command mirror add' -l s3-access-token-variable -r -d 'environment variable containing access token to use to connect to this S3 mirror' complete -c spack -n 
'__fish_spack_using_command mirror add' -l s3-profile -r -f -a s3_profile complete -c spack -n '__fish_spack_using_command mirror add' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror add' -l s3-endpoint-url -r -f -a s3_endpoint_url complete -c spack -n '__fish_spack_using_command mirror add' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror add' -l oci-username -r -f -a oci_username complete -c spack -n '__fish_spack_using_command mirror add' -l oci-username -r -d 'username to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-username-variable -r -f -a oci_username_variable +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-username-variable -r -d 'environment variable containing username to use to connect to this OCI mirror' complete -c spack -n '__fish_spack_using_command mirror add' -l oci-password -r -f -a oci_password complete -c spack -n '__fish_spack_using_command mirror add' -l oci-password -r -d 'password to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-password-variable -r -f -a oci_password_variable +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-password-variable -r -d 'environment variable containing password to use to connect to this OCI mirror' # spack mirror remove set -g __fish_spack_optspecs_spack_mirror_remove h/help scope= @@ -2341,7 +2351,7 @@ complete -c spack -n '__fish_spack_using_command mirror rm' -l scope -r -f -a '_ complete -c spack -n '__fish_spack_using_command mirror rm' -l scope -r -d 'configuration scope to modify' # spack mirror set-url -set -g __fish_spack_optspecs_spack_mirror_set_url h/help push fetch scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password= +set -g __fish_spack_optspecs_spack_mirror_set_url h/help push fetch scope= s3-access-key-id= s3-access-key-id-variable= s3-access-key-secret= s3-access-key-secret-variable= s3-access-token= s3-access-token-variable= s3-profile= s3-endpoint-url= oci-username= oci-username-variable= oci-password= oci-password-variable= complete -c spack -n '__fish_spack_using_command_pos 0 mirror set-url' -f -a '(__fish_spack_mirrors)' complete -c spack -n '__fish_spack_using_command mirror set-url' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command mirror set-url' -s h -l help -d 'show this help message and exit' @@ -2353,21 +2363,31 @@ complete -c spack -n '__fish_spack_using_command mirror set-url' -l scope -r -f complete -c spack -n '__fish_spack_using_command mirror set-url' -l scope -r -d 'configuration scope to modify' complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-key-id -r -f -a s3_access_key_id complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-key-id -r -d 'ID string to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-key-id-variable -r -f -a s3_access_key_id_variable +complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-key-id-variable -r -d 'environment variable containing ID string to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-key-secret -r -f -a s3_access_key_secret complete -c spack -n 
'__fish_spack_using_command mirror set-url' -l s3-access-key-secret -r -d 'secret string to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-key-secret-variable -r -f -a s3_access_key_secret_variable +complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-key-secret-variable -r -d 'environment variable containing secret string to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-token -r -f -a s3_access_token complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-token -r -d 'access token to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-token-variable -r -f -a s3_access_token_variable +complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-access-token-variable -r -d 'environment variable containing access token to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-profile -r -f -a s3_profile complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-endpoint-url -r -f -a s3_endpoint_url complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-username -r -f -a oci_username complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-username -r -d 'username to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-username-variable -r -f -a oci_username_variable +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-username-variable -r -d 'environment variable containing username to use to connect to this OCI mirror' complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-password -r -f -a oci_password complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-password -r -d 'password to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-password-variable -r -f -a oci_password_variable +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-password-variable -r -d 'environment variable containing password to use to connect to this OCI mirror' # spack mirror set -set -g __fish_spack_optspecs_spack_mirror_set h/help push fetch type= url= autopush no-autopush unsigned signed scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password= +set -g __fish_spack_optspecs_spack_mirror_set h/help push fetch type= url= autopush no-autopush unsigned signed scope= s3-access-key-id= s3-access-key-id-variable= s3-access-key-secret= s3-access-key-secret-variable= s3-access-token= s3-access-token-variable= s3-profile= s3-endpoint-url= oci-username= oci-username-variable= oci-password= oci-password-variable= complete -c spack -n '__fish_spack_using_command_pos 0 mirror set' -f -a '(__fish_spack_mirrors)' complete -c spack -n '__fish_spack_using_command mirror set' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command mirror set' -s h -l help -d 'show this help message and exit' @@ -2391,18 +2411,28 @@ complete -c spack -n 
'__fish_spack_using_command mirror set' -l scope -r -f -a ' complete -c spack -n '__fish_spack_using_command mirror set' -l scope -r -d 'configuration scope to modify' complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-key-id -r -f -a s3_access_key_id complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-key-id -r -d 'ID string to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-key-id-variable -r -f -a s3_access_key_id_variable +complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-key-id-variable -r -d 'environment variable containing ID string to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-key-secret -r -f -a s3_access_key_secret complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-key-secret -r -d 'secret string to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-key-secret-variable -r -f -a s3_access_key_secret_variable +complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-key-secret-variable -r -d 'environment variable containing secret string to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-token -r -f -a s3_access_token complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-token -r -d 'access token to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-token-variable -r -f -a s3_access_token_variable +complete -c spack -n '__fish_spack_using_command mirror set' -l s3-access-token-variable -r -d 'environment variable containing access token to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set' -l s3-profile -r -f -a s3_profile complete -c spack -n '__fish_spack_using_command mirror set' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set' -l s3-endpoint-url -r -f -a s3_endpoint_url complete -c spack -n '__fish_spack_using_command mirror set' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set' -l oci-username -r -f -a oci_username complete -c spack -n '__fish_spack_using_command mirror set' -l oci-username -r -d 'username to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-username-variable -r -f -a oci_username_variable +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-username-variable -r -d 'environment variable containing username to use to connect to this OCI mirror' complete -c spack -n '__fish_spack_using_command mirror set' -l oci-password -r -f -a oci_password complete -c spack -n '__fish_spack_using_command mirror set' -l oci-password -r -d 'password to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-password-variable -r -f -a oci_password_variable +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-password-variable -r -d 'environment variable containing password to use to connect to this OCI mirror' # spack mirror list set -g __fish_spack_optspecs_spack_mirror_list h/help scope= From 3b423a67a29d266cd1722b86a4420126b443811e Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Mon, 11 Nov 2024 11:18:03 
-0600 Subject: [PATCH 153/208] butterflypack: add v3.2.0, strumpack: add v8.0.0 (#47462) * butterflypack: add v3.2.0 * strumpack: add v8.0.0 * restrict fj patch to @1.2.0 * Update var/spack/repos/builtin/packages/butterflypack/package.py Co-authored-by: Wouter Deconinck --------- Co-authored-by: Wouter Deconinck --- var/spack/repos/builtin/packages/butterflypack/package.py | 7 ++++--- var/spack/repos/builtin/packages/strumpack/package.py | 3 +++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/butterflypack/package.py b/var/spack/repos/builtin/packages/butterflypack/package.py index 7fc6cbd77b9648..c36d1d3a4bd08d 100644 --- a/var/spack/repos/builtin/packages/butterflypack/package.py +++ b/var/spack/repos/builtin/packages/butterflypack/package.py @@ -28,6 +28,7 @@ class Butterflypack(CMakePackage): license("BSD-3-Clause-LBNL") version("master", branch="master") + version("3.2.0", sha256="0f1570947f0a7c0e130bbec3abbb2fa275ae453dc3f428e7a3a2265fecafe1ae") version("2.4.0", sha256="12d04e7101b2c8292b5c62d9f42b5cd1e8a3c5af639d2665596e3e4255fd0804") version("2.2.2", sha256="73f67073e4291877f1eee19483a8a7b3c761eaf79a75805d52105ceedead85ea") version("2.2.1", sha256="4cedc2896a6b368773ce4f9003aa2c0230baf56a4464a6b899a155e01406a232") @@ -61,9 +62,9 @@ class Butterflypack(CMakePackage): # https://github.com/spack/spack/issues/31818 patch("qopenmp-for-oneapi.patch", when="@2.1.1 %oneapi") - patch("longline.patch", when="%fj") - patch("fjfortran.patch", when="%fj") - patch("isnan.patch", when="%fj") + patch("longline.patch", when="@1.2.0 %fj") + patch("fjfortran.patch", when="@1.2.0 %fj") + patch("isnan.patch", when="@1.2.0 %fj") def cmake_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/strumpack/package.py b/var/spack/repos/builtin/packages/strumpack/package.py index cb1428dcff8a0b..fc600ac42e634b 100644 --- a/var/spack/repos/builtin/packages/strumpack/package.py +++ b/var/spack/repos/builtin/packages/strumpack/package.py @@ -36,6 +36,7 @@ class Strumpack(CMakePackage, CudaPackage, ROCmPackage): license("BSD-3-Clause-LBNL") version("master", branch="master") + version("8.0.0", sha256="11cc8645d622a16510b39a20efc64f34862b41976152d17f9fbf3e91f899766c") version("7.2.0", sha256="6988c00c3213f13e53d75fb474102358f4fecf07a4b4304b7123d86fdc784639") version("7.1.3", sha256="c951f38ee7af20da3ff46429e38fcebd57fb6f12619b2c56040d6da5096abcb0") version("7.1.2", sha256="262a0193fa1682d0eaa90363f739e0be7a778d5deeb80e4d4ae12446082a39cc") @@ -86,9 +87,11 @@ class Strumpack(CMakePackage, CudaPackage, ROCmPackage): depends_on("parmetis", when="+parmetis") depends_on("scotch~metis", when="+scotch") depends_on("scotch~metis+mpi", when="+scotch+mpi") + depends_on("scotch@7.0.4:", when="@8.0.0: +scotch") depends_on("butterflypack@1.1.0", when="@3.3.0:3.9 +butterflypack+mpi") depends_on("butterflypack@1.2.0:", when="@4.0.0: +butterflypack+mpi") depends_on("butterflypack@2.1.0:", when="@6.3.0: +butterflypack+mpi") + depends_on("butterflypack@3.2.0:", when="@8.0.0: +butterflypack+mpi") depends_on("cuda", when="@4.0.0: +cuda") depends_on("zfp@0.5.5", when="@:7.0.1 +zfp") depends_on("zfp", when="@7.0.2: +zfp") From 4eb7b998e842e3f3deb4ff73391128e7e75099ad Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 11 Nov 2024 19:01:24 +0100 Subject: [PATCH 154/208] Move concretization tests to the same folder (#47539) * Move concretization tests to the same folder Signed-off-by: Massimiliano Culpo * Fix for clingo-cffi --------- Signed-off-by: Massimiliano 
Culpo --- .github/workflows/unit_tests.yaml | 2 +- .../compiler_runtimes.py} | 0 lib/spack/spack/test/{concretize.py => concretization/core.py} | 0 .../test/{concretize_errors.py => concretization/errors.py} | 0 lib/spack/spack/test/{ => concretization}/flag_mixing.py | 0 .../preferences.py} | 0 .../requirements.py} | 0 7 files changed, 1 insertion(+), 1 deletion(-) rename lib/spack/spack/test/{concretize_compiler_runtimes.py => concretization/compiler_runtimes.py} (100%) rename lib/spack/spack/test/{concretize.py => concretization/core.py} (100%) rename lib/spack/spack/test/{concretize_errors.py => concretization/errors.py} (100%) rename lib/spack/spack/test/{ => concretization}/flag_mixing.py (100%) rename lib/spack/spack/test/{concretize_preferences.py => concretization/preferences.py} (100%) rename lib/spack/spack/test/{concretize_requirements.py => concretization/requirements.py} (100%) diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml index 58525d405ce020..72184b7fbf90fa 100644 --- a/.github/workflows/unit_tests.yaml +++ b/.github/workflows/unit_tests.yaml @@ -174,7 +174,7 @@ jobs: spack bootstrap disable github-actions-v0.6 spack bootstrap status spack solve zlib - spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretize.py + spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 with: name: coverage-clingo-cffi diff --git a/lib/spack/spack/test/concretize_compiler_runtimes.py b/lib/spack/spack/test/concretization/compiler_runtimes.py similarity index 100% rename from lib/spack/spack/test/concretize_compiler_runtimes.py rename to lib/spack/spack/test/concretization/compiler_runtimes.py diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretization/core.py similarity index 100% rename from lib/spack/spack/test/concretize.py rename to lib/spack/spack/test/concretization/core.py diff --git a/lib/spack/spack/test/concretize_errors.py b/lib/spack/spack/test/concretization/errors.py similarity index 100% rename from lib/spack/spack/test/concretize_errors.py rename to lib/spack/spack/test/concretization/errors.py diff --git a/lib/spack/spack/test/flag_mixing.py b/lib/spack/spack/test/concretization/flag_mixing.py similarity index 100% rename from lib/spack/spack/test/flag_mixing.py rename to lib/spack/spack/test/concretization/flag_mixing.py diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretization/preferences.py similarity index 100% rename from lib/spack/spack/test/concretize_preferences.py rename to lib/spack/spack/test/concretization/preferences.py diff --git a/lib/spack/spack/test/concretize_requirements.py b/lib/spack/spack/test/concretization/requirements.py similarity index 100% rename from lib/spack/spack/test/concretize_requirements.py rename to lib/spack/spack/test/concretization/requirements.py From 9ed5e1de8e81eff9fb2d746305d146d5c4cd64f5 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Mon, 11 Nov 2024 10:13:31 -0800 Subject: [PATCH 155/208] Bugfix: `spack find -x` in environments (#46798) This addresses part [1] of #46345. #44713 introduced a bug where all non-spec query parameters (date ranges, -x, etc.) were ignored when an environment was active. This fixes that issue and adds tests for it.
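For illustration (a hypothetical session: `myenv` is a made-up environment name, while the flags shown are existing `spack find` record filters), queries like these are honored again when an environment is active:

    spack env activate myenv
    spack find -x                        # only explicitly installed specs
    spack find --start-date 2024-01-01   # filter install records by date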
--------- Co-authored-by: Harmen Stoppels --- lib/spack/spack/cmd/__init__.py | 12 ++ lib/spack/spack/cmd/find.py | 90 +++++++++------ lib/spack/spack/cmd/modules/__init__.py | 13 +-- lib/spack/spack/test/cmd/find.py | 140 ++++++++++++++++++++++++ lib/spack/spack/test/utilities.py | 32 ++++++ 5 files changed, 242 insertions(+), 45 deletions(-) create mode 100644 lib/spack/spack/test/utilities.py diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index e9df5fc18955b8..c0efd5252153e7 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -545,6 +545,18 @@ def __init__(self, name): super().__init__("{0} is not a permissible Spack command name.".format(name)) +class MultipleSpecsMatch(Exception): + """Raised when multiple specs match a constraint, in a context where + this is not allowed. + """ + + +class NoSpecMatches(Exception): + """Raised when no spec matches a constraint, in a context where + this is not allowed. + """ + + ######################################## # argparse types for argument validation ######################################## diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 079c5bf4d31913..c6930097ac3ad1 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -222,11 +222,9 @@ def decorator(spec, fmt): def display_env(env, args, decorator, results): """Display extra find output when running in an environment. - Find in an environment outputs 2 or 3 sections: - - 1. Root specs - 2. Concretized roots (if asked for with -c) - 3. Installed specs + In an environment, `spack find` outputs a preliminary section + showing the root specs of the environment (this is in addition + to the section listing out specs matching the query parameters). """ tty.msg("In environment %s" % env.name) @@ -299,42 +297,70 @@ def root_decorator(spec, string): print() -def find(parser, args): - env = ev.active_environment() - - if not env and args.only_roots: - tty.die("-r / --only-roots requires an active environment") - if not env and args.show_concretized: - tty.die("-c / --show-concretized requires an active environment") - +def _find_query(args, env): + q_args = query_arguments(args) + concretized_but_not_installed = list() if env: + all_env_specs = env.all_specs() if args.constraint: - init_specs = spack.cmd.parse_specs(args.constraint) - results = env.all_matching_specs(*init_specs) + init_specs = cmd.parse_specs(args.constraint) + env_specs = env.all_matching_specs(*init_specs) else: - results = env.all_specs() + env_specs = all_env_specs + + spec_hashes = set(x.dag_hash() for x in env_specs) + specs_meeting_q_args = set(spack.store.STORE.db.query(hashes=spec_hashes, **q_args)) + + results = list() + with spack.store.STORE.db.read_transaction(): + for spec in env_specs: + if not spec.installed: + concretized_but_not_installed.append(spec) + if spec in specs_meeting_q_args: + results.append(spec) else: - q_args = query_arguments(args) results = args.specs(**q_args) - decorator = make_env_decorator(env) if env else lambda s, f: f - # use groups by default except with format. 
if args.groups is None: args.groups = not args.format # Exit early with an error code if no package matches the constraint - if not results and args.constraint: - constraint_str = " ".join(str(s) for s in args.constraint_specs) - tty.die(f"No package matches the query: {constraint_str}") + if concretized_but_not_installed and args.show_concretized: + pass + elif results: + pass + elif args.constraint: + raise cmd.NoSpecMatches() # If tags have been specified on the command line, filter by tags if args.tags: packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags) results = [x for x in results if x.name in packages_with_tags] + concretized_but_not_installed = [ + x for x in concretized_but_not_installed if x.name in packages_with_tags + ] if args.loaded: - results = spack.cmd.filter_loaded_specs(results) + results = cmd.filter_loaded_specs(results) + + return results, concretized_but_not_installed + + +def find(parser, args): + env = ev.active_environment() + + if not env and args.only_roots: + tty.die("-r / --only-roots requires an active environment") + if not env and args.show_concretized: + tty.die("-c / --show-concretized requires an active environment") + + try: + results, concretized_but_not_installed = _find_query(args, env) + except cmd.NoSpecMatches: + # Note: this uses args.constraint vs. args.constraint_specs because + # the latter only exists if you call args.specs() + tty.die(f"No package matches the query: {' '.join(args.constraint)}") if args.install_status or args.show_concretized: status_fn = spack.spec.Spec.install_status @@ -345,14 +371,16 @@ def find(parser, args): if args.json: cmd.display_specs_as_json(results, deps=args.deps) else: + decorator = make_env_decorator(env) if env else lambda s, f: f + if not args.format: if env: display_env(env, args, decorator, results) if not args.only_roots: - display_results = results - if not args.show_concretized: - display_results = list(x for x in results if x.installed) + display_results = list(results) + if args.show_concretized: + display_results += concretized_but_not_installed cmd.display_specs( display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn ) @@ -370,13 +398,9 @@ def find(parser, args): concretized_suffix += " (show with `spack find -c`)" pkg_type = "loaded" if args.loaded else "installed" - spack.cmd.print_how_many_pkgs( - list(x for x in results if x.installed), pkg_type, suffix=installed_suffix - ) + cmd.print_how_many_pkgs(results, pkg_type, suffix=installed_suffix) if env: - spack.cmd.print_how_many_pkgs( - list(x for x in results if not x.installed), - "concretized", - suffix=concretized_suffix, + cmd.print_how_many_pkgs( + concretized_but_not_installed, "concretized", suffix=concretized_suffix ) diff --git a/lib/spack/spack/cmd/modules/__init__.py b/lib/spack/spack/cmd/modules/__init__.py index 754813addcdc24..013f4723dba05f 100644 --- a/lib/spack/spack/cmd/modules/__init__.py +++ b/lib/spack/spack/cmd/modules/__init__.py @@ -19,6 +19,7 @@ import spack.modules import spack.modules.common import spack.repo +from spack.cmd import MultipleSpecsMatch, NoSpecMatches from spack.cmd.common import arguments description = "manipulate module files" @@ -91,18 +92,6 @@ def add_loads_arguments(subparser): arguments.add_common_arguments(subparser, ["recurse_dependencies"]) -class MultipleSpecsMatch(Exception): - """Raised when multiple specs match a constraint, in a context where - this is not allowed. 
- """ - - -class NoSpecMatches(Exception): - """Raised when no spec matches a constraint, in a context where - this is not allowed. - """ - - def one_spec_or_raise(specs): """Ensures exactly one spec has been selected, or raises the appropriate exception. diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py index d947362f185ae6..779c6a942f0f21 100644 --- a/lib/spack/spack/test/cmd/find.py +++ b/lib/spack/spack/test/cmd/find.py @@ -14,10 +14,13 @@ import spack.cmd as cmd import spack.cmd.find import spack.environment as ev +import spack.repo import spack.store import spack.user_environment as uenv from spack.main import SpackCommand from spack.spec import Spec +from spack.test.conftest import create_test_repo +from spack.test.utilities import SpackCommandArgs from spack.util.pattern import Bunch find = SpackCommand("find") @@ -453,3 +456,140 @@ def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path): with test_environment: output = find() assert "zlib%gcc@12.1.0" in output + + +_pkga = ( + "a0", + """\ +class A0(Package): + version("1.2") + version("1.1") + + depends_on("b0") + depends_on("c0") +""", +) + + +_pkgb = ( + "b0", + """\ +class B0(Package): + version("1.2") + version("1.1") +""", +) + + +_pkgc = ( + "c0", + """\ +class C0(Package): + version("1.2") + version("1.1") + + tags = ["tag0", "tag1"] +""", +) + + +_pkgd = ( + "d0", + """\ +class D0(Package): + version("1.2") + version("1.1") + + depends_on("c0") + depends_on("e0") +""", +) + + +_pkge = ( + "e0", + """\ +class E0(Package): + tags = ["tag1", "tag2"] + + version("1.2") + version("1.1") +""", +) + + +@pytest.fixture +def _create_test_repo(tmpdir, mutable_config): + r""" + a0 d0 + / \ / \ + b0 c0 e0 + """ + yield create_test_repo(tmpdir, [_pkga, _pkgb, _pkgc, _pkgd, _pkge]) + + +@pytest.fixture +def test_repo(_create_test_repo, monkeypatch, mock_stage): + with spack.repo.use_repositories(_create_test_repo) as mock_repo_path: + yield mock_repo_path + + +def test_find_concretized_not_installed( + mutable_mock_env_path, install_mockery, mock_fetch, test_repo, mock_archive +): + """Test queries against installs of specs against fake repo. + + Given A, B, C, D, E, create an environment and install A. + Add and concretize (but do not install) D. + Test a few queries after force uninstalling a dependency of A (but not + A itself). + """ + add = SpackCommand("add") + concretize = SpackCommand("concretize") + uninstall = SpackCommand("uninstall") + + def _query(_e, *args): + return spack.cmd.find._find_query(SpackCommandArgs("find")(*args), _e) + + def _nresults(_qresult): + return len(_qresult[0]), len(_qresult[1]) + + env("create", "test") + with ev.read("test") as e: + install("--fake", "--add", "a0") + + assert _nresults(_query(e)) == (3, 0) + assert _nresults(_query(e, "--explicit")) == (1, 0) + + add("d0") + concretize("--reuse") + + # At this point d0 should use existing c0, but d/e + # are not installed in the env + + # --explicit, --deprecated, --start-date, etc. 
are all + # filters on records, and therefore don't apply to + # concretized-but-not-installed results + assert _nresults(_query(e, "--explicit")) == (1, 2) + + assert _nresults(_query(e)) == (3, 2) + assert _nresults(_query(e, "-c", "d0")) == (0, 1) + + uninstall("-f", "-y", "b0") + + # b0 is now missing (it is not installed, but has an + # installed parent) + + assert _nresults(_query(e)) == (2, 3) + # b0 is "double-counted" here: it meets the --missing + # criteria, and also now qualifies as a + # concretized-but-not-installed spec + assert _nresults(_query(e, "--missing")) == (3, 3) + assert _nresults(_query(e, "--only-missing")) == (1, 3) + + # Tags are not attached to install records, so they + # can modify the concretized-but-not-installed results + + assert _nresults(_query(e, "--tag=tag0")) == (1, 0) + assert _nresults(_query(e, "--tag=tag1")) == (1, 1) + assert _nresults(_query(e, "--tag=tag2")) == (0, 1) diff --git a/lib/spack/spack/test/utilities.py b/lib/spack/spack/test/utilities.py new file mode 100644 index 00000000000000..5e83db9da27939 --- /dev/null +++ b/lib/spack/spack/test/utilities.py @@ -0,0 +1,32 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +"""Non-fixture utilities for test code. Must be imported. +""" +from spack.main import make_argument_parser + + +class SpackCommandArgs: + """Use this to get an Args object like what is passed into + a command. + + Useful for emulating args in unit tests that want to check + helper functions in Spack commands. Ensures that you get all + the default arg values established by the parser. + + Example usage:: + + install_args = SpackCommandArgs("install")("-v", "mpich") + """ + + def __init__(self, command_name): + self.parser = make_argument_parser() + self.command_name = command_name + + def __call__(self, *argv, **kwargs): + self.parser.add_command(self.command_name) + prepend = kwargs["global_args"] if "global_args" in kwargs else [] + args, unknown = self.parser.parse_known_args(prepend + [self.command_name] + list(argv)) + return args From 484c9cf47c3017e96803edb16eb407a087ec2f11 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 11 Nov 2024 19:55:47 +0100 Subject: [PATCH 156/208] py-pillow: patch for disabling optional deps (#47542) --- var/spack/repos/builtin/packages/py-pillow/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py index 1e0e5374200cfd..3bd7e9399e9c3d 100644 --- a/var/spack/repos/builtin/packages/py-pillow/package.py +++ b/var/spack/repos/builtin/packages/py-pillow/package.py @@ -74,6 +74,12 @@ class PyPillowBase(PythonPackage): depends_on("libimagequant", when="+imagequant") depends_on("libxcb", when="+xcb") + patch( + "https://github.com/python-pillow/Pillow/commit/1c11d4581c5705dfa21bc5a4f3b6980c556978bf.patch?full_index=1", + sha256="599f37e6a5a8d1adb9f4025ffc7cae5f5b61cad60a04e7c7a3015f9e350047bb", + when="@11.0.0", + ) + @when("@10:") def config_settings(self, spec, prefix): settings = {"parallel": make_jobs} From a55073e7b09df4a6b2f53844fe470125a1ca76ba Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Mon, 11 Nov 2024 13:57:45 -0800 Subject: [PATCH 157/208] vtk-m %oneapi@2025: cxxflags add -Wno-error=missing-template-arg-list-after-template-kw (#47477) --- 
var/spack/repos/builtin/packages/vtk-m/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/vtk-m/package.py b/var/spack/repos/builtin/packages/vtk-m/package.py index 6bf7c4203f5538..c5cef49c5856f2 100644 --- a/var/spack/repos/builtin/packages/vtk-m/package.py +++ b/var/spack/repos/builtin/packages/vtk-m/package.py @@ -185,6 +185,12 @@ class VtkM(CMakePackage, CudaPackage, ROCmPackage): when="@1.6.0:2.1 +cuda ^cuda@12.5:", ) + def flag_handler(self, name, flags): + if name == "cxxflags": + if self.spec.satisfies("@:2.2.0 %oneapi@2025:"): + flags.append("-Wno-error=missing-template-arg-list-after-template-kw") + return (flags, None, None) + def cmake_args(self): spec = self.spec options = [] From 4691301eba3747e1e488034f114cf06946420da4 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 11 Nov 2024 23:12:43 +0100 Subject: [PATCH 158/208] Compiler.default_libc: early exit on darwin/win (#47554) * Compiler.default_libc: early exit on darwin/win * use .cc when testing c++ compiler if c compiler is missing --- lib/spack/spack/compiler.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 46382a3d983107..e16e4a2725c194 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -428,6 +428,11 @@ def default_dynamic_linker(self) -> Optional[str]: @property def default_libc(self) -> Optional["spack.spec.Spec"]: """Determine libc targeted by the compiler from link line""" + # technically this should be testing the target platform of the compiler, but we don't have + # that, so stick to host platform for now. + if sys.platform in ("darwin", "win32"): + return None + dynamic_linker = self.default_dynamic_linker if not dynamic_linker: @@ -449,14 +454,20 @@ def compiler_verbose_output(self) -> Optional[str]: return self.cache.get(self).c_compiler_output def _compile_dummy_c_source(self) -> Optional[str]: - cc = self.cc if self.cc else self.cxx + if self.cc: + cc = self.cc + ext = "c" + else: + cc = self.cxx + ext = "cc" + if not cc or not self.verbose_flag: return None try: tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info") fout = os.path.join(tmpdir, "output") - fin = os.path.join(tmpdir, "main.c") + fin = os.path.join(tmpdir, f"main.{ext}") with open(fin, "w") as csource: csource.write( From 786f8dfcce92a9464f988478f27743c7dbec27f7 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 11 Nov 2024 23:14:38 +0100 Subject: [PATCH 159/208] openmpi: fix detection (#47541) Take a simpler approach to listing variant options -- store them in variables instead of trying to roundtrip them through metadata dictionaries. 
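Sketch of the approach, using the names from the package code in the diff below (the option lists become plain class attributes instead of being recovered from variant metadata):

    # before: introspect the variant definition to get the option names
    fabrics = get_options_from_variant(cls, "fabrics")

    # after: a simple class-level tuple, shared by the variant declaration
    # and by determine_variants()
    FABRICS = ("psm", "psm2", "verbs", "mxm", "ucx", "ofi", ...)
    for fabric in cls.FABRICS:
        ...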
--- .../repos/builtin/packages/openmpi/package.py | 56 +++++++++---------- 1 file changed, 25 insertions(+), 31 deletions(-) diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index 58105a90f7538f..c82e58cfba6511 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -452,33 +452,34 @@ class Openmpi(AutotoolsPackage, CudaPackage): patch("pmix_getline_pmix_version.patch", when="@5.0.0:5.0.3") patch("pmix_getline_pmix_version-prte.patch", when="@5.0.3") + FABRICS = ( + "psm", + "psm2", + "verbs", + "mxm", + "ucx", + "ofi", + "fca", + "hcoll", + "ucc", + "xpmem", + "cma", + "knem", + ) + variant( "fabrics", values=disjoint_sets( - ("auto",), - ( - "psm", - "psm2", - "verbs", - "mxm", - "ucx", - "ofi", - "fca", - "hcoll", - "ucc", - "xpmem", - "cma", - "knem", - ), # shared memory transports + ("auto",), FABRICS # shared memory transports ).with_non_feature_values("auto", "none"), description="List of fabrics that are enabled; " "'auto' lets openmpi determine", ) + SCHEDULERS = ("alps", "lsf", "tm", "slurm", "sge", "loadleveler") + variant( "schedulers", - values=disjoint_sets( - ("auto",), ("alps", "lsf", "tm", "slurm", "sge", "loadleveler") - ).with_non_feature_values("auto", "none"), + values=disjoint_sets(("auto",), SCHEDULERS).with_non_feature_values("auto", "none"), description="List of schedulers for which support is enabled; " "'auto' lets openmpi determine", ) @@ -806,24 +807,26 @@ def determine_variants(cls, exes, version): variants.append("~pmi") # fabrics - fabrics = get_options_from_variant(cls, "fabrics") used_fabrics = [] - for fabric in fabrics: + for fabric in cls.FABRICS: match = re.search(r"\bMCA (?:mtl|btl|pml): %s\b" % fabric, output) if match: used_fabrics.append(fabric) if used_fabrics: variants.append("fabrics=" + ",".join(used_fabrics)) + else: + variants.append("fabrics=none") # schedulers - schedulers = get_options_from_variant(cls, "schedulers") used_schedulers = [] - for scheduler in schedulers: + for scheduler in cls.SCHEDULERS: match = re.search(r"\bMCA (?:prrte|ras): %s\b" % scheduler, output) if match: used_schedulers.append(scheduler) if used_schedulers: variants.append("schedulers=" + ",".join(used_schedulers)) + else: + variants.append("schedulers=none") # Get the appropriate compiler match = re.search(r"\bC compiler absolute: (\S+)", output) @@ -1412,12 +1415,3 @@ def is_enabled(text): if text in set(["t", "true", "enabled", "yes", "1"]): return True return False - - -# This code gets all the fabric names from the variants list -# Idea taken from the AutotoolsPackage source. -def get_options_from_variant(self, name): - values = self.variants[name][0].values - if getattr(values, "feature_values", None): - values = values.feature_values - return values From 993f7432452908c05676cc0d3a67eb17129784b8 Mon Sep 17 00:00:00 2001 From: Paul Gessinger Date: Tue, 12 Nov 2024 00:03:08 +0100 Subject: [PATCH 160/208] soqt: new package (#47443) * soqt: Add SoQt package The geomodel package needs this if visualization is turned on. 
* make qt versions explicit * use virtual dependency for qt * pr feedback Remove myself as maintainer Remove v1.6.0 Remove unused qt variant --- .../repos/builtin/packages/soqt/package.py | 56 +++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 var/spack/repos/builtin/packages/soqt/package.py diff --git a/var/spack/repos/builtin/packages/soqt/package.py b/var/spack/repos/builtin/packages/soqt/package.py new file mode 100644 index 00000000000000..bceeb5bc46dd9d --- /dev/null +++ b/var/spack/repos/builtin/packages/soqt/package.py @@ -0,0 +1,56 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Soqt(CMakePackage): + """Old Coin GUI binding for Qt, replaced by Quarter""" + + homepage = "https://github.com/coin3d/soqt/" + url = "https://github.com/coin3d/soqt/releases/download/v1.6.3/soqt-1.6.3-src.tar.gz" + git = "https://github.com/coin3d/soqt/" + + depends_on("cxx", type="build") + depends_on("cmake@3:", type="build") + + license("BSD-3-Clause", checked_by="paulgessinger") + + version("1.6.3", sha256="79342e89290783457c075fb6a60088aad4a48ea072ede06fdf01985075ef46bd") + version("1.6.2", sha256="fb483b20015ab827ba46eb090bd7be5bc2f3d0349c2f947c3089af2b7003869c") + version("1.6.1", sha256="80289d9bd49ffe709ab85778c952573f43f1c725ea958c6d5969b2e9c77bb3ba") + + depends_on("coin3d") + depends_on("opengl") + + variant( + "static_defaults", + default=True, + description="Enable statically linked in default materials", + ) + variant("spacenav", default=True, description="Enable Space Navigator support") + variant("tests", default=False, description="Build small test programs.") + variant("iv", default=True, description="Enable extra Open Inventor extensions") + + depends_on("qmake") + with when("^[virtuals=qmake] qt"): + depends_on("qt@5 +gui +opengl") + with when("^[virtuals=qmake] qt-base"): + depends_on("qt-base@6 +gui +opengl +widgets") + + def cmake_args(self): + args = [ + self.define_from_variant("COIN_IV_EXTENSIONS", "iv"), + self.define_from_variant("WITH_STATIC_DEFAULTS", "static_defaults"), + self.define_from_variant("HAVE_SPACENAV_SUPPORT", "spacenav"), + self.define_from_variant("SOQT_BUILD_TESTS", "tests"), + ] + if self.spec.satisfies("^[virtuals=qmake] qt"): + args.append(self.define("SOQT_USE_QT5", True)) + args.append(self.define("SOQT_USE_QT6", False)) + if self.spec.satisfies("^[virtuals=qmake] qt-base"): + args.append(self.define("SOQT_USE_QT5", False)) + args.append(self.define("SOQT_USE_QT6", True)) + return args From 247446a8f3b4917ef079ee116646c50cfb8b4ab4 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 11 Nov 2024 18:12:48 -0600 Subject: [PATCH 161/208] bfs: add v4.0.4 (#47550) --- var/spack/repos/builtin/packages/bfs/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/bfs/package.py b/var/spack/repos/builtin/packages/bfs/package.py index add324f8d44e8f..105b698be4aefc 100644 --- a/var/spack/repos/builtin/packages/bfs/package.py +++ b/var/spack/repos/builtin/packages/bfs/package.py @@ -16,6 +16,7 @@ class Bfs(MakefilePackage): license("0BSD") + version("4.0.4", sha256="209da9e9f43d8fe30fd689c189ea529e9d6b5358ce84a63a44721003aea3e1ca") version("4.0.1", sha256="8117b76b0a967887278a11470cbfa9e7aeae98f11a7eeb136f456ac462e5ba23") version("3.1.1", sha256="d73f345c1021e0630e0db930a3fa68dd1f968833037d8471ee1096e5040bf91b") 
version("3.1", sha256="aa6a94231915d3d37e5dd62d194cb58a575a8f45270020f2bdd5ab41e31d1492") From 9fd698edcbb6961da40fad2eef1232dd2a62f54c Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 11 Nov 2024 18:15:34 -0600 Subject: [PATCH 162/208] fzf: add v0.56.2 (#47549) --- var/spack/repos/builtin/packages/fzf/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/fzf/package.py b/var/spack/repos/builtin/packages/fzf/package.py index a3c75798c47fd6..91b838a3fe8222 100644 --- a/var/spack/repos/builtin/packages/fzf/package.py +++ b/var/spack/repos/builtin/packages/fzf/package.py @@ -19,6 +19,7 @@ class Fzf(MakefilePackage): license("MIT") + version("0.56.2", sha256="1d67edb3e3ffbb14fcbf786bfcc0b5b8d87db6a0685135677b8ef4c114d2b864") version("0.55.0", sha256="805383f71bca7f8fb271ecd716852aea88fd898d5027d58add9e43df6ea766da") version("0.54.3", sha256="6413f3916f8058b396820f9078b1336d94c72cbae39c593b1d16b83fcc4fdf74") version("0.53.0", sha256="d45abbfb64f21913c633d46818d9d3eb3d7ebc7e94bd16f45941958aa5480e1d") From 31b2b790e76cb38214503be1f76d5d767a645555 Mon Sep 17 00:00:00 2001 From: teddy Date: Tue, 12 Nov 2024 01:57:00 +0100 Subject: [PATCH 163/208] py-non-regression-test-tools: add v1.1.4 (#47520) Co-authored-by: t. chantrait --- .../builtin/packages/py-non-regression-test-tools/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-non-regression-test-tools/package.py b/var/spack/repos/builtin/packages/py-non-regression-test-tools/package.py index d97e7868527847..e72f207275e523 100644 --- a/var/spack/repos/builtin/packages/py-non-regression-test-tools/package.py +++ b/var/spack/repos/builtin/packages/py-non-regression-test-tools/package.py @@ -18,8 +18,9 @@ class PyNonRegressionTestTools(PythonPackage): version("develop", branch="develop") version("main", branch="main") - version("1.1.2", tag="v1.1.2", preferred=True) + version("1.1.4", tag="v1.1.4") + version("1.1.2", tag="v1.1.2") depends_on("py-numpy", type="run") depends_on("python@3.10:", type="run") - depends_on("py-setuptools", type="build") + depends_on("py-setuptools@69.2.0:", type="build") From 82dd33c04c4f6e9308bfd97f5d352a20c252b218 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 11 Nov 2024 19:21:27 -0600 Subject: [PATCH 164/208] git: add v2.46.2, v2.47.0 (#47534) --- var/spack/repos/builtin/packages/git/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index da8c14c6ad170a..713fd0abf8de8c 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -29,6 +29,8 @@ class Git(AutotoolsPackage): # Every new git release comes with a corresponding manpage resource: # https://www.kernel.org/pub/software/scm/git/git-manpages-{version}.tar.gz # https://mirrors.edge.kernel.org/pub/software/scm/git/sha256sums.asc + version("2.47.0", sha256="a84a7917e0ab608312834413f01fc01edc7844f9f9002ba69f3b4f4bcb8d937a") + version("2.46.2", sha256="65c5689fd44f1d09de7fd8c44de7fef074ddd69dda8b8503d44afb91495ecbce") version("2.45.2", sha256="98b26090ed667099a3691b93698d1e213e1ded73d36a2fde7e9125fce28ba234") version("2.44.2", sha256="f0655e81c5ecfeef7440aa4fcffa1c1a77eaccf764d6fe29579e9a06eac2cd04") version("2.43.5", sha256="324c3b85d668e6afe571b3502035848e4b349dead35188e2b8ab1b96c0cd45ff") @@ -99,6 +101,8 @@ class Git(AutotoolsPackage): depends_on("c", type="build") # generated 
for _version, _sha256_manpage in { + "2.47.0": "1a6f1e775dfe324a9b521793cbd2b3bba546442cc2ac2106d4df33dea9005038", + "2.46.2": "4bc3774ee4597098977befa4ec30b0f2cbed3b59b756e7cbb59ce1738682d43a", "2.45.2": "48c1e2e3ecbb2ce9faa020a19fcdbc6ce64ea25692111b5930686bc0bb4f0e7f", "2.45.1": "d9098fd93a3c0ef242814fc856a99886ce31dae2ba457afc416ba4e92af8f8f5", "2.44.2": "ee6a7238d5ede18fe21c0cc2131c7fbff1f871c25e2848892ee864d40baf7218", From cbd9fad66e711c91219e481f2c2613edbf10f5e9 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 11 Nov 2024 19:22:32 -0600 Subject: [PATCH 165/208] xtrans: add v1.5.2 (#47530) --- var/spack/repos/builtin/packages/xtrans/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/xtrans/package.py b/var/spack/repos/builtin/packages/xtrans/package.py index 92a5c73d87dea3..aeadc39d7f547f 100644 --- a/var/spack/repos/builtin/packages/xtrans/package.py +++ b/var/spack/repos/builtin/packages/xtrans/package.py @@ -19,6 +19,7 @@ class Xtrans(AutotoolsPackage, XorgPackage): maintainers("wdconinc") + version("1.5.2", sha256="23031301f10fef5eaa55b438610fbd29294a70d2fa189355343bf0186bff8374") version("1.5.0", sha256="a806f8a92f879dcd0146f3f1153fdffe845f2fc0df9b1a26c19312b7b0a29c86") version("1.4.0", sha256="48ed850ce772fef1b44ca23639b0a57e38884045ed2cbb18ab137ef33ec713f9") version("1.3.5", sha256="b7a577c1b6c75030145e53b4793db9c88f9359ac49e7d771d4385d21b3e5945d") From b748907a61543572d794509d4b1537c58f9d0a50 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 11 Nov 2024 19:26:29 -0600 Subject: [PATCH 166/208] pixman: add v0.44.0 (switch to meson) (#47529) * pixman: add v0.44.0 (switch to meson) --- .../repos/builtin/packages/pixman/package.py | 37 ++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/pixman/package.py b/var/spack/repos/builtin/packages/pixman/package.py index a1abb45bba7744..e84ff0a48aaa1f 100644 --- a/var/spack/repos/builtin/packages/pixman/package.py +++ b/var/spack/repos/builtin/packages/pixman/package.py @@ -18,6 +18,7 @@ class Pixman(AutotoolsPackage): license("MIT") + version("0.44.0", sha256="89a4c1e1e45e0b23dffe708202cb2eaffde0fe3727d7692b2e1739fec78a7dac") version("0.42.2", sha256="ea1480efada2fd948bc75366f7c349e1c96d3297d09a3fe62626e38e234a625e") version("0.42.0", sha256="07f74c8d95e4a43eb2b08578b37f40b7937e6c5b48597b3a0bb2c13a53f46c13") version("0.40.0", sha256="6d200dec3740d9ec4ec8d1180e25779c00bc749f94278c8b9021f5534db223fc") @@ -26,11 +27,19 @@ class Pixman(AutotoolsPackage): version("0.34.0", sha256="21b6b249b51c6800dc9553b65106e1e37d0e25df942c90531d4c3997aa20a88e") version("0.32.6", sha256="3dfed13b8060eadabf0a4945c7045b7793cc7e3e910e748a8bb0f0dc3e794904") - depends_on("c", type="build") # generated + build_system( + conditional("autotools", when="@:0.42"), + conditional("meson", when="@0.38:"), + default="meson", + ) + depends_on("c", type="build") + with when("build_system=meson"): + depends_on("meson@0.52:", type="build") depends_on("pkgconfig", type="build") depends_on("flex", type="build") depends_on("bison@3:", type="build") + depends_on("libpng") variant("shared", default=True, description="Build shared library") @@ -68,6 +77,32 @@ def libs(self): "libpixman-1", self.prefix, shared=self.spec.satisfies("+shared"), recursive=True ) + +class MesonBuilder(spack.build_systems.meson.MesonBuilder): + def meson_args(self): + args = ["-Dlibpng=enabled", "-Dgtk=disabled", "-Db_staticpic=true"] + + if sys.platform == "darwin": + args += 
["-Dmmx=disabled"] + + # From homebrew, see: + # https://gitlab.freedesktop.org/pixman/pixman/-/issues/59 + # https://gitlab.freedesktop.org/pixman/pixman/-/issues/69 + if self.spec.target.family == "aarch64": + args.append("-Da64-neon=disabled") + + # The Fujitsu compiler does not support assembler macros. + if self.spec.satisfies("%fj"): + args.append("-Da64-neon=disabled") + + args.append( + "-Ddefault_library=" + ("shared" if self.spec.satisfies("+shared") else "static") + ) + + return args + + +class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): def configure_args(self): args = ["--enable-libpng", "--disable-gtk", "--with-pic"] From 4d91d3f77fa11f57f0e64551a1a2e220da35c0aa Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 11 Nov 2024 19:28:06 -0600 Subject: [PATCH 167/208] scitokens-cpp: add v1.1.2 (#47523) --- var/spack/repos/builtin/packages/scitokens-cpp/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/scitokens-cpp/package.py b/var/spack/repos/builtin/packages/scitokens-cpp/package.py index 1426828700f0eb..6c193e4bd274db 100644 --- a/var/spack/repos/builtin/packages/scitokens-cpp/package.py +++ b/var/spack/repos/builtin/packages/scitokens-cpp/package.py @@ -17,6 +17,7 @@ class ScitokensCpp(CMakePackage): license("Apache-2.0") + version("1.1.2", sha256="07d33cb51a3ccd8460f2acebb15b35393aeccfc70e3554a73c9e5cffed6edb39") version("1.1.1", sha256="a9091b888fc778282caf2a6808c86f685d2411557673152d58fe53932a6c7212") version("1.1.0", sha256="9c4afd6638e94855ede52ecfc3d4f05082f2bdf151a9ab8dafcc2bb7cd4d9039") version("1.0.2", sha256="cdc1e80e0cba9ca0e16de2efa10ec5e38765792bf5107024bfb66ddad5a16a85") From 09a88ad3bd2edffc30957ddaca77d1c6330cbbb0 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 11 Nov 2024 19:30:30 -0600 Subject: [PATCH 168/208] xerces-c: add v3.3.0 (#47522) --- .../repos/builtin/packages/xerces-c/package.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/xerces-c/package.py b/var/spack/repos/builtin/packages/xerces-c/package.py index a62f25746e37fc..ac10c1e068d079 100644 --- a/var/spack/repos/builtin/packages/xerces-c/package.py +++ b/var/spack/repos/builtin/packages/xerces-c/package.py @@ -20,13 +20,16 @@ class XercesC(AutotoolsPackage): license("Apache-2.0") + version("3.3.0", sha256="ef752578587e26013a933f16d76305c9b43ca32f869e3d3426986e03efb01d64") version("3.2.5", sha256="1db4028c9b7f1f778efbf4a9462d65e13f9938f2c22f9e9994e12c49ba97e252") - version("3.2.4", sha256="74aa626fc71e729ee227602870dd29a5a01cd8c9c1c7330837a51da2eb5722cc") - version("3.2.3", sha256="45c2329e684405f2b8854ecbddfb8d5b055cdf0fe4d35736cc352c504989bbb6") - version("3.2.2", sha256="1f2a4d1dbd0086ce0f52b718ac0fa4af3dc1ce7a7ff73a581a05fbe78a82bce0") - version("3.2.1", sha256="a36b6e162913ec218cfb84772d2535d43c3365355a601d45d4b8ce11f0ece0da") - version("3.1.4", sha256="9408f12c1628ecf80730bedbe8b2caad810edd01bb4c66f77b60c873e8cc6891") - version("3.1.3", sha256="fc5e5e0247b108b8d64d75aeb124cabdee9b7fcd725a89fe2242b4637b25c1fa") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2018-1311 + version("3.2.4", sha256="74aa626fc71e729ee227602870dd29a5a01cd8c9c1c7330837a51da2eb5722cc") + version("3.2.3", sha256="45c2329e684405f2b8854ecbddfb8d5b055cdf0fe4d35736cc352c504989bbb6") + version("3.2.2", sha256="1f2a4d1dbd0086ce0f52b718ac0fa4af3dc1ce7a7ff73a581a05fbe78a82bce0") + version("3.2.1", 
sha256="a36b6e162913ec218cfb84772d2535d43c3365355a601d45d4b8ce11f0ece0da") + version("3.1.4", sha256="9408f12c1628ecf80730bedbe8b2caad810edd01bb4c66f77b60c873e8cc6891") + version("3.1.3", sha256="fc5e5e0247b108b8d64d75aeb124cabdee9b7fcd725a89fe2242b4637b25c1fa") depends_on("c", type="build") # generated depends_on("cxx", type="build") # generated From b98e5886e5c9b5e005b54fa727704da384d81f21 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 11 Nov 2024 19:44:28 -0600 Subject: [PATCH 169/208] py-pyppeteer: new package (#47375) * py-pyppeteer: new package * py-pyee: new package (v11.1.1, v12.0.0) --- .../repos/builtin/packages/py-pyee/package.py | 25 ++++++++++++++++ .../builtin/packages/py-pyppeteer/package.py | 29 +++++++++++++++++++ 2 files changed, 54 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-pyee/package.py create mode 100644 var/spack/repos/builtin/packages/py-pyppeteer/package.py diff --git a/var/spack/repos/builtin/packages/py-pyee/package.py b/var/spack/repos/builtin/packages/py-pyee/package.py new file mode 100644 index 00000000000000..34d26b7b57ab6d --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyee/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPyee(PythonPackage): + """A rough port of Node.js's EventEmitter to Python + with a few tricks of its own.""" + + homepage = "https://github.com/jfhbrook/pyee" + pypi = "pyee/pyee-12.0.0.tar.gz" + + license("MIT", checked_by="wdconinc") + + version("12.0.0", sha256="c480603f4aa2927d4766eb41fa82793fe60a82cbfdb8d688e0d08c55a534e145") + version("11.1.1", sha256="82e1eb1853f8497c4ff1a0c7fa26b9cd2f1253e2b6ffb93b4700fda907017302") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") + depends_on("py-wheel", type="build") + depends_on("py-typing-extensions", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pyppeteer/package.py b/var/spack/repos/builtin/packages/py-pyppeteer/package.py new file mode 100644 index 00000000000000..64a999c3e40a3c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyppeteer/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPyppeteer(PythonPackage): + """Headless chrome/chromium automation library + (unofficial port of puppeteer).""" + + homepage = "https://github.com/pyppeteer/pyppeteer" + pypi = "pyppeteer/pyppeteer-2.0.0.tar.gz" + + license("MIT") + + version("2.0.0", sha256="4af63473ff36a746a53347b2336a49efda669bcd781e400bc1799b81838358d9") + + depends_on("py-poetry-core", type="build") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-appdirs@1.4.3:1", type=("build", "run")) + depends_on("py-importlib-metadata@1.4:", type=("build", "run")) + depends_on("py-pyee@11", type=("build", "run")) + depends_on("py-tqdm@4.42.1:4", type=("build", "run")) + depends_on("py-urllib3@1.25.8:1", type=("build", "run")) + depends_on("py-websockets@10", type=("build", "run")) + depends_on("py-certifi@2023:", type=("build", "run")) From cdaacce4db9207397d553a45239f73bfb4121e94 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 11 Nov 2024 19:47:53 -0600 Subject: [PATCH 170/208] varnish-cache: add v7.6.1 (#47513) --- .../builtin/packages/varnish-cache/package.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/varnish-cache/package.py b/var/spack/repos/builtin/packages/varnish-cache/package.py index 2950922511ee9c..b6ffd4e571142e 100644 --- a/var/spack/repos/builtin/packages/varnish-cache/package.py +++ b/var/spack/repos/builtin/packages/varnish-cache/package.py @@ -10,15 +10,18 @@ class VarnishCache(AutotoolsPackage): """This is Varnish Cache, the high-performance HTTP accelerator.""" homepage = "https://www.varnish-cache.org/" - url = "https://github.com/varnishcache/varnish-cache/archive/varnish-6.4.0.tar.gz" + url = "https://github.com/varnishcache/varnish-cache/archive/refs/tags/varnish-6.4.0.tar.gz" license("BSD-2-Clause") - version("6.4.0", sha256="d9702c2c689c5d4ecd911886f769ddf22f46ac0722e275bee4033928cab09243") - version("6.3.2", sha256="e50f3dd4e26d5669c5b73657cdb0d5ddac7dcc3cfa1761a983afa24b659f3785") - version("6.3.1", sha256="8cc57360c1db36e8c77fc51304a935803a06247f6d6120fa47e8345efadf17a9") - version("6.3.0", sha256="c7170d4bc57f1d2454da046fc5e43e2d19a804448d2dd839fa5c33f76bd677bb") - version("6.2.3", sha256="64cd273aa155c78c21e74def53622be5920c8a7d952fee74f0663e57a01c9a9d") + version("7.6.1", sha256="6cfa30d761fa5edf33322048564cda3ee99de93ee57732c10f720d98d12f1899") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2022-23959 + version("6.4.0", sha256="d9702c2c689c5d4ecd911886f769ddf22f46ac0722e275bee4033928cab09243") + version("6.3.2", sha256="e50f3dd4e26d5669c5b73657cdb0d5ddac7dcc3cfa1761a983afa24b659f3785") + version("6.3.1", sha256="8cc57360c1db36e8c77fc51304a935803a06247f6d6120fa47e8345efadf17a9") + version("6.3.0", sha256="c7170d4bc57f1d2454da046fc5e43e2d19a804448d2dd839fa5c33f76bd677bb") + version("6.2.3", sha256="64cd273aa155c78c21e74def53622be5920c8a7d952fee74f0663e57a01c9a9d") depends_on("c", type="build") # generated @@ -26,7 +29,9 @@ class VarnishCache(AutotoolsPackage): depends_on("automake", type="build") depends_on("libtool", type="build") depends_on("m4", type="build") - depends_on("pcre") + depends_on("pkgconfig", type="build") + depends_on("pcre2", when="@7:") + depends_on("pcre", when="@:6") depends_on("readline") depends_on("python", type=("build", "run")) depends_on("py-sphinx", type=("build", "run")) From c44c938cafb64405de978a948f3648c6ee8f9399 Mon Sep 17 00:00:00 2001 
From: Wouter Deconinck Date: Mon, 11 Nov 2024 19:50:02 -0600 Subject: [PATCH 171/208] rsyslog: add v8.2410.0 (fix CVE) (#47511) * rsyslog: add v8.2410.0 --- .../repos/builtin/packages/rsyslog/package.py | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/rsyslog/package.py b/var/spack/repos/builtin/packages/rsyslog/package.py index 6de8baf1116cd6..eafa2d9c1bab16 100644 --- a/var/spack/repos/builtin/packages/rsyslog/package.py +++ b/var/spack/repos/builtin/packages/rsyslog/package.py @@ -10,13 +10,22 @@ class Rsyslog(AutotoolsPackage): """The rocket-fast Syslog Server.""" homepage = "https://www.rsyslog.com/" - url = "https://github.com/rsyslog/rsyslog/archive/v8.2006.0.tar.gz" + url = "https://github.com/rsyslog/rsyslog/archive/refs/tags/v8.2006.0.tar.gz" license("Apache-2.0 AND GPL-3.0-or-later AND LGPL-3.0-or-later", checked_by="tgamblin") - version("8.2006.0", sha256="dc30a2ec02d5fac91d3a4f15a00641e0987941313483ced46592ab0b0d68f324") - version("8.2004.0", sha256="b56b985fec076a22160471d389b7ff271909dfd86513dad31e401a775a6dfdc2") - version("8.2002.0", sha256="b31d56311532335212ef2ea7be4501508224cb21f1bef9d262c6d78e21959ea1") + version("8.2410.0", sha256="0e4e6fcb1d72a1cb65438d85dd2bbf37a8f82115d7e271788535d1e7fbcf6838") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2022-24903 + version( + "8.2006.0", sha256="dc30a2ec02d5fac91d3a4f15a00641e0987941313483ced46592ab0b0d68f324" + ) + version( + "8.2004.0", sha256="b56b985fec076a22160471d389b7ff271909dfd86513dad31e401a775a6dfdc2" + ) + version( + "8.2002.0", sha256="b31d56311532335212ef2ea7be4501508224cb21f1bef9d262c6d78e21959ea1" + ) depends_on("c", type="build") # generated @@ -24,6 +33,7 @@ class Rsyslog(AutotoolsPackage): depends_on("automake", type="build") depends_on("libtool", type="build") depends_on("m4", type="build") + depends_on("pkgconfig", type="build") depends_on("libestr") depends_on("libfastjson") depends_on("zlib-api") @@ -36,6 +46,9 @@ class Rsyslog(AutotoolsPackage): def setup_run_environment(self, env): env.prepend_path("PATH", self.prefix.sbin) + def autoreconf(self, spec, prefix): + Executable("./autogen.sh")() + def configure_args(self): args = ["--with-systemdsystemunitdir=" + self.spec["rsyslog"].prefix.lib.systemd.system] return args From e38e51a6bcabfaaf2a2a23a7b3a20e907c61995a Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Mon, 11 Nov 2024 19:52:19 -0600 Subject: [PATCH 172/208] superlu-dist: add v9.1.0, v9.0.0 (#47461) Fix typo wrt @xiaoyeli --- var/spack/repos/builtin/packages/superlu-dist/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index 1440da388a4b66..0e3844bcc3a9f5 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -16,10 +16,12 @@ class SuperluDist(CMakePackage, CudaPackage, ROCmPackage): tags = ["e4s"] - maintainers("xiaoye", "gchavez2", "balay", "pghysels", "liuyangzhuan") + maintainers("xiaoyeli", "gchavez2", "balay", "pghysels", "liuyangzhuan") version("develop", branch="master") version("amd", branch="amd") + version("9.1.0", sha256="1cb2c6dc7e8231b2ec30c1266e55e440ffca9f55527771d8df28f900dd179f9d") + version("9.0.0", sha256="aa43d33d4b1b0f5f7b5ad7685e9a6bc25088832c6c74d2ab8f75a2c9f4e9e955") version("8.2.1", 
sha256="b77d065cafa6bc1a1dcc15bf23fd854f54b05762b165badcffc195835ad2bddf") version("8.2.0", sha256="d53573e5a399b2b4ab1fcc36e8421c1b6fab36345c0af14f8fa20326e3365f1f") version("8.1.2", sha256="7b16c442bb01ea8b298c0aab9a2584aa4615d09786aac968cb2f3118c058206b") From 0614ded2ef418fa9b2a90fb8c9db4dbc905dcdc2 Mon Sep 17 00:00:00 2001 From: renjithravindrankannath <94420380+renjithravindrankannath@users.noreply.github.com> Date: Mon, 11 Nov 2024 17:54:19 -0800 Subject: [PATCH 173/208] Removing args to get libraries added in RPATH (#47465) --- ...ng-rocm-path-with-package-path-6.2.1.patch | 686 ++++++++++++++++++ .../packages/rocm-validation-suite/package.py | 28 +- 2 files changed, 696 insertions(+), 18 deletions(-) create mode 100644 var/spack/repos/builtin/packages/rocm-validation-suite/009-replacing-rocm-path-with-package-path-6.2.1.patch diff --git a/var/spack/repos/builtin/packages/rocm-validation-suite/009-replacing-rocm-path-with-package-path-6.2.1.patch b/var/spack/repos/builtin/packages/rocm-validation-suite/009-replacing-rocm-path-with-package-path-6.2.1.patch new file mode 100644 index 00000000000000..5283f5a25627fc --- /dev/null +++ b/var/spack/repos/builtin/packages/rocm-validation-suite/009-replacing-rocm-path-with-package-path-6.2.1.patch @@ -0,0 +1,686 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 7867e3a..7268387 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -74,14 +74,18 @@ endif(rocblas_FOUND) + # Making ROCM_PATH, CMAKE_INSTALL_PREFIX, CPACK_PACKAGING_INSTALL_PREFIX as CACHE + # variables since we will pass them as cmake params appropriately, and + # all find_packages relevant to this build will be in ROCM path hence appending it to CMAKE_PREFIX_PATH +-set(ROCM_PATH "/opt/rocm" CACHE PATH "ROCM install path") +-set(CMAKE_INSTALL_PREFIX "/opt/rocm" CACHE PATH "CMAKE installation directory") +-set(CPACK_PACKAGING_INSTALL_PREFIX "/opt/rocm" CACHE PATH "Prefix used in built packages") ++set(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") ++set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) ++set(CPACK_PACKAGING_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}") + list(APPEND CMAKE_PREFIX_PATH "${ROCM_PATH}") +-set(ROCR_INC_DIR "${ROCM_PATH}/include" CACHE PATH "Contains header files exported by ROC Runtime" FORCE) +-set(ROCR_LIB_DIR "${ROCM_PATH}/lib" CACHE PATH "Contains library files exported by ROC Runtime" FORCE) +-set(HIP_INC_DIR "${ROCM_PATH}" CACHE PATH "Contains header files exported by ROC Runtime") +-set(ROCT_INC_DIR "${ROCM_PATH}/include" CACHE PATH "Contains header files exported by ROC Trunk" FORCE) ++set(ROCR_INC_DIR "${HSA_PATH}/include" CACHE PATH "Contains header files exported by ROC Runtime") ++set(ROCR_LIB_DIR "${HSA_PATH}/lib" CACHE PATH "Contains library files exported by ROC Runtime") ++set(HIP_INC_DIR "${HIP_PATH}" CACHE PATH "Contains header files exported by ROC Runtime") ++set(ROCT_INC_DIR "${ROCM_PATH}/include" CACHE PATH "Contains header files exported by ROC Trunk") ++set(HIPRAND_INC_DIR "${HIPRAND_DIR}/include" CACHE PATH "Contains header files exported by ROC Trunk") ++set(HIPRAND_LIB_DIR "${HIPRAND_DIR}/lib" CACHE PATH "Contains header files exported by ROC Trunk") ++set(ROCRAND_INC_DIR "${ROCRAND_DIR}/include" CACHE PATH "Contains header files exported by ROC Trunk") ++set(ROCRAND_LIB_DIR "${ROCRAND_DIR}/lib" CACHE PATH "Contains header files exported by ROC Trunk") + + add_definitions(-DROCM_PATH="${ROCM_PATH}") + if(FETCH_ROCMPATH_FROM_ROCMCORE) +@@ -443,15 +447,18 @@ add_custom_command(OUTPUT 
${CMAKE_BINARY_DIR}/rvs_smi-build/librocm_smi64.so + + endif() # if (RVS_ROCMSMI EQUAL 1) + +-set(HIPRAND_INC_DIR "${ROCM_PATH}/include") +-set(HIPRAND_LIB_DIR "${ROCM_PATH}/lib") ++set(HIPRAND_INC_DIR "${HIPRAND_DIR}/include") ++set(HIPRAND_LIB_DIR "${HIPRAND_DIR}/lib") ++ ++set(ROCRAND_INC_DIR "${ROCRAND_DIR}/include") ++set(ROCRAND_LIB_DIR "${ROCRAND_DIR}/lib") + + if (RVS_ROCBLAS EQUAL 1) + set(ROCBLAS_INC_DIR "${CMAKE_BINARY_DIR}/rvs_rblas-src/build/release/rocblas-install") + set(ROCBLAS_LIB_DIR "${CMAKE_BINARY_DIR}/rvs_rblas-src/build/release/rocblas-install/lib/") + else() +- set(ROCBLAS_INC_DIR "${ROCM_PATH}/include") +- set(ROCBLAS_LIB_DIR "${ROCM_PATH}/lib") ++ set(ROCBLAS_INC_DIR "${ROCBLAS_DIR}/include") ++ set(ROCBLAS_LIB_DIR "${ROCBLAS_DIR}/lib") + endif() + + if (RVS_ROCMSMI EQUAL 1) +@@ -466,8 +473,8 @@ else() + set(ROCM_SMI_LIB_DIR "${ROCM_PATH}/rocm_smi/lib") + else() + message( STATUS "ROCBLAS REORG Enabled Version: ${RVS_ROCBLAS_VERSION_FLAT}" ) +- set(ROCM_SMI_INC_DIR "${ROCM_PATH}/include") +- set(ROCM_SMI_LIB_DIR "${ROCM_PATH}/lib") ++ set(ROCM_SMI_INC_DIR "${ROCM_SMI_DIR}/include") ++ set(ROCM_SMI_LIB_DIR "${ROCM_SMI_DIR}/lib") + endif() + endif() + set(ROCM_SMI_LIB "rocm_smi64" CACHE STRING "rocm_smi library name") +diff --git a/babel.so/CMakeLists.txt b/babel.so/CMakeLists.txt +index 54a0e3a..c9ddeaa 100644 +--- a/babel.so/CMakeLists.txt ++++ b/babel.so/CMakeLists.txt +@@ -109,13 +109,13 @@ set(HIP_HCC_LIB "amdhip64") + add_compile_options(-DRVS_ROCBLAS_VERSION_FLAT=${RVS_ROCBLAS_VERSION_FLAT}) + + # Determine Roc Runtime header files are accessible +-if(NOT EXISTS ${HIP_INC_DIR}/include/hip/hip_runtime.h) +- message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_INC_DIR path. Current value is : " ${HIP_INC_DIR}) ++if(NOT EXISTS ${HIP_PATH}/include/hip/hip_runtime.h) ++ message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_PATH path. Current value is : " ${HIP_PATH}) + RETURN() + endif() + +-if(NOT EXISTS ${HIP_INC_DIR}/include/hip/hip_runtime_api.h) +- message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_INC_DIR path. Current value is : " ${HIP_INC_DIR}) ++if(NOT EXISTS ${HIP_PATH}/include/hip/hip_runtime_api.h) ++ message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_PATH path. Current value is : " ${HIP_PATH}) + RETURN() + endif() + +@@ -135,16 +135,16 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") +- message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) ++if(NOT EXISTS "${HIP_PATH}/lib/lib${HIP_HCC_LIB}.so") ++ message("ERROR: ROC Runtime libraries can't be found under specified path. Please set HIP_PATH path. 
Current value is : " ${HIP_PATH}) + RETURN() + endif() + + ## define include directories +-include_directories(./ ../ ${ROCR_INC_DIR} ${HIP_INC_DIR}) ++include_directories(./ ../ ${HIP_PATH}) + + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${HIP_PATH}/lib/ ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so) + +@@ -156,7 +156,7 @@ add_library( ${RVS_TARGET} SHARED ${SOURCES}) + set_target_properties(${RVS_TARGET} PROPERTIES + SUFFIX .so.${LIB_VERSION_STRING} + LIBRARY_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) +-target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_HCC_LIB} ${ROCBLAS_LIB}) ++target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_HCC_LIB} ${ROCBLAS_LIB} ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + add_dependencies(${RVS_TARGET} rvslib) + + add_custom_command(TARGET ${RVS_TARGET} POST_BUILD +diff --git a/cmake_modules/tests_unit.cmake b/cmake_modules/tests_unit.cmake +index 9760b72..d585f8b 100644 +--- a/cmake_modules/tests_unit.cmake ++++ b/cmake_modules/tests_unit.cmake +@@ -27,7 +27,7 @@ + ## define additional unit testing include directories + include_directories(${UT_INC}) + ## define additional unit testing lib directories +-link_directories(${UT_LIB} ${RVS_LIB_DIR}) ++link_directories(${UT_LIB} ${RVS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + + file(GLOB TESTSOURCES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} test/test*.cpp ) + #message ( "TESTSOURCES: ${TESTSOURCES}" ) +@@ -46,6 +46,7 @@ FOREACH(SINGLE_TEST ${TESTSOURCES}) + add_dependencies(${TEST_NAME} rvs_gtest_target) + target_link_libraries(${TEST_NAME} + ${UT_LINK_LIBS} rvslibut rvslib gtest_main gtest pthread pci ++ ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so + ) + target_compile_definitions(${TEST_NAME} PUBLIC RVS_UNIT_TEST) + if(DEFINED tcd.${TEST_NAME}) +diff --git a/edp.so/CMakeLists.txt b/edp.so/CMakeLists.txt +index 7dd34ea..41c8493 100644 +--- a/edp.so/CMakeLists.txt ++++ b/edp.so/CMakeLists.txt +@@ -128,17 +128,17 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. 
Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() + + ## define include directories +-include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR}) ++include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR} ${YAML_CPP_INCLUDE_DIRS} ${HIPRAND_INC_DIR} ${ROCRAND_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpthread.so libpciaccess.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpthread.so libpciaccess.so libpci.so libm.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set (SOURCES src/rvs_module.cpp src/action.cpp src/edp_worker.cpp ) +@@ -148,7 +148,7 @@ add_library( ${RVS_TARGET} SHARED ${SOURCES}) + set_target_properties(${RVS_TARGET} PROPERTIES + SUFFIX .so.${LIB_VERSION_STRING} + LIBRARY_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) +-target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_HCC_LIB} ${ROCBLAS_LIB}) ++target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_HCC_LIB} ${ROCBLAS_LIB} ${HIPRAND_LIB} ${ROCRAND_LIB}) + add_dependencies(${RVS_TARGET} rvslib) + + add_custom_command(TARGET ${RVS_TARGET} POST_BUILD +diff --git a/gm.so/CMakeLists.txt b/gm.so/CMakeLists.txt +index d3caa84..94a06be 100644 +--- a/gm.so/CMakeLists.txt ++++ b/gm.so/CMakeLists.txt +@@ -118,11 +118,11 @@ if(DEFINED RVS_ROCMSMI) + endif() + + ## define include directories +-include_directories(./ ../ ${ROCM_SMI_INC_DIR}) ++include_directories(./ ../ ${ROCM_SMI_INC_DIR} ${YAML_CPP_INCLUDE_DIRS}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so librocm_smi64.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp src/worker.cpp) +@@ -133,7 +133,7 @@ add_library( ${RVS_TARGET} SHARED ${SOURCES}) + set_target_properties(${RVS_TARGET} PROPERTIES + SUFFIX .so.${LIB_VERSION_STRING} + LIBRARY_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) +-target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${ROCM_SMI_LIB}) ++target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS}) + add_dependencies(${RVS_TARGET} rvslib) + + add_custom_command(TARGET ${RVS_TARGET} POST_BUILD +@@ -149,7 +149,7 @@ install(FILES "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/lib${RVS}.so" + + # TEST SECTION + if (RVS_BUILD_TESTS) +- add_custom_command(TARGET ${RVS_TARGET} POST_BUILD ++ B_add_custom_command(TARGET ${RVS_TARGET} POST_BUILD + COMMAND ln -fs ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/lib${RVS}.so.${VERSION_MAJOR} ${RVS_BINTEST_FOLDER}/lib${RVS}.so WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} + ) + include(${CMAKE_CURRENT_SOURCE_DIR}/tests.cmake) +diff --git a/gm.so/tests.cmake b/gm.so/tests.cmake +index 
b360065..172e97c 100644 +--- a/gm.so/tests.cmake ++++ b/gm.so/tests.cmake +@@ -30,11 +30,11 @@ set(CORE_RUNTIME_NAME "hsa-runtime") + set(CORE_RUNTIME_TARGET "${CORE_RUNTIME_NAME}64") + + set(UT_LINK_LIBS libpthread.so libpci.so libm.so libdl.so "lib${ROCM_SMI_LIB}.so" +- ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${YAML_CPP_LIBRARIES} ${HIPRAND_LIB} ++ ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${YAML_CPP_LIBRARIES} "lib${HIPRAND_LIB}.so" "lib${ROCRAND_LIB}.so" + ) + + # Add directories to look for library files to link +-link_directories(${ROCM_SMI_LIB_DIR} ${ROCT_LIB_DIR} ${ROCBLAS_LIB_DIR}) ++link_directories(${ROCM_SMI_LIB_DIR} ${ROCT_LIB_DIR} ${ROCBLAS_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + + set (UT_SOURCES src/action.cpp src/worker.cpp + ) +diff --git a/gpup.so/CMakeLists.txt b/gpup.so/CMakeLists.txt +index 43d337a..c92d8ba 100644 +--- a/gpup.so/CMakeLists.txt ++++ b/gpup.so/CMakeLists.txt +@@ -109,11 +109,11 @@ else() + endif() + + ## define include directories +-include_directories(./ ../ include ../include) ++include_directories(./ ../ include ../include ${YAML_CPP_INCLUDE_DIRS}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpci.so libm.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp) +diff --git a/gpup.so/tests.cmake b/gpup.so/tests.cmake +index 9a1f7ed..3649ae4 100644 +--- a/gpup.so/tests.cmake ++++ b/gpup.so/tests.cmake +@@ -25,12 +25,13 @@ + + set(ROCBLAS_LIB "rocblas") + set(HIPRAND_LIB "hiprand") ++set(ROCRAND_LIB "rocrand") + set(ROC_THUNK_NAME "hsakmt") + set(CORE_RUNTIME_NAME "hsa-runtime") + set(CORE_RUNTIME_TARGET "${CORE_RUNTIME_NAME}64") + + set(UT_LINK_LIBS libpthread.so libm.so libdl.so ${ROCM_SMI_LIB} +- ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${YAML_CPP_LIBRARIES} ${HIPRAND_LIB}) ++ ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${YAML_CPP_LIBRARIES} ${HIPRAND_LIB} ${ROCRAND_LIB}) + + # Add directories to look for library files to link + link_directories(${RVS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ROCT_LIB_DIR} ${ROCBLAS_LIB_DIR}) +diff --git a/gst.so/CMakeLists.txt b/gst.so/CMakeLists.txt +index fd346ce..7e17a68 100644 +--- a/gst.so/CMakeLists.txt ++++ b/gst.so/CMakeLists.txt +@@ -137,17 +137,17 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. 
Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() + + ## define include directories +-include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR}) ++include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR} ${YAML_CPP_INCLUDE_DIRS} ${HIPRAND_INC_DIR} ${ROCRAND_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HIP_INC_DIR}/lib/ ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR} ${HIPRAND_DIR} ${ROCRAND_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp src/gst_worker.cpp) +diff --git a/iet.so/CMakeLists.txt b/iet.so/CMakeLists.txt +index 002c03c..604b86b 100644 +--- a/iet.so/CMakeLists.txt ++++ b/iet.so/CMakeLists.txt +@@ -145,7 +145,7 @@ if(DEFINED RVS_ROCMSMI) + endif() + endif() + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -160,11 +160,11 @@ if(DEFINED RVS_ROCMSMI) + endif() + + ## define include directories +-include_directories(./ ../ ${ROCM_SMI_INC_DIR} ${ROCBLAS_INC_DIR} ${ROCR_INC_DIR} ${HIP_INC_DIR}) ++include_directories(./ ../ ${ROCM_SMI_INC_DIR} ${ROCBLAS_INC_DIR} ${ROCR_INC_DIR} ${HIP_INC_DIR} ${HIPRAND_INC_DIR} ${ROCRAND_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ASAN_LIB_PATH} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so librocm_smi64.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so ) + + set(SOURCES src/rvs_module.cpp src/action.cpp src/iet_worker.cpp ) + +@@ -173,7 +173,7 @@ add_library( ${RVS_TARGET} SHARED ${SOURCES}) + set_target_properties(${RVS_TARGET} PROPERTIES + SUFFIX .so.${LIB_VERSION_STRING} + LIBRARY_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) +-target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_HCC_LIB} ${ROCBLAS_LIB} ${ROCM_SMI_LIB}) ++target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_INC_DIR}/lib/ ${HIP_HCC_LIB} ${ROCBLAS_LIB}) + add_dependencies(${RVS_TARGET} rvslib) + + add_custom_command(TARGET ${RVS_TARGET} POST_BUILD +diff --git a/mem.so/CMakeLists.txt b/mem.so/CMakeLists.txt +index 5133337..3ba941f 100644 +--- a/mem.so/CMakeLists.txt ++++ b/mem.so/CMakeLists.txt +@@ -134,18 +134,18 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. 
Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() + + ## define include directories +-include_directories(./ ../ ${ROCR_INC_DIR} ${HIP_INC_DIR}) ++include_directories(./ ../ ${ROCR_INC_DIR} ${HIP_INC_DIR} ${HIPRAND_INC_DIR} ${ROCRAND_INC_DIR}) + + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HIP_INC_DIR}/lib ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp src/rvs_memtest.cpp src/rvs_memworker.cpp) +diff --git a/pbqt.so/CMakeLists.txt b/pbqt.so/CMakeLists.txt +index 5ae675a..873a1e8 100644 +--- a/pbqt.so/CMakeLists.txt ++++ b/pbqt.so/CMakeLists.txt +@@ -136,11 +136,11 @@ if(NOT EXISTS ${ROCR_LIB_DIR}/${CORE_RUNTIME_LIBRARY}.so) + endif() + + ## define include directories +-include_directories(./ ../ pci ${ROCR_INC_DIR}) ++include_directories(./ ../ pci ${ROCR_INC_DIR} ${YAML_CPP_INCLUDE_DIRS} ${HIPRAND_INC_DIR} ${ROCRAND_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HSAKMT_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp src/action_run.cpp +diff --git a/pebb.so/CMakeLists.txt b/pebb.so/CMakeLists.txt +index c4e2964..41a45f5 100644 +--- a/pebb.so/CMakeLists.txt ++++ b/pebb.so/CMakeLists.txt +@@ -137,11 +137,11 @@ if(NOT EXISTS ${ROCR_LIB_DIR}/${CORE_RUNTIME_LIBRARY}.so) + endif() + + ## define include directories +-include_directories(./ ../ pci ${ROCR_INC_DIR}) ++include_directories(./ ../ pci ${ROCR_INC_DIR} ${HIPRAND_INC_DIR} ${ROCRAND_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HSAKMT_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR} ${HIPRAND_LIB_DIR}/.. ${ROCRAND_LIB_DIR}/..) 
+ ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp src/action_run.cpp +diff --git a/peqt.so/CMakeLists.txt b/peqt.so/CMakeLists.txt +index ead507d..d83c9e5 100644 +--- a/peqt.so/CMakeLists.txt ++++ b/peqt.so/CMakeLists.txt +@@ -107,11 +107,11 @@ else() + endif() + + ## define include directories +-include_directories(./ ../) ++include_directories(./ ../ ${HSA_PATH}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${HSA_PATH}/lib/ ${HSAKMT_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpci.so libm.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp) +@@ -121,7 +121,7 @@ add_library( ${RVS_TARGET} SHARED ${SOURCES}) + set_target_properties(${RVS_TARGET} PROPERTIES + SUFFIX .so.${LIB_VERSION_STRING} + LIBRARY_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) +-target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ) ++target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + add_dependencies(${RVS_TARGET} rvslib) + + add_custom_command(TARGET ${RVS_TARGET} POST_BUILD +diff --git a/perf.so/CMakeLists.txt b/perf.so/CMakeLists.txt +index 518dac9..dfe05f5 100644 +--- a/perf.so/CMakeLists.txt ++++ b/perf.so/CMakeLists.txt +@@ -137,17 +137,17 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. 
Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() + + ## define include directories +-include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR}) ++include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR} ${HIPRAND_INC_DIR} ${ROCRAND_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HIP_INC_DIR}/lib ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp src/perf_worker.cpp) +@@ -157,7 +157,7 @@ add_library( ${RVS_TARGET} SHARED ${SOURCES}) + set_target_properties(${RVS_TARGET} PROPERTIES + SUFFIX .so.${LIB_VERSION_STRING} + LIBRARY_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) +-target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_HCC_LIB} ${ROCBLAS_LIB}) ++target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS}) + add_dependencies(${RVS_TARGET} rvslib) + + add_custom_command(TARGET ${RVS_TARGET} POST_BUILD +diff --git a/pesm.so/CMakeLists.txt b/pesm.so/CMakeLists.txt +index 1f27f34..502c1c8 100644 +--- a/pesm.so/CMakeLists.txt ++++ b/pesm.so/CMakeLists.txt +@@ -107,11 +107,11 @@ else() + endif() + + ## define include directories +-include_directories(./ ../ pci) ++include_directories(./ ../ pci ${YAML_CPP_INCLUDE_DIRS}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR} ${HIPRAND_DIR} ${ROCRAND_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS libpthread.so libpci.so libm.so ${PROJECT_LINK_LIBS} ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp src/worker.cpp) +diff --git a/pesm.so/tests.cmake b/pesm.so/tests.cmake +index 2c72658..c6acbf4 100644 +--- a/pesm.so/tests.cmake ++++ b/pesm.so/tests.cmake +@@ -30,11 +30,11 @@ set(CORE_RUNTIME_NAME "hsa-runtime") + set(CORE_RUNTIME_TARGET "${CORE_RUNTIME_NAME}64") + + set(UT_LINK_LIBS libpthread.so libpci.so libm.so libdl.so "lib${ROCM_SMI_LIB}.so" +- ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${YAML_CPP_LIBRARIES} ${HIPRAND_LIB} ++ ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${YAML_CPP_LIBRARIES} "lib${HIPRAND_LIB}.so" "lib${ROCRAND_LIB}.so" + ) + + # Add directories to look for library files to link +-link_directories(${ROCM_SMI_LIB_DIR} ${ROCT_LIB_DIR} ${ROCBLAS_LIB_DIR} ${HIPRAND_LIB_DIR}) ++link_directories(${ROCM_SMI_LIB_DIR} ${ROCT_LIB_DIR} ${ROCBLAS_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + + set (UT_SOURCES test/unitactionbase.cpp + ) +diff --git a/rcqt.so/CMakeLists.txt b/rcqt.so/CMakeLists.txt +index c0099ab..fcc82f3 100644 +--- a/rcqt.so/CMakeLists.txt ++++ b/rcqt.so/CMakeLists.txt +@@ -108,11 
+108,11 @@ else() + endif() + + ## define include directories +-include_directories(./ ../) ++include_directories(./ ../ ${YAML_CPP_INCLUDE_DIRS}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH} ${ASAN_LIB_PATH} ${HSAKMT_LIB_DIR} ${ROCM_SMI_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib) ++set (PROJECT_LINK_LIBS rvslib ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES +diff --git a/rvs/CMakeLists.txt b/rvs/CMakeLists.txt +index fc0118e..04c9abf 100644 +--- a/rvs/CMakeLists.txt ++++ b/rvs/CMakeLists.txt +@@ -34,6 +34,7 @@ set ( RVS "rvs" ) + set ( RVS_PACKAGE "rvs-roct" ) + set ( RVS_COMPONENT "lib${RVS}" ) + set ( RVS_TARGET "${RVS}" ) ++set ( YAML_CPP_LIBRARIES "${YAML_CPP_LIB_PATH}") + + project ( ${RVS_TARGET} ) + +@@ -115,20 +116,22 @@ endif() + ## define include directories + include_directories(./ ../ ${YAML_CPP_INCLUDE_DIRS}) + ## define lib directories +-link_directories(${CMAKE_CURRENT_BINARY_DIR} ${RVS_LIB_DIR} ${ROCT_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ASAN_LIB_PATH} ${HIPRAND_LIB_PATH}) ++link_directories(${CMAKE_CURRENT_BINARY_DIR} ${RVS_LIB_DIR} ${ROCT_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ASAN_LIB_PATH} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR} ${RVS_LIB_DIR}/.. ${YAML_CPP_LIBRARIES}) + + ## additional libraries +-set(ROCBLAS_LIB "rocblas") +-set(ROC_THUNK_NAME "hsakmt") +-set(CORE_RUNTIME_NAME "hsa-runtime") +-set(HIPRAND_LIB "hiprand") +-set(CORE_RUNTIME_TARGET "${CORE_RUNTIME_NAME}64") +-set(PROJECT_LINK_LIBS libdl.so libpthread.so libpci.so ${YAML_CPP_LIBRARIES}) ++set(ROCBLAS_LIB "${ROCBLAS_LIB_DIR}/librocblas.so") ++set(ROC_THUNK_NAME "${HSAKMT_LIB_DIR}/libhsakmt.a") ++set(CORE_RUNTIME_NAME "${HSA_PATH}/lib/libhsa-runtime64.so") ++set(YAML_CPP_LIB "${YAML_CPP_LIBRARIES}/libyaml-cpp.a") ++set(CORE_RUNTIME_TARGET "${CORE_RUNTIME_NAME}") ++set(PROJECT_LINK_LIBS libdl.so libpthread.so libpci.so) ++set(HIPRAND_LIB "${HIPRAND_LIB_DIR}/libhiprand.so") ++set(ROCRAND_LIB "${ROCRAND_LIB_DIR}/librocrand.so") + + ## define target + add_executable(${RVS_TARGET} src/rvs.cpp) + target_link_libraries(${RVS_TARGET} rvslib +- ${ROCBLAS_LIB} ${ROCM_SMI_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${PROJECT_LINK_LIBS} ${HIPRAND_LIB}) ++ ${ROCBLAS_LIB} ${ROCM_SMI_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${PROJECT_LINK_LIBS} ${HIPRAND_LIB} ${YAML_CPP_LIB} ${ROCRAND_LIB}) + add_dependencies(${RVS_TARGET} rvslib) + + install(TARGETS ${RVS_TARGET} +diff --git a/rvs/tests.cmake b/rvs/tests.cmake +index c519482..64a4ad0 100644 +--- a/rvs/tests.cmake ++++ b/rvs/tests.cmake +@@ -32,17 +32,18 @@ + + set(ROCBLAS_LIB "rocblas") + set(HIPRAND_LIB "hiprand") ++set(ROCRAND_LIB "rocrand") + set(ROC_THUNK_NAME "hsakmt") + set(CORE_RUNTIME_NAME "hsa-runtime") + set(CORE_RUNTIME_TARGET "${CORE_RUNTIME_NAME}64") + + ## define lib directories +-link_directories(${RVS_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ROCT_LIB_DIR} ${HIPRAND_LIB_DIR}) ++link_directories(${RVS_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ROCT_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + + ## define target for "test-to-fail" + add_executable(${RVS_TARGET}fail src/rvs.cpp) + target_link_libraries(${RVS_TARGET}fail rvslib rvslibut ${PROJECT_LINK_LIBS} +- ${ROCM_SMI_LIB} ${ROCBLAS_LIB} 
${ROC_THUNK_NAME} ${ROCM_CORE} ${CORE_RUNTIME_TARGET} ${HIPRAND_LIB}) ++ ${ROCM_SMI_LIB} ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${ROCM_CORE} ${CORE_RUNTIME_TARGET} ${HIPRAND_LIB} ${ROCRAND_LIB}) + + target_compile_definitions(${RVS_TARGET}fail PRIVATE RVS_INVERT_RETURN_STATUS) + set_target_properties(${RVS_TARGET}fail PROPERTIES +@@ -211,7 +212,7 @@ FOREACH(SINGLE_TEST ${TESTSOURCES}) + ${PROJECT_LINK_LIBS} + ${PROJECT_TEST_LINK_LIBS} + rvslib rvslibut gtest_main gtest pthread +- ${ROCM_SMI_LIB} ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${HIPRAND_LIB} ++ ${ROCM_SMI_LIB} ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${HIPRAND_LIB} ${ROCRAND_LIB} + ) + add_dependencies(${TEST_NAME} rvs_gtest_target) + +diff --git a/rvslib/CMakeLists.txt b/rvslib/CMakeLists.txt +index 8d29590..18eb9f4 100644 +--- a/rvslib/CMakeLists.txt ++++ b/rvslib/CMakeLists.txt +@@ -116,7 +116,7 @@ endif() + + ## define include directories + include_directories(./ ../ ../rvs +- ${ROCM_SMI_INC_DIR} ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR}) ++ ${ROCM_SMI_INC_DIR} ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_PATH} ${YAML_CPP_INCLUDE_DIRS} ${HIPRAND_INC_DIR} ${ROCRAND_INC_DIR}) + + link_directories(${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + +diff --git a/smqt.so/CMakeLists.txt b/smqt.so/CMakeLists.txt +index 042586f..285cb17 100644 +--- a/smqt.so/CMakeLists.txt ++++ b/smqt.so/CMakeLists.txt +@@ -106,11 +106,11 @@ else() + endif() + + ## define include directories +-include_directories(./ ../ pci) ++include_directories(./ ../ pci ${YAML_CPP_INCLUDE_DIRS}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpci.so libm.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp) +diff --git a/smqt.so/tests.cmake b/smqt.so/tests.cmake +index 76766de..804441a 100644 +--- a/smqt.so/tests.cmake ++++ b/smqt.so/tests.cmake +@@ -30,11 +30,11 @@ set(CORE_RUNTIME_NAME "hsa-runtime") + set(CORE_RUNTIME_TARGET "${CORE_RUNTIME_NAME}64") + + set(UT_LINK_LIBS libpthread.so libpci.so libm.so libdl.so "lib${ROCM_SMI_LIB}.so" +- ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${YAML_CPP_LIBRARIES} ${HIPRAND_LIB} ++ ${ROCBLAS_LIB} ${ROC_THUNK_NAME} ${CORE_RUNTIME_TARGET} ${ROCM_CORE} ${YAML_CPP_LIBRARIES} "lib${HIPRAND_LIB}.so" "lib${HIPRAND_LIB}.so" + ) + + # Add directories to look for library files to link +-link_directories(${ROCM_SMI_LIB_DIR} ${ROCT_LIB_DIR} ${ROCBLAS_LIB_DIR} ${HIPRAND_LIB_DIR}) ++link_directories(${ROCM_SMI_LIB_DIR} ${ROCT_LIB_DIR} ${ROCBLAS_LIB_DIR} ${HIPRAND_LIB_DIR} ${ROCRAND_LIB_DIR}) + + set (UT_SOURCES src/action.cpp test/unitsmqt.cpp + ) +diff --git a/testif.so/CMakeLists.txt b/testif.so/CMakeLists.txt +index 4cba0f9..691534a 100644 +--- a/testif.so/CMakeLists.txt ++++ b/testif.so/CMakeLists.txt +@@ -108,11 +108,11 @@ endif() + + + ## define include directories +-include_directories(./ ../ pci) ++include_directories(./ ../ pci ${YAML_CPP_INCLUDE_DIRS}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} 
${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries +-set (PROJECT_LINK_LIBS libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS libpthread.so libpci.so libm.so ${ROCBLAS_LIB_DIR}/librocblas.so ${HSAKMT_LIB_DIR}/libhsakmt.a ${HSA_PATH}/lib/libhsa-runtime64.so) + + ## define source files + ## set(SOURCES src/rvs_module.cpp src/action.cpp src/worker.cpp) +diff --git a/tst.so/CMakeLists.txt b/tst.so/CMakeLists.txt +index 1a1a8b0..c6e46da 100644 +--- a/tst.so/CMakeLists.txt ++++ b/tst.so/CMakeLists.txt +@@ -140,7 +140,7 @@ if(DEFINED RVS_ROCMSMI) + endif() + endif() + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -155,11 +155,11 @@ if(DEFINED RVS_ROCMSMI) + endif() + + ## define include directories +-include_directories(./ ../ ${ROCM_SMI_INC_DIR} ${ROCBLAS_INC_DIR} ${ROCR_INC_DIR} ${HIP_INC_DIR}) ++include_directories(./ ../ ${ROCM_SMI_INC_DIR} ${ROCBLAS_INC_DIR} ${ROCR_INC_DIR} ${HIP_INC_DIR} ${HIPRAND_INC_DIR} ${ROCRAND_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ASAN_LIB_PATH} ${HIPRAND_LIB_DIR}/.. ${ROCRAND_LIB_DIR}/..) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslib libpthread.so libpci.so libm.so ${HIPRAND_LIB_DIR}/libhiprand.so ${ROCRAND_LIB_DIR}/librocrand.so) + + set(SOURCES src/rvs_module.cpp src/action.cpp src/tst_worker.cpp ) + diff --git a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py index f3b06fefcd19b8..7cdce00f34af96 100644 --- a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py +++ b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py @@ -58,7 +58,8 @@ class RocmValidationSuite(CMakePackage): # It expects rocm components headers and libraries in /opt/rocm # It doesn't find package to include the library and include path without this patch. 
patch("009-replacing-rocm-path-with-package-path.patch", when="@6.0") - patch("009-replacing-rocm-path-with-package-path-6.1.patch", when="@6.1") + patch("009-replacing-rocm-path-with-package-path-6.1.patch", when="@6.1:6.2.0") + patch("009-replacing-rocm-path-with-package-path-6.2.1.patch", when="@6.2.1") depends_on("cmake@3.5:", type="build") depends_on("zlib-api", type="link") depends_on("yaml-cpp~shared") @@ -94,6 +95,9 @@ def setup_build_environment(self, build_env): depends_on(f"rocm-smi-lib@{ver}", when=f"@{ver}") depends_on(f"hsa-rocr-dev@{ver}", when=f"@{ver}") depends_on(f"hsakmt-roct@{ver}", when=f"@{ver}") + for ver in ["6.2.1"]: + depends_on(f"hiprand@{ver}", when=f"@{ver}") + depends_on(f"rocrand@{ver}", when=f"@{ver}") def patch(self): if self.spec.satisfies("@5.2:5.4"): @@ -104,7 +108,7 @@ def patch(self): filter_file( r"@ROCM_PATH@/rvs", self.spec.prefix.rvs, "rvs/conf/deviceid.sh.in", string=True ) - elif self.spec.satisfies("@6.0:"): + elif self.spec.satisfies("@6.0:6.1"): filter_file( "@ROCM_PATH@/rvs", self.spec.prefix.bin, "rvs/conf/deviceid.sh.in", string=True ) @@ -119,6 +123,9 @@ def cmake_args(self): self.define("YAML_CPP_INCLUDE_DIRS", self.spec["yaml-cpp"].prefix.include), self.define("UT_INC", self.spec["googletest"].prefix.include), ] + if self.spec.satisfies("@6.2.1:"): + args.append(self.define("HIPRAND_DIR", self.spec["hiprand"].prefix)), + args.append(self.define("ROCRAND_DIR", self.spec["rocrand"].prefix)), libloc = self.spec["googletest"].prefix.lib64 if not os.path.isdir(libloc): libloc = self.spec["googletest"].prefix.lib @@ -131,20 +138,5 @@ def cmake_args(self): if not os.path.isdir(libloc): libloc = self.spec["yaml-cpp"].prefix.lib args.append(self.define("YAML_CPP_LIB_PATH", libloc)) - if self.spec.satisfies("@6.2:"): - args.append( - self.define( - "CMAKE_CXX_FLAGS", - f"-I{self.spec['rocm-smi-lib'].prefix.include} " - f"-I{self.spec['rocblas'].prefix.include} " - f"-I{self.spec['yaml-cpp'].prefix.include} " - f"-L{self.spec['hip'].prefix.lib} " - f"-L{self.spec['hsa-rocr-dev'].prefix.lib} " - f"-L{self.spec['hsakmt-roct'].prefix.lib} " - f"-L{self.spec['rocm-smi-lib'].prefix.lib} " - f"-L{self.spec['rocblas'].prefix.lib} " - f"{libloc}/libyaml-cpp.a ", - ) - ) - args.append(self.define("CPACK_PACKAGING_INSTALL_PREFIX", self.spec.prefix)) + return args From 3fed7086180966329a5cee2e3f9c73cf34c5a532 Mon Sep 17 00:00:00 2001 From: Stephen Herbener <32968781+srherbener@users.noreply.github.com> Date: Mon, 11 Nov 2024 18:55:28 -0700 Subject: [PATCH 174/208] openmpi: add two_level_namespace variant for MacOS (#47202) * Add two_level_namespace variant (default is disabled) for MacOS to enable building executables and libraries with two level namespace enabled. * Addressed reviewer comments. * Moved two_level_namespace variant ahead of the patch that uses that variant to get concretize to work properly. 
* Removed extra print statements

---
 .../repos/builtin/packages/openmpi/package.py | 22 +++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py
index c82e58cfba6511..425156907245cb 100644
--- a/var/spack/repos/builtin/packages/openmpi/package.py
+++ b/var/spack/repos/builtin/packages/openmpi/package.py
@@ -575,6 +575,24 @@ class Openmpi(AutotoolsPackage, CudaPackage):
     variant("openshmem", default=False, description="Enable building OpenSHMEM")
     variant("debug", default=False, description="Make debug build", when="build_system=autotools")
 
+    variant(
+        "two_level_namespace",
+        default=False,
+        description="""Build shared libraries and programs
+built with the mpicc/mpifort/etc. compiler wrappers
+with '-Wl,-commons,use_dylibs' and without
+'-Wl,-flat_namespace'.""",
+    )
+
+    # Patch to allow two-level namespace on a MacOS platform when building
+    # openmpi. Unfortunately, the openmpi configure command has flat namespace
+    # hardwired in. In spack, this only works for openmpi up to versions 4,
+    # because for versions 5+ autoreconf is triggered (see below) and this
+    # patch needs to be applied (again) AFTER autoreconf ran.
+    @when("+two_level_namespace platform=darwin")
+    def patch(self):
+        filter_file(r"-flat_namespace", "-commons,use_dylibs", "configure")
+
     provides("mpi@:2.0", when="@:1.2")
     provides("mpi@:2.1", when="@1.3:1.7.2")
     provides("mpi@:2.2", when="@1.7.3:1.7.4")
@@ -997,11 +1015,15 @@ def die_without_fortran(self):
     def autoreconf(self, spec, prefix):
         perl = which("perl")
         perl("autogen.pl")
+        if spec.satisfies("+two_level_namespace platform=darwin"):
+            filter_file(r"-flat_namespace", "-commons,use_dylibs", "configure")
 
     @when("@5.0.0:5.0.1")
     def autoreconf(self, spec, prefix):
         perl = which("perl")
         perl("autogen.pl", "--force")
+        if spec.satisfies("+two_level_namespace platform=darwin"):
+            filter_file(r"-flat_namespace", "-commons,use_dylibs", "configure")
 
     def configure_args(self):
         spec = self.spec
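
A minimal usage sketch for the variant added above (illustrative only, not part
of the patch itself; it assumes a macOS host, and the spec and variant names are
taken from the diff):

    # opt in to two-level namespace linking on macOS
    spack install openmpi+two_level_namespace
    # the mpicc/mpifort wrappers then link with '-Wl,-commons,use_dylibs'
    # instead of the hardwired '-Wl,-flat_namespace'

From fc7125fdf3f594683d264e48160ca86717c2109d Mon Sep 17 00:00:00 2001
From: Wouter Deconinck
Date: Mon, 11 Nov 2024 19:58:18 -0600
Subject: [PATCH 175/208] py-fsspec-xrootd: new package (#47405)

* py-fsspec-xrootd: new package

* py-fsspec-xrootd: depends_on python@3.8:
---
 .../packages/py-fsspec-xrootd/package.py      | 26 +++++++++++++++++++
 1 file changed, 26 insertions(+)
 create mode 100644 var/spack/repos/builtin/packages/py-fsspec-xrootd/package.py

diff --git a/var/spack/repos/builtin/packages/py-fsspec-xrootd/package.py b/var/spack/repos/builtin/packages/py-fsspec-xrootd/package.py
new file mode 100644
index 00000000000000..b14291a6b51742
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-fsspec-xrootd/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.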
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyFsspecXrootd(PythonPackage):
+    """An XRootD implementation for fsspec."""
+
+    homepage = "https://coffeateam.github.io/fsspec-xrootd/"
+    pypi = "fsspec_xrootd/fsspec_xrootd-0.4.0.tar.gz"
+
+    maintainers("wdconinc")
+
+    license("BSD-3-Clause", checked_by="wdconinc")
+
+    version("0.4.0", sha256="d7f124430d26ab9139d33bc50fa8abfde3624db5dcaa5c18f56af9bf17f16f13")
+
+    depends_on("python@3.8:", type=("build", "run"))
+
+    depends_on("py-setuptools@42:", type="build")
+    depends_on("py-setuptools-scm@3.4:+toml", type="build")
+
+    depends_on("py-fsspec", type=("build", "run"))

From 370694f11232e33e4fce39da1a7f3644d45da9bd Mon Sep 17 00:00:00 2001
From: MatthewLieber <77356607+MatthewLieber@users.noreply.github.com>
Date: Mon, 11 Nov 2024 21:01:39 -0500
Subject: [PATCH 176/208] osu-micro-benchmarks: add v7.5 (#47423)

* Adding sha for 7.4 release of OSU Micro Benchmarks

* Adds the sha256sum for the OSU micro benchmarks 7.5 release.
---
 var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py
index bf2c2e505dc20c..204e4e97f74a4b 100644
--- a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py
+++ b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py
@@ -20,6 +20,7 @@ class OsuMicroBenchmarks(AutotoolsPackage, CudaPackage, ROCmPackage):
 
     maintainers("natshineman", "harisubramoni", "MatthewLieber")
 
+    version("7.5", sha256="1cf84ac5419456202757a757c5f9a4f5c6ecd05c65783c7976421cfd6020b3b3")
     version("7.4", sha256="1edd0c2efa61999409bfb28740a7f39689a5b42b1a1b4c66d1656e5637f7cefc")
     version("7.3", sha256="8fa25b8aaa34e4b07ab3a4f30b7690ab46b038b08d204a853a9b6aa7bdb02f2f")
     version("7.2", sha256="1a4e1f2aab0e65404b3414e23bd46616184b69b6231ce9313d9c630bd6e633c1")

From 42fd1cafe6c5c7da2c4d0b4dbefe955e25f0c24d Mon Sep 17 00:00:00 2001
From: Sreenivasa Murthy Kolam
Date: Tue, 12 Nov 2024 07:35:21 +0530
Subject: [PATCH 177/208] Fix the build error during compilation of rocdecode
 package (#47283)

* fix the build error during compilation of rocdecode; it was dependent on the
  libva-devel package

* address review comment

* address review changes; commit the changes
---
 .../repos/builtin/packages/libva/package.py   | 46 +++++++++++++++++++
 .../builtin/packages/rocdecode/package.py     |  2 +
 2 files changed, 48 insertions(+)
 create mode 100644 var/spack/repos/builtin/packages/libva/package.py

diff --git a/var/spack/repos/builtin/packages/libva/package.py b/var/spack/repos/builtin/packages/libva/package.py
new file mode 100644
index 00000000000000..538ca8914ea654
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libva/package.py
@@ -0,0 +1,46 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class Libva(AutotoolsPackage):
+    """Libva is an implementation for VA-API (Video Acceleration API).
+    VA-API is an open-source library and API specification, which provides
+    access to graphics hardware acceleration capabilities for video
+    processing. It consists of a main library and driver-specific
+    acceleration backends for each supported hardware vendor."""
+
+    homepage = "https://github.com/intel/libva"
+    url = "https://github.com/intel/libva/archive/refs/tags/2.22.0.tar.gz"
+
+    version("2.22.0", sha256="467c418c2640a178c6baad5be2e00d569842123763b80507721ab87eb7af8735")
+
+    depends_on("c", type="build")
+    depends_on("cxx", type="build")
+
+    depends_on("autoconf", type="build")
+    depends_on("automake", type="build")
+    depends_on("libtool", type="build")
+    depends_on("m4", type="build")
+    depends_on("pkgconfig", type="build")
+
+    depends_on("libdrm")
+    depends_on("libx11", when="^[virtuals=gl] glx")
+    depends_on("libxext", when="^[virtuals=gl] glx")
+
+    def autoreconf(self, spec, prefix):
+        autogen = Executable("./autogen.sh")
+        autogen()
+
+    def configure_args(self):
+        spec = self.spec
+        args = ["--disable-x11", "--disable-wayland", "--disable-glx", "--enable-libdrm"]
+        if spec.satisfies("^[virtuals=gl] glx"):
+            args.append("--enable-x11")
+        else:
+            args.append("--disable-x11")
+        return args

diff --git a/var/spack/repos/builtin/packages/rocdecode/package.py b/var/spack/repos/builtin/packages/rocdecode/package.py
index 1f284de531949f..bac7679a15b5b4 100644
--- a/var/spack/repos/builtin/packages/rocdecode/package.py
+++ b/var/spack/repos/builtin/packages/rocdecode/package.py
@@ -33,6 +33,8 @@ class Rocdecode(CMakePackage):
         sticky=True,
     )

+    depends_on("libva", type="build", when="@6.2:")
+
     for ver in ["6.1.0", "6.1.1", "6.1.2", "6.2.0", "6.2.1"]:
         depends_on(f"hip@{ver}", when=f"@{ver}")

From 37de92e7a23ed6d22a8713f21e195bdc379527ab Mon Sep 17 00:00:00 2001
From: Tim Haines
Date: Mon, 11 Nov 2024 20:09:03 -0600
Subject: [PATCH 178/208] extrae: Update dyninst dependency (#47359)

---
 .../packages/extrae/dyninst_instruction.patch | 19 ++++++++++++
 .../repos/builtin/packages/extrae/package.py  | 30 ++++++++++++++-----
 2 files changed, 42 insertions(+), 7 deletions(-)
 create mode 100644 var/spack/repos/builtin/packages/extrae/dyninst_instruction.patch

diff --git a/var/spack/repos/builtin/packages/extrae/dyninst_instruction.patch b/var/spack/repos/builtin/packages/extrae/dyninst_instruction.patch
new file mode 100644
index 00000000000000..08c691c4f74711
--- /dev/null
+++ b/var/spack/repos/builtin/packages/extrae/dyninst_instruction.patch
@@ -0,0 +1,19 @@
+diff --git a/src/launcher/dyninst/commonSnippets.C b/src/launcher/dyninst/commonSnippets.C
+index 94904a23..2f918949 100644
+--- a/src/launcher/dyninst/commonSnippets.C
++++ b/src/launcher/dyninst/commonSnippets.C
+@@ -482,9 +482,10 @@ string decodeBasicBlocks(BPatch_function * function, string routine)
+      ParseAPI::Block* b = ParseAPI::convert(block);
+      void * buf = b->region()->getPtrToInstruction(b->start());
+      InstructionAPI::InstructionDecoder dec((unsigned char*)buf,b->size(),b->region()->getArch());
+-      InstructionAPI::Instruction::Ptr insn;
+-      while((insn = dec.decode())) {
+-        res << insn->format() << endl;
++      InstructionAPI::Instruction insn = dec.decode();
++      while(insn.isValid()) {
++        res << insn.format() << endl;
++        insn = dec.decode();
From: Luke Diorio-Toth
Date: Mon, 11 Nov 2024 20:13:46 -0600
Subject: [PATCH 179/208] packages: new versions (`diamond`, `py-alive-progress`, `py-bakta`, `py-deepsig-biocomp`), new packages (`py-pyhmmer`, `py-pyrodigal`) (#47277)

* added updated versions
* added pyhmmer
* updated infernal
* fix blast-plus for apple-clang
* fix py-biopython build on apple-clang
* remove erroneous biopython dep: build issue is with python 3.8, not biopython
* deepsig python 3.9: expanding unnecessary python restrictions
* add pyrodigal
* fix
unnecessarily strict diamond version * builds and updates: blast-plus indexing broken, still need to test db download and bakta pipeline * builds and runs * revert blast-plus changes: remove my personal hacks to get blast-plus to build --- .../repos/builtin/packages/diamond/package.py | 1 + .../packages/py-alive-progress/package.py | 5 ++- .../builtin/packages/py-bakta/package.py | 43 ++++++++++++++----- .../packages/py-deepsig-biocomp/package.py | 6 +-- .../builtin/packages/py-pyhmmer/package.py | 27 ++++++++++++ .../builtin/packages/py-pyrodigal/package.py | 25 +++++++++++ 6 files changed, 92 insertions(+), 15 deletions(-) create mode 100644 var/spack/repos/builtin/packages/py-pyhmmer/package.py create mode 100644 var/spack/repos/builtin/packages/py-pyrodigal/package.py diff --git a/var/spack/repos/builtin/packages/diamond/package.py b/var/spack/repos/builtin/packages/diamond/package.py index c87e675eaebd5d..1ce6bcae9df68f 100644 --- a/var/spack/repos/builtin/packages/diamond/package.py +++ b/var/spack/repos/builtin/packages/diamond/package.py @@ -16,6 +16,7 @@ class Diamond(CMakePackage): license("GPL-3.0-only") + version("2.1.10", sha256="c6ede5df30d9d496af314e740964c35a0e358458d9c8d9b8dd517d69828d9981") version("2.1.9", sha256="4cde9df78c63e8aef9df1e3265cd06a93ce1b047d6dba513a1437719b70e9d88") version("2.1.8", sha256="b6088259f2bc92d1f9dc4add44590cff68321bcbf91eefbc295a3525118b9415") version("2.1.7", sha256="2dcaba0e79ecb02c3d2a6816d317e714767118a9a056721643abff4c586ca95b") diff --git a/var/spack/repos/builtin/packages/py-alive-progress/package.py b/var/spack/repos/builtin/packages/py-alive-progress/package.py index 31c011e687a8e6..0342326115af66 100644 --- a/var/spack/repos/builtin/packages/py-alive-progress/package.py +++ b/var/spack/repos/builtin/packages/py-alive-progress/package.py @@ -16,12 +16,15 @@ class PyAliveProgress(PythonPackage): license("MIT") + version("3.2.0", sha256="ede29d046ff454fe56b941f686f89dd9389430c4a5b7658e445cb0b80e0e4deb") version("2.4.1", sha256="089757c8197f27ad972ba27e1060f6db92368d83c736884e159034fd74865323") version("1.6.2", sha256="642e1ce98becf226c8c36bf24e10221085998c5465a357a66fb83b7dc618b43e") depends_on("python@2.7:3", type=("build", "run")) depends_on("python@3.6:3", type=("build", "run"), when="@2:") depends_on("python@3.7:3", type=("build", "run"), when="@2.2:") + depends_on("python@3.9:3", type=("build", "run"), when="@3.2:") depends_on("py-setuptools", type="build") - depends_on("py-about-time@3.1.1", type=("build", "run"), when="@2.4.1:") + depends_on("py-about-time@3.1.1", type=("build", "run"), when="@2.4.1") + depends_on("py-about-time@4.2.1", type=("build", "run"), when="@3:") depends_on("py-grapheme@0.6.0", type=("build", "run"), when="@2.4.1:") diff --git a/var/spack/repos/builtin/packages/py-bakta/package.py b/var/spack/repos/builtin/packages/py-bakta/package.py index 57dcbc64bb398c..edec5d634fd1ac 100644 --- a/var/spack/repos/builtin/packages/py-bakta/package.py +++ b/var/spack/repos/builtin/packages/py-bakta/package.py @@ -18,21 +18,42 @@ class PyBakta(PythonPackage): license("GPL-3.0-only") + version("1.9.4", sha256="10330a10e459144dc78daa26f3a73674799706e2e1653e080366b1bbb9e5a5d9") version("1.5.1", sha256="36781612c4eaa99e6e24a00e8ab5b27dadf21c98ae6d16432f3e78c96a4adb5d") - depends_on("python@3.8:", type=("build", "run")) + variant("deepsig", default=True, description="builds with deepsig to predict signal peptides") + + depends_on("python@3.8:3.10", type=("build", "run")) depends_on("py-setuptools", type=("build", "run")) 
depends_on("py-biopython@1.78:", type=("build", "run")) - depends_on("py-xopen@1.1.0:", type=("build", "run")) + depends_on("py-xopen@1.5.0:", when="@1.8.2:", type=("build", "run")) + depends_on("py-xopen@1.1.0:", when="@:1.8.1", type=("build", "run")) depends_on("py-requests@2.25.1:", type=("build", "run")) - depends_on("py-alive-progress@1.6.2", type=("build", "run")) - depends_on("trnascan-se@2.0.8:", type=("build", "run")) - depends_on("aragorn@1.2.38:", type=("build", "run")) + depends_on("py-alive-progress@3.0.1:", when="@1.7.0:", type=("build", "run")) + depends_on("py-alive-progress@1.6.2", when="@:1.6.1", type=("build", "run")) + depends_on("py-pyyaml@6.0:", when="@1.6.0:", type=("build", "run")) + depends_on("trnascan-se@2.0.11:", when="@1.6.0:", type=("build", "run")) + depends_on("trnascan-se@2.0.8:", when="@:1.5.1", type=("build", "run")) + depends_on("aragorn@1.2.41:", when="@1.7.0:", type=("build", "run")) + depends_on("aragorn@1.2.38:", when="@:1.6.1", type=("build", "run")) depends_on("infernal@1.1.4:", type=("build", "run")) depends_on("pilercr@1.06:", type=("build", "run")) - depends_on("prodigal@2.6.3:", type=("build", "run")) - depends_on("hmmer@3.3.2:", type=("build", "run")) - depends_on("diamond@2.0.14:", type=("build", "run")) - depends_on("blast-plus@2.12.0:", type=("build", "run")) - depends_on("amrfinder@3.10.23:", type=("build", "run")) - depends_on("py-deepsig-biocomp@1.2.5:", type=("build", "run")) + depends_on("py-pyrodigal@3.1.0:", when="@1.9.0:", type=("build", "run")) + depends_on("py-pyrodigal@2.1.0:", when="@1.7.0:1.8.2", type=("build", "run")) + depends_on("py-pyrodigal@2.0.2:", when="@1.6.0:1.6.1", type=("build", "run")) + depends_on("prodigal@2.6.3:", when="@:1.5.1", type=("build", "run")) + depends_on("hmmer@3.3.2:", when="@:1.8.1", type=("build", "run")) + depends_on("py-pyhmmer@0.10.4:", when="@1.9.4:", type=("build", "run")) + depends_on("py-pyhmmer@0.10.0:", when="@1.8.2:1.9.3", type=("build", "run")) + # known bug with diamond v2.1.9 + # see https://github.com/oschwengers/bakta/issues/290 + depends_on("diamond@2.1.8,2.1.10:", when="@1.9.0:", type=("build", "run")) + depends_on("diamond@2.0.14:", when="@:1.8.2", type=("build", "run")) + depends_on("blast-plus@2.14.0:", when="@1.9.0:", type=("build", "run")) + depends_on("blast-plus@2.12.0:", when="@:1.8.2", type=("build", "run")) + depends_on("amrfinder@3.11.26:", when="@1.9.0:", type=("build", "run")) + depends_on("amrfinder@3.10.23:", when="@1.5.1", type=("build", "run")) + depends_on("circos@0.69.8:", when="@1.6.0:", type=("build", "run")) + depends_on("py-deepsig-biocomp@1.2.5:", when="+deepsig", type=("build", "run")) + + conflicts("platform=darwin", when="+deepsig") diff --git a/var/spack/repos/builtin/packages/py-deepsig-biocomp/package.py b/var/spack/repos/builtin/packages/py-deepsig-biocomp/package.py index 01b527251fc52f..8c1abc2e8ff378 100644 --- a/var/spack/repos/builtin/packages/py-deepsig-biocomp/package.py +++ b/var/spack/repos/builtin/packages/py-deepsig-biocomp/package.py @@ -19,11 +19,11 @@ class PyDeepsigBiocomp(PythonPackage): version("1.2.5", sha256="e954b815d63c221c564c7d3fe27123d7cd2c39b191d6107369ab095d506496e0") - depends_on("python@3.8", type=("build", "run")) + depends_on("python@3.8:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-biopython@1.78:", type=("build", "run")) - depends_on("py-keras@2.4.3", type=("build", "run")) - depends_on("py-tensorflow@2.2.0", type=("build", "run")) + depends_on("py-keras@2.4.3:", type=("build", 
"run")) + depends_on("py-tensorflow@2.2.0:", type=("build", "run")) depends_on("py-tensorboard", type=("build", "run")) @run_after("install") diff --git a/var/spack/repos/builtin/packages/py-pyhmmer/package.py b/var/spack/repos/builtin/packages/py-pyhmmer/package.py new file mode 100644 index 00000000000000..b263e89389ea1d --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyhmmer/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPyhmmer(PythonPackage): + """HMMER is a biological sequence analysis tool that uses profile hidden + Markov models to search for sequence homologs. HMMER3 is developed and + maintained by the Eddy/Rivas Laboratory at Harvard University. pyhmmer + is a Python package, implemented using the Cython language, that provides + bindings to HMMER3.""" + + homepage = "https://github.com/althonos/pyhmmer" + pypi = "pyhmmer/pyhmmer-0.10.14.tar.gz" + + license("MIT", checked_by="luke-dt") + + version("0.10.15", sha256="bf8e97ce8da6fb5850298f3074640f3e998d5a655877f865c1592eb057dc7921") + version("0.10.14", sha256="eb50bdfdf67a3b1fecfe877d7ca6d9bade9a9f3dea3ad60c959453bbb235573d") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools@46.4:", type="build") + depends_on("py-cython@3.0", type="build") + depends_on("py-psutil@5.8:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pyrodigal/package.py b/var/spack/repos/builtin/packages/py-pyrodigal/package.py new file mode 100644 index 00000000000000..6bd206f58b1c1a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyrodigal/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPyrodigal(PythonPackage): + """Cython bindings and Python interface to Prodigal, an ORF finder for + genomes and metagenomes""" + + homepage = "https://github.com/althonos/pyrodigal" + pypi = "pyrodigal/pyrodigal-3.5.2.tar.gz" + + license("GPL-3.0", checked_by="luke-dt") + + version("3.5.2", sha256="2a40eb6113e720ada51c326958b295944cdc33ecee9f25d5bad4e9a8e6e6f7f5") + + depends_on("c", type="build") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools@46.4:", type="build") + depends_on("py-archspec@0.2.0:", type="build") + depends_on("py-cython@3.0:", type=("build", "run")) From 65929888dec905180a9330d3b19751394da39a1f Mon Sep 17 00:00:00 2001 From: Alberto Sartori Date: Tue, 12 Nov 2024 03:15:07 +0100 Subject: [PATCH 180/208] justbuild: add version 1.4.0 (#47410) --- var/spack/repos/builtin/packages/justbuild/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/justbuild/package.py b/var/spack/repos/builtin/packages/justbuild/package.py index 18c4939dd26651..1cf96c1bbb09ca 100644 --- a/var/spack/repos/builtin/packages/justbuild/package.py +++ b/var/spack/repos/builtin/packages/justbuild/package.py @@ -24,6 +24,7 @@ class Justbuild(Package): license("Apache-2.0") version("master", branch="master") + version("1.4.0", tag="v1.4.0", commit="562bddf70175a602f896397f41ee5f5e07e834eb") version("1.3.2", tag="v1.3.2", commit="27a56845398b07471f8185648a79a63f97851659") version("1.3.1", tag="v1.3.1", commit="b248838ed0f01bc5824caee3a555e7fd22d5ad10") version("1.3.0", tag="v1.3.0", commit="a7be2417f358049e6a0e28e01bc4020d8de2fdc5") From f0f5ffa9de8e272186783f1faf1566136d5a0802 Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Tue, 12 Nov 2024 03:17:36 +0100 Subject: [PATCH 181/208] libunwind: Add 1.7.2, 1.8.1, and new *-stable branches (#47412) * libunwind: Add 1.7.2 and 1.8.1 * libunwind: Remove deprecated 1.1 version * libunwind: Add newer *-stable branches: Remove 1.5-stable branch as well as cleanup. 
* libunwind: Use GitHub url for all versions * libunwind: Add conflict for PPC and 1.8.* * libunwind: Add conflict for aarch64 and 1.8: Build fails with aarch64/Gos-linux.c: In function '_ULaarch64_local_resume': aarch64/Gos-linux.c:147:1: error: x29 cannot be used in asm here } ^ aarch64/Gos-linux.c:147:1: error: x29 cannot be used in asm here make[2]: *** [Makefile:4795: aarch64/Los-linux.lo] Error 1 --- .../builtin/packages/libunwind/package.py | 27 +++++++++++++------ 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/libunwind/package.py b/var/spack/repos/builtin/packages/libunwind/package.py index 23b2b497c9dbeb..9f6df14fd642ac 100644 --- a/var/spack/repos/builtin/packages/libunwind/package.py +++ b/var/spack/repos/builtin/packages/libunwind/package.py @@ -11,25 +11,23 @@ class Libunwind(AutotoolsPackage): the call-chain of a program.""" homepage = "https://www.nongnu.org/libunwind/" - url = "http://download.savannah.gnu.org/releases/libunwind/libunwind-1.1.tar.gz" + url = "https://github.com/libunwind/libunwind/releases/download/v0.0.0/libunwind-0.0.0.tar.gz" git = "https://github.com/libunwind/libunwind" maintainers("mwkrentel") license("MIT") version("master", branch="master") + version("1.8-stable", branch="v1.8-stable") + version("1.8.1", sha256="ddf0e32dd5fafe5283198d37e4bf9decf7ba1770b6e7e006c33e6df79e6a6157") + version("1.7-stable", branch="v1.7-stable") + version("1.7.2", sha256="a18a6a24307443a8ace7a8acc2ce79fbbe6826cd0edf98d6326d0225d6a5d6e6") version("1.6-stable", branch="v1.6-stable") version("1.6.2", sha256="4a6aec666991fb45d0889c44aede8ad6eb108071c3554fcdff671f9c94794976") - version("1.5-stable", branch="v1.5-stable") version("1.5.0", sha256="90337653d92d4a13de590781371c604f9031cdb50520366aa1e3a91e1efb1017") version("1.4.0", sha256="df59c931bd4d7ebfd83ee481c943edf015138089b8e50abed8d9c57ba9338435") version("1.3.1", sha256="43997a3939b6ccdf2f669b50fdb8a4d3205374728c2923ddc2354c65260214f8") version("1.2.1", sha256="3f3ecb90e28cbe53fba7a4a27ccce7aad188d3210bb1964a923a731a27a75acb") - version( - "1.1", - sha256="9dfe0fcae2a866de9d3942c66995e4b460230446887dbdab302d41a8aee8d09a", - deprecated=True, - ) depends_on("c", type="build") # generated depends_on("cxx", type="build") # generated @@ -83,7 +81,7 @@ class Libunwind(AutotoolsPackage): # The libunwind releases contain the autotools generated files, # but the git repo snapshots do not. 
- reconf_versions = "@master,1.5-stable,1.6-stable" + reconf_versions = "@master,1.6-stable,1.7-stable,1.8-stable" depends_on("autoconf", type="build", when=reconf_versions) depends_on("automake", type="build", when=reconf_versions) depends_on("libtool", type="build", when=reconf_versions) @@ -94,8 +92,21 @@ class Libunwind(AutotoolsPackage): conflicts("platform=darwin", msg="Non-GNU libunwind needs ELF libraries Darwin does not have") + # Introduced in https://github.com/libunwind/libunwind/pull/555, fixed in + # https://github.com/libunwind/libunwind/pull/723 + conflicts("target=ppc64:", when="@1.8") + conflicts("target=ppc64le:", when="@1.8") + + conflicts("target=aarch64:", when="@1.8:") + provides("unwind") + def url_for_version(self, version): + if version == Version("1.5.0"): + return f"https://github.com/libunwind/libunwind/releases/download/v{version.up_to(2)}/libunwind-{version}.tar.gz" + else: + return super().url_for_version(version) + def flag_handler(self, name, flags): wrapper_flags = [] From ac703bc88d3dbdc8b8a3c93368562e584f46c89c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= <765740+giordano@users.noreply.github.com> Date: Tue, 12 Nov 2024 04:34:32 +0000 Subject: [PATCH 182/208] prometheus: add v2.55.1 (#47544) --- var/spack/repos/builtin/packages/prometheus/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/prometheus/package.py b/var/spack/repos/builtin/packages/prometheus/package.py index 3e6c8baab10b03..1a818f8a492cf2 100644 --- a/var/spack/repos/builtin/packages/prometheus/package.py +++ b/var/spack/repos/builtin/packages/prometheus/package.py @@ -15,6 +15,7 @@ class Prometheus(MakefilePackage): license("Apache-2.0") + version("2.55.1", sha256="f48251f5c89eea6d3b43814499d558bacc4829265419ee69be49c5af98f79573") version("2.19.2", sha256="d4e84cae2fed6761bb8a80fcc69b6e0e9f274d19dffc0f38fb5845f11da1bbc3") version("2.19.1", sha256="b72b9b6bdbae246dcc29ef354d429425eb3c0a6e1596fc8b29b502578a4ce045") version("2.18.2", sha256="a26c106c97d81506e3a20699145c11ea2cce936427a0e96eb2fd0dc7cd1945ba") @@ -26,6 +27,7 @@ class Prometheus(MakefilePackage): depends_on("go", type="build") depends_on("node-js@11.10.1:", type="build") depends_on("yarn", type="build") + depends_on("npm", type="build", when="@2.55.1:") def build(self, spec, prefix): make("build", parallel=False) @@ -34,5 +36,6 @@ def install(self, spec, prefix): mkdirp(prefix.bin) install("prometheus", prefix.bin) install("promtool", prefix.bin) - install("tsdb/tsdb", prefix.bin) + if spec.satisfies("@:2.19.2"): + install("tsdb/tsdb", prefix.bin) install_tree("documentation", prefix.documentation) From def161374191190ed89940c4943955d940fd9993 Mon Sep 17 00:00:00 2001 From: Tim Haines Date: Mon, 11 Nov 2024 22:44:33 -0600 Subject: [PATCH 183/208] gdb: add version 15.2 (#47540) --- var/spack/repos/builtin/packages/gdb/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/gdb/package.py b/var/spack/repos/builtin/packages/gdb/package.py index 4b500ed36af40e..4d0f7d0ccfc1fd 100644 --- a/var/spack/repos/builtin/packages/gdb/package.py +++ b/var/spack/repos/builtin/packages/gdb/package.py @@ -21,6 +21,7 @@ class Gdb(AutotoolsPackage, GNUMirrorPackage): license("GPL-3.0-or-later AND LGPL-3.0-or-later") + version("15.2", sha256="9d16bc2539a2a20dc3ef99b48b8414d51c51305c8577eb7a1da00996f6dea223") version("14.2", sha256="2de5174762e959a5e529e20c20d88a04735469d8fffd98f61664e70b341dc47c") version("14.1", 
sha256="683e63182fb72bd5d8db32ab388143796370a8e3e71c26bc264effb487db7927") version("13.2", sha256="7ead13d9e19fa0c57bb19104e1a5f67eefa9fc79f2e6360de491e8fddeda1e30") From 91310d3ae6adb7d222e6cf2b8df7326fe72564de Mon Sep 17 00:00:00 2001 From: Xavier Delaruelle Date: Tue, 12 Nov 2024 05:45:03 +0100 Subject: [PATCH 184/208] environment-modules: add version 5.5.0 (#47543) This new version is compatible with Tcl 9.0. It also requires 'util-linux' for new logging capabilities. --- .../builtin/packages/environment-modules/package.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/environment-modules/package.py b/var/spack/repos/builtin/packages/environment-modules/package.py index 173c340162fd3c..2b28e5651d161e 100644 --- a/var/spack/repos/builtin/packages/environment-modules/package.py +++ b/var/spack/repos/builtin/packages/environment-modules/package.py @@ -13,12 +13,13 @@ class EnvironmentModules(Package): """ homepage = "https://cea-hpc.github.io/modules/" - url = "https://github.com/cea-hpc/modules/releases/download/v5.4.0/modules-5.4.0.tar.gz" + url = "https://github.com/cea-hpc/modules/releases/download/v5.5.0/modules-5.5.0.tar.gz" git = "https://github.com/cea-hpc/modules.git" maintainers("xdelaruelle") version("main", branch="main") + version("5.5.0", sha256="ad0e360c7adc2515a99836863d98499b3ad89cd7548625499b20293845b040cb") version("5.4.0", sha256="586245cbf9420866078d8c28fce8ef4f192530c69a0f368f51e848340dcf3b90") version("5.3.1", sha256="d02f9ce4f8baf6c99edceb7c73bfdd1e97d77bcc4725810b86efed9f58dda962") version("5.3.0", sha256="21b8daa0181044ef65097a1e3517af1f24e7c7343cc5bdaf70be11e3cb0edb51") @@ -62,6 +63,7 @@ class EnvironmentModules(Package): variant("X", default=True, description="Build with X functionality") + depends_on("util-linux", type=("build", "run"), when="@5.5:") depends_on("less", type=("build", "run"), when="@4.1:") with when("@main"): depends_on("autoconf", type="build") @@ -75,7 +77,8 @@ class EnvironmentModules(Package): # Dependencies: depends_on("tcl", type=("build", "link", "run")) depends_on("tcl@8.4:", type=("build", "link", "run"), when="@4.0.0:4.8") - depends_on("tcl@8.5:", type=("build", "link", "run"), when="@5.0.0:") + depends_on("tcl@8.5:8", type=("build", "link", "run"), when="@5.0.0:5.4.0") + depends_on("tcl@8.5:", type=("build", "link", "run"), when="@5.5.0:") def install(self, spec, prefix): tcl = spec["tcl"] @@ -95,6 +98,9 @@ def install(self, spec, prefix): if spec.satisfies("~X"): config_args = ["--without-x"] + config_args + if self.spec.satisfies("@5.5.0:"): + config_args.extend(["--enable-conflict-unload"]) + if self.spec.satisfies("@4.4.0:4.8"): config_args.extend( [ @@ -140,6 +146,9 @@ def install(self, spec, prefix): ] ) + if self.spec.satisfies("@5.5:"): + config_args.append(f"--with-logger={str(self.spec['util-linux'].prefix.bin.logger)}") + if self.spec.satisfies("@4.1:"): config_args.append(f"--with-pager={str(self.spec['less'].prefix.bin.less)}") From ada4c208d4e72d798acac54820984b74ddb8cf62 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 11 Nov 2024 23:09:33 -0600 Subject: [PATCH 185/208] py-cryptography: add v43.0.3 (switch to maturin) (#47546) * py-cryptography: add v43.0.3 (switch to maturin) * py-cryptography: deny some setuptools versions * py-cryptography: depends_on py-setuptools-rust when @42, no range --------- Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --- .../builtin/packages/py-cryptography/package.py | 14 
+++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-cryptography/package.py b/var/spack/repos/builtin/packages/py-cryptography/package.py index a883c11e04ae4d..214deae9b1d7ed 100644 --- a/var/spack/repos/builtin/packages/py-cryptography/package.py +++ b/var/spack/repos/builtin/packages/py-cryptography/package.py @@ -15,6 +15,7 @@ class PyCryptography(PythonPackage): license("Apache-2.0") + version("43.0.3", sha256="315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805") version("42.0.8", sha256="8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2") version("41.0.7", sha256="13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc") version("41.0.3", sha256="6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34") @@ -41,9 +42,16 @@ class PyCryptography(PythonPackage): depends_on("py-setuptools@40.6:", when="@2.7:36", type="build") depends_on("py-setuptools@18.5:", when="@2.2:2.6", type="build") depends_on("py-setuptools@11.3:", when="@:2.1", type="build") - depends_on("py-setuptools-rust@1.7.0:", when="@42:", type=("build", "run")) - depends_on("py-setuptools-rust@0.11.4:", when="@3.4.2:", type="build") - depends_on("py-setuptools-rust@0.11.4:", when="@3.4:3.4.1", type=("build", "run")) + with when("@43:"): + depends_on("py-maturin@1", type="build") + conflicts( + "^py-setuptools@74.0.0,74.1.0,74.1.1,74.1.2,74.1.3,75.0.0,75.1.0,75.2.0", + msg="some setuptools version are incompatible", + ) + with when("@:42"): + depends_on("py-setuptools-rust@1.7.0:", when="@42", type=("build", "run")) + depends_on("py-setuptools-rust@0.11.4:", when="@3.4.2:", type="build") + depends_on("py-setuptools-rust@0.11.4:", when="@3.4:3.4.1", type=("build", "run")) depends_on("rust@1.56:", when="@41:", type="build") depends_on("rust@1.48:", when="@38:", type="build") depends_on("rust@1.41:", when="@3.4.5:", type="build") From e33cbac01fdfca24190bba0a9b0f28cad511788b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 12 Nov 2024 08:59:07 +0100 Subject: [PATCH 186/208] getting_started.rst: fix list of spack deps (#47557) --- lib/spack/docs/getting_started.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst index cb8a586e3c94c4..6654f50e775230 100644 --- a/lib/spack/docs/getting_started.rst +++ b/lib/spack/docs/getting_started.rst @@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo .. code-block:: console apt update - apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip + apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd .. tab-item:: RHEL @@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo dnf install epel-release dnf group install "Development Tools" - dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3 + dnf install gcc-gfortran redhat-lsb-core python3 unzip .. tab-item:: macOS Brew .. 
code-block:: console

       brew update
-      brew install curl gcc git gnupg zip
+      brew install gcc git zip

 ------------
 Installation

From 00e68af7949452175b49693056fe892cb4aa358b Mon Sep 17 00:00:00 2001
From: Harmen Stoppels
Date: Tue, 12 Nov 2024 14:51:33 +0100
Subject: [PATCH 187/208] llvm-amdgpu: add missing dependency on libxml2 (#47560)

---
 var/spack/repos/builtin/packages/llvm-amdgpu/package.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py
index 4fec91e9ea0667..6c98043a3add54 100644
--- a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py
+++ b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py
@@ -78,6 +78,7 @@ class LlvmAmdgpu(CMakePackage, CompilerPackage):
     depends_on("z3", type="link")
     depends_on("zlib-api", type="link")
     depends_on("ncurses+termlib", type="link")
+    depends_on("libxml2", type="link")
     depends_on("pkgconfig", type="build")

     # This flavour of LLVM doesn't work on MacOS, so we should ensure that it

From 99fd37931c117c2842c503d623aa9749891a9203 Mon Sep 17 00:00:00 2001
From: Sebastian Pipping
Date: Tue, 12 Nov 2024 15:10:00 +0100
Subject: [PATCH 188/208] expat: Add 2.6.4 with security fixes + deprecate vulnerable 2.6.3 (#47521)

---
 var/spack/repos/builtin/packages/expat/package.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/var/spack/repos/builtin/packages/expat/package.py b/var/spack/repos/builtin/packages/expat/package.py
index a41d4912de8415..485f773a82db83 100644
--- a/var/spack/repos/builtin/packages/expat/package.py
+++ b/var/spack/repos/builtin/packages/expat/package.py
@@ -16,9 +16,14 @@ class Expat(AutotoolsPackage, CMakePackage):
     url = "https://github.com/libexpat/libexpat/releases/download/R_2_2_9/expat-2.2.9.tar.bz2"

     license("MIT")
-
-    version("2.6.3", sha256="b8baef92f328eebcf731f4d18103951c61fa8c8ec21d5ff4202fb6f2198aeb2d")
-    # deprecate all releases before 2.6.3 because of security issues
+    version("2.6.4", sha256="8dc480b796163d4436e6f1352e71800a774f73dbae213f1860b60607d2a83ada")
+    # deprecate all releases before 2.6.4 because of security issues
+    # CVE-2024-50602 (fixed in 2.6.4)
+    version(
+        "2.6.3",
+        sha256="b8baef92f328eebcf731f4d18103951c61fa8c8ec21d5ff4202fb6f2198aeb2d",
+        deprecated=True,
+    )
     # CVE-2024-45490 (fixed in 2.6.3)
     # CVE-2024-45491 (fixed in 2.6.3)
     # CVE-2024-45492 (fixed in 2.6.3)

From e083acdc5dbf4b056181900c0205f3bfcf996d02 Mon Sep 17 00:00:00 2001
From: teddy
Date: Tue, 12 Nov 2024 17:04:20 +0100
Subject: [PATCH 189/208] costo: new package and to fix the build, add pkgconfig dep to vtk (#47121)

Co-authored-by: Bernhard Kaindl
---
 .../repos/builtin/packages/costo/package.py | 49 +++++++++++++++++++
 .../repos/builtin/packages/vtk/package.py   |  5 +-
 2 files changed, 52 insertions(+), 2 deletions(-)
 create mode 100644 var/spack/repos/builtin/packages/costo/package.py

diff --git a/var/spack/repos/builtin/packages/costo/package.py b/var/spack/repos/builtin/packages/costo/package.py
new file mode 100644
index 00000000000000..2e509974de1d87
--- /dev/null
+++ b/var/spack/repos/builtin/packages/costo/package.py
@@ -0,0 +1,49 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Costo(CMakePackage):
+    """costo stands for COSimulation TOols.
+    It is a layer above MPI to share data between meshes.
+ """ + + homepage = "https://gitlab.com/Te_ch/costo" + git = "https://gitlab.com/Te_ch/costo.git" + + maintainers("tech-91") + + license("LGPL-3.0-or-later") + + version("0.0.5", tag="v0.0.5", preferred=True) + version("develop", branch="devel") + version("main", branch="main", deprecated=True) + + variant("shared", default=True, description="Build shared library") + variant("tests", default=False, description="Enable testing") + + depends_on("mpi", type=all) + depends_on("python@3.10:", type=all) + + depends_on("py-non-regression-test-tools", type="build") + depends_on("py-pyvista", type=("build", "run")) + depends_on("py-numpy", type=("build", "link", "run")) + depends_on("py-mpi4py", type=("build", "run")) + depends_on("py-scipy", type=("build", "run")) + depends_on("py-mgmetis", type=("build", "run")) + depends_on("py-colorama", type=("build", "run")) + depends_on("py-pip", type="build") + + def cmake_args(self): + args = [ + # self.define("COSTO_ENABLE_TESTS", "OFF"), + self.define("COSTO_ENABLE_PYTHON_BINDINGS", "OFF"), + self.define("WITH_PYTHON_MODULE", "ON"), + self.define_from_variant("WITH_SHARED_LIBS", "shared"), + self.define_from_variant("WITH_TESTS", "tests"), + ] + + return args diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py index d2da1fe43e3bea..10e941e8dc07cb 100644 --- a/var/spack/repos/builtin/packages/vtk/package.py +++ b/var/spack/repos/builtin/packages/vtk/package.py @@ -54,8 +54,9 @@ class Vtk(CMakePackage): version("6.3.0", sha256="92a493354c5fa66bea73b5fc014154af5d9f3f6cee8d20a826f4cd5d4b0e8a5e") version("6.1.0", sha256="bd7df10a479606d529a8b71f466c44a2bdd11fd534c62ce0aa44fad91883fa34") - depends_on("c", type="build") # generated - depends_on("cxx", type="build") # generated + depends_on("c", type="build") + depends_on("cxx", type="build") + depends_on("pkgconfig", type="build", when="platform=linux") # VTK7 defaults to OpenGL2 rendering backend variant("opengl2", default=True, description="Enable OpenGL2 backend") From e7c9bb5258211bf26fcc17bce23f5ff839cfa900 Mon Sep 17 00:00:00 2001 From: Matthieu Dorier Date: Tue, 12 Nov 2024 17:53:34 +0000 Subject: [PATCH 190/208] py-constantly: add v23.10.4 (#47548) * py-constantly: added version 23.10.4 * py-constantly: fixed dependency on py-versioneer * py-constantly: updated py-versioneer dependency Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --------- Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --- var/spack/repos/builtin/packages/arrow/package.py | 1 + var/spack/repos/builtin/packages/py-constantly/package.py | 2 ++ var/spack/repos/builtin/packages/thrift/package.py | 1 + var/spack/repos/builtin/packages/xfsprogs/package.py | 1 + 4 files changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/arrow/package.py b/var/spack/repos/builtin/packages/arrow/package.py index 994579cdb9d297..53fc4bd8c67b79 100644 --- a/var/spack/repos/builtin/packages/arrow/package.py +++ b/var/spack/repos/builtin/packages/arrow/package.py @@ -17,6 +17,7 @@ class Arrow(CMakePackage, CudaPackage): license("Apache-2.0") + version("18.0.0", sha256="9c473f2c9914c59ab571761c9497cf0e5cfd3ea335f7782ccc6121f5cb99ae9b") version("16.1.0", sha256="9762d9ecc13d09de2a03f9c625a74db0d645cb012de1e9a10dfed0b4ddc09524") version("15.0.2", sha256="4735b349845bff1fe95ed11abbfed204eb092cabc37523aa13a80cb830fe5b5e") version("14.0.2", sha256="07cdb4da6795487c800526b2865c150ab7d80b8512a31793e6a7147c8ccd270f") diff --git 
a/var/spack/repos/builtin/packages/py-constantly/package.py b/var/spack/repos/builtin/packages/py-constantly/package.py index b569b524348d90..4e6216ee5a2fbf 100644 --- a/var/spack/repos/builtin/packages/py-constantly/package.py +++ b/var/spack/repos/builtin/packages/py-constantly/package.py @@ -14,6 +14,8 @@ class PyConstantly(PythonPackage): license("MIT") + version("23.10.4", sha256="aa92b70a33e2ac0bb33cd745eb61776594dc48764b06c35e0efd050b7f1c7cbd") version("15.1.0", sha256="586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35") depends_on("py-setuptools", type="build") + depends_on("py-versioneer+toml@0.29", type="build", when="@23.10.4:") diff --git a/var/spack/repos/builtin/packages/thrift/package.py b/var/spack/repos/builtin/packages/thrift/package.py index 14631e80bfa387..764b57b370dd77 100644 --- a/var/spack/repos/builtin/packages/thrift/package.py +++ b/var/spack/repos/builtin/packages/thrift/package.py @@ -65,6 +65,7 @@ class Thrift(Package): depends_on("ant", when="+java") extends("python", when="+python") + depends_on("python@:3.11.9", when="+python") depends_on("py-setuptools", type=("build", "run"), when="+python") depends_on("py-six@1.7.2:", type=("build", "run"), when="@0.10.0:+python") depends_on("py-tornado", type=("build", "run"), when="+python") diff --git a/var/spack/repos/builtin/packages/xfsprogs/package.py b/var/spack/repos/builtin/packages/xfsprogs/package.py index 2cc15a88613b0f..c80002bf29933c 100644 --- a/var/spack/repos/builtin/packages/xfsprogs/package.py +++ b/var/spack/repos/builtin/packages/xfsprogs/package.py @@ -14,6 +14,7 @@ class Xfsprogs(AutotoolsPackage): license("LGPL-2.1-or-later") + version("6.11.0", sha256="dae3bb432196f7b183b2e6bd5dc44bf33edbd7d0e85bd37d25c235df81b8100a") version("5.11.0", sha256="0e9c390fcdbb8a79e1b8f5e6e25fd529fc9f9c2ef8f2d5e647b3556b82d1b353") version("5.8.0", sha256="8ef46ed9e6bb927f407f541dc4324857c908ddf1374265edc910d23724048c6b") version("5.7.0", sha256="8f2348a68a686a3f4491dda5d62dd32d885fbc52d32875edd41e2c296e7b4f35") From acdcd1016a84ba3c7d2ecf95c6c61bc04795f4ae Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Tue, 12 Nov 2024 13:04:12 -0500 Subject: [PATCH 191/208] openssh: add v9.9p1 (#47555) --- .../repos/builtin/packages/openssh/package.py | 49 ++++++++++--------- 1 file changed, 26 insertions(+), 23 deletions(-) diff --git a/var/spack/repos/builtin/packages/openssh/package.py b/var/spack/repos/builtin/packages/openssh/package.py index 07ea7c9599b192..a31bbf46fb9585 100755 --- a/var/spack/repos/builtin/packages/openssh/package.py +++ b/var/spack/repos/builtin/packages/openssh/package.py @@ -25,35 +25,38 @@ class Openssh(AutotoolsPackage): license("SSH-OpenSSH") + version("9.9p1", sha256="b343fbcdbff87f15b1986e6e15d6d4fc9a7d36066be6b7fb507087ba8f966c02") version("9.8p1", sha256="dd8bd002a379b5d499dfb050dd1fa9af8029e80461f4bb6c523c49973f5a39f3") version("9.7p1", sha256="490426f766d82a2763fcacd8d83ea3d70798750c7bd2aff2e57dc5660f773ffd") version("9.6p1", sha256="910211c07255a8c5ad654391b40ee59800710dd8119dd5362de09385aa7a777c") version("9.5p1", sha256="f026e7b79ba7fb540f75182af96dc8a8f1db395f922bbc9f6ca603672686086b") version("9.4p1", sha256="3608fd9088db2163ceb3e600c85ab79d0de3d221e59192ea1923e23263866a85") version("9.3p1", sha256="e9baba7701a76a51f3d85a62c383a3c9dcd97fa900b859bc7db114c1868af8a8") - version("9.2p1", sha256="3f66dbf1655fb45f50e1c56da62ab01218c228807b21338d634ebcdf9d71cf46") - version("9.1p1", sha256="19f85009c7e3e23787f0236fbb1578392ab4d4bf9f8ec5fe6bc1cd7e8bfdd288") - version("9.0p1", 
sha256="03974302161e9ecce32153cfa10012f1e65c8f3750f573a73ab1befd5972a28a") - version("8.9p1", sha256="fd497654b7ab1686dac672fb83dfb4ba4096e8b5ffcdaccd262380ae58bec5e7") - version("8.8p1", sha256="4590890ea9bb9ace4f71ae331785a3a5823232435161960ed5fc86588f331fe9") - version("8.7p1", sha256="7ca34b8bb24ae9e50f33792b7091b3841d7e1b440ff57bc9fabddf01e2ed1e24") - version("8.6p1", sha256="c3e6e4da1621762c850d03b47eed1e48dff4cc9608ddeb547202a234df8ed7ae") - version("8.5p1", sha256="f52f3f41d429aa9918e38cf200af225ccdd8e66f052da572870c89737646ec25") - version("8.4p1", sha256="5a01d22e407eb1c05ba8a8f7c654d388a13e9f226e4ed33bd38748dafa1d2b24") - version("8.3p1", sha256="f2befbe0472fe7eb75d23340eb17531cb6b3aac24075e2066b41f814e12387b2") - version("8.1p1", sha256="02f5dbef3835d0753556f973cd57b4c19b6b1f6cd24c03445e23ac77ca1b93ff") - version("7.9p1", sha256="6b4b3ba2253d84ed3771c8050728d597c91cfce898713beb7b64a305b6f11aad") - version("7.6p1", sha256="a323caeeddfe145baaa0db16e98d784b1fbc7dd436a6bf1f479dfd5cd1d21723") - version("7.5p1", sha256="9846e3c5fab9f0547400b4d2c017992f914222b3fd1f8eee6c7dc6bc5e59f9f0") - version("7.4p1", sha256="1b1fc4a14e2024293181924ed24872e6f2e06293f3e8926a376b8aec481f19d1") - version("7.3p1", sha256="3ffb989a6dcaa69594c3b550d4855a5a2e1718ccdde7f5e36387b424220fbecc") - version("7.2p2", sha256="a72781d1a043876a224ff1b0032daa4094d87565a68528759c1c2cab5482548c") - version("7.1p2", sha256="dd75f024dcf21e06a0d6421d582690bf987a1f6323e32ad6619392f3bfde6bbd") - version("7.0p1", sha256="fd5932493a19f4c81153d812ee4e042b49bbd3b759ab3d9344abecc2bc1485e5") - version("6.9p1", sha256="6e074df538f357d440be6cf93dc581a21f22d39e236f217fcd8eacbb6c896cfe") - version("6.8p1", sha256="3ff64ce73ee124480b5bf767b9830d7d3c03bbcb6abe716b78f0192c37ce160e") - version("6.7p1", sha256="b2f8394eae858dabbdef7dac10b99aec00c95462753e80342e530bbb6f725507") - version("6.6p1", sha256="48c1f0664b4534875038004cc4f3555b8329c2a81c1df48db5c517800de203bb") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2023-38408 + version("9.2p1", sha256="3f66dbf1655fb45f50e1c56da62ab01218c228807b21338d634ebcdf9d71cf46") + version("9.1p1", sha256="19f85009c7e3e23787f0236fbb1578392ab4d4bf9f8ec5fe6bc1cd7e8bfdd288") + version("9.0p1", sha256="03974302161e9ecce32153cfa10012f1e65c8f3750f573a73ab1befd5972a28a") + version("8.9p1", sha256="fd497654b7ab1686dac672fb83dfb4ba4096e8b5ffcdaccd262380ae58bec5e7") + version("8.8p1", sha256="4590890ea9bb9ace4f71ae331785a3a5823232435161960ed5fc86588f331fe9") + version("8.7p1", sha256="7ca34b8bb24ae9e50f33792b7091b3841d7e1b440ff57bc9fabddf01e2ed1e24") + version("8.6p1", sha256="c3e6e4da1621762c850d03b47eed1e48dff4cc9608ddeb547202a234df8ed7ae") + version("8.5p1", sha256="f52f3f41d429aa9918e38cf200af225ccdd8e66f052da572870c89737646ec25") + version("8.4p1", sha256="5a01d22e407eb1c05ba8a8f7c654d388a13e9f226e4ed33bd38748dafa1d2b24") + version("8.3p1", sha256="f2befbe0472fe7eb75d23340eb17531cb6b3aac24075e2066b41f814e12387b2") + version("8.1p1", sha256="02f5dbef3835d0753556f973cd57b4c19b6b1f6cd24c03445e23ac77ca1b93ff") + version("7.9p1", sha256="6b4b3ba2253d84ed3771c8050728d597c91cfce898713beb7b64a305b6f11aad") + version("7.6p1", sha256="a323caeeddfe145baaa0db16e98d784b1fbc7dd436a6bf1f479dfd5cd1d21723") + version("7.5p1", sha256="9846e3c5fab9f0547400b4d2c017992f914222b3fd1f8eee6c7dc6bc5e59f9f0") + version("7.4p1", sha256="1b1fc4a14e2024293181924ed24872e6f2e06293f3e8926a376b8aec481f19d1") + version("7.3p1", sha256="3ffb989a6dcaa69594c3b550d4855a5a2e1718ccdde7f5e36387b424220fbecc") + 
version("7.2p2", sha256="a72781d1a043876a224ff1b0032daa4094d87565a68528759c1c2cab5482548c") + version("7.1p2", sha256="dd75f024dcf21e06a0d6421d582690bf987a1f6323e32ad6619392f3bfde6bbd") + version("7.0p1", sha256="fd5932493a19f4c81153d812ee4e042b49bbd3b759ab3d9344abecc2bc1485e5") + version("6.9p1", sha256="6e074df538f357d440be6cf93dc581a21f22d39e236f217fcd8eacbb6c896cfe") + version("6.8p1", sha256="3ff64ce73ee124480b5bf767b9830d7d3c03bbcb6abe716b78f0192c37ce160e") + version("6.7p1", sha256="b2f8394eae858dabbdef7dac10b99aec00c95462753e80342e530bbb6f725507") + version("6.6p1", sha256="48c1f0664b4534875038004cc4f3555b8329c2a81c1df48db5c517800de203bb") depends_on("c", type="build") # generated depends_on("cxx", type="build") # generated From 57a1ebc77ee69a15902f3e65a8467d4937c3f194 Mon Sep 17 00:00:00 2001 From: Matthieu Dorier Date: Tue, 12 Nov 2024 18:20:48 +0000 Subject: [PATCH 192/208] xfsprogs: fix dependency on gettext (#47547) * xfsprogs: fix dependency on gettext * changed dependency on gettext in xfsprogs Co-authored-by: Wouter Deconinck --------- Co-authored-by: Wouter Deconinck --- var/spack/repos/builtin/packages/xfsprogs/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/xfsprogs/package.py b/var/spack/repos/builtin/packages/xfsprogs/package.py index c80002bf29933c..c25a95ed4c5c95 100644 --- a/var/spack/repos/builtin/packages/xfsprogs/package.py +++ b/var/spack/repos/builtin/packages/xfsprogs/package.py @@ -26,6 +26,7 @@ class Xfsprogs(AutotoolsPackage): depends_on("libinih") depends_on("gettext") + depends_on("gettext@:0.21.1", when="@:6.3") depends_on("uuid") depends_on("util-linux") @@ -33,7 +34,7 @@ def flag_handler(self, name, flags): if name == "cflags": if self.spec.satisfies("@:5.4.0 %gcc@10:"): flags.append("-fcommon") - elif name == "ldlibs": + elif name == "ldlibs" or name == "ldflags": if "intl" in self.spec["gettext"].libs.names: flags.append("-lintl") return build_system_flags(name, flags) From f6d6a5a480450c90b256c2f06104b057add8fa57 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Tue, 12 Nov 2024 13:31:57 -0500 Subject: [PATCH 193/208] parsec: update urls (#47416) * parsec: update urls * parsec: fix homepage --- .../repos/builtin/packages/parsec/package.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/var/spack/repos/builtin/packages/parsec/package.py b/var/spack/repos/builtin/packages/parsec/package.py index faba983eea22ae..e15068a15ea565 100644 --- a/var/spack/repos/builtin/packages/parsec/package.py +++ b/var/spack/repos/builtin/packages/parsec/package.py @@ -15,7 +15,7 @@ class Parsec(CMakePackage, CudaPackage): parallel execution of micro-tasks on distributed, heterogeneous systems. 
""" - homepage = "https://icl.utk.edu/dte" + homepage = "https://github.com/icldisco/parsec" git = "https://github.com/icldisco/parsec.git" url = "https://github.com/ICLDisco/parsec/archive/refs/tags/parsec-3.0.2012.tar.gz" list_url = "https://github.com/ICLDisco/parsec/tags" @@ -27,20 +27,12 @@ class Parsec(CMakePackage, CudaPackage): license("BSD-3-Clause-Open-MPI") version("master", branch="master") - version( - "3.0.2209", - sha256="67d383d076991484cb2a265f56420abdea7cc1f329c63ac65a3e96fbfb6cc295", - url="https://bitbucket.org/icldistcomp/parsec/get/parsec-3.0.2209.tar.bz2", - ) - version( - "3.0.2012", - sha256="f565bcfffe106be8237b6aea3e83a5770607b7236606414b6f270244fa6ec3bc", - url="https://bitbucket.org/icldistcomp/parsec/get/parsec-3.0.2012.tar.bz2", - ) + version("3.0.2209", sha256="67d383d076991484cb2a265f56420abdea7cc1f329c63ac65a3e96fbfb6cc295") + version("3.0.2012", sha256="7a8403ca67305738f3974cbc7a51b64c4ec353ae9170f2468262a9a52035eff6") version( "1.1.0", - sha256="d2928033c121000ae0a554f1e7f757c1f22274a8b74457ecd52744ae1f70b95a", - url="https://bitbucket.org/icldistcomp/parsec/get/v1.1.0.tar.bz2", + sha256="d1e038713f2c1cd7db6765c891408d85648c46ee23e780fbd5e941b53c9eef85", + url="https://github.com/ICLDisco/parsec/archive/refs/tags/v1.1.0.tar.gz", ) variant( From 751585f1e3f93a784f402fdc598949d5e63cba34 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Tue, 12 Nov 2024 11:07:34 -0800 Subject: [PATCH 194/208] glab: add v1.48.0 (#47552) --- var/spack/repos/builtin/packages/glab/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/glab/package.py b/var/spack/repos/builtin/packages/glab/package.py index 58ed827e1fd112..ed4eead79820c5 100644 --- a/var/spack/repos/builtin/packages/glab/package.py +++ b/var/spack/repos/builtin/packages/glab/package.py @@ -16,6 +16,7 @@ class Glab(GoPackage): license("MIT") + version("1.48.0", sha256="45410de23a7bad37feeae18f47f3c0113d81133ad9bb97c8f0b8afc5409272c7") version("1.46.1", sha256="935f732ddacc6e54fc83d06351fc25454ac8a58c465c3efa43e066ea226257c2") version("1.36.0", sha256="8d6c759ebfe9c6942fcdb7055a4a5c7209a3b22beb25947f906c9aef3bc067e8") version("1.35.0", sha256="7ed31c7a9b425fc15922f83c5dd8634a2758262a4f25f92583378655fcad6303") @@ -40,6 +41,7 @@ class Glab(GoPackage): depends_on("go@1.22.4:", type="build", when="@1.42:") depends_on("go@1.22.5:", type="build", when="@1.44:") depends_on("go@1.23:", type="build", when="@1.46:") + depends_on("go@1.23.2:", type="build", when="@1.48:") build_directory = "cmd/glab" From 3dadf569a4754c4254db7f411336e67dde52cbf4 Mon Sep 17 00:00:00 2001 From: Paul Gessinger Date: Tue, 12 Nov 2024 20:41:14 +0100 Subject: [PATCH 195/208] geomodel: Allow configuring C++ standard (#47422) * geomodel: Allow configuring C++ standard * drop c++11 --- var/spack/repos/builtin/packages/geomodel/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/geomodel/package.py b/var/spack/repos/builtin/packages/geomodel/package.py index 9490c569e8aa53..68d561a015a373 100644 --- a/var/spack/repos/builtin/packages/geomodel/package.py +++ b/var/spack/repos/builtin/packages/geomodel/package.py @@ -54,6 +54,14 @@ class Geomodel(CMakePackage): when="+fullsimlight", ) + variant( + "cxxstd", + default="17", + values=("17", "20", "23"), + multi=False, + description="Use the specified C++ standard when building", + ) + conflicts("+fullsimlight", when="+fsl", msg="FSL triggers the build of the FullSimLight") depends_on("cmake@3.16:", type="build") @@ 
-80,5 +88,6 @@ def cmake_args(self): self.define_from_variant("GEOMODEL_BUILD_FSL", "fsl"), self.define_from_variant("GEOMODEL_BUILD_EXAMPLES", "examples"), self.define_from_variant("GEOMODEL_BUILD_TOOLS", "tools"), + self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"), ] return args From 6d8fdbcf829138610fe8b78f757a1b4ca1058c00 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Tue, 12 Nov 2024 12:54:19 -0800 Subject: [PATCH 196/208] direnv: add v2.35.0 (#47551) --- var/spack/repos/builtin/packages/direnv/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/direnv/package.py b/var/spack/repos/builtin/packages/direnv/package.py index e091d1242f22d3..58c8807424f496 100644 --- a/var/spack/repos/builtin/packages/direnv/package.py +++ b/var/spack/repos/builtin/packages/direnv/package.py @@ -16,6 +16,7 @@ class Direnv(GoPackage): license("MIT") + version("2.35.0", sha256="a7aaec49d1b305f0745dad364af967fb3dc9bb5befc9f29d268d528b5a474e57") version("2.34.0", sha256="3d7067e71500e95d69eac86a271a6b6fc3f2f2817ba0e9a589524bf3e73e007c") version("2.33.0", sha256="8ef18051aa6bdcd6b59f04f02acdd0b78849b8ddbdbd372d4957af7889c903ea") version("2.32.3", sha256="c66f6d1000f28f919c6106b5dcdd0a0e54fb553602c63c60bf59d9bbdf8bd33c") From a02b40b670692cc7986a32d5a1de65e252b1791c Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Tue, 12 Nov 2024 13:15:53 -0800 Subject: [PATCH 197/208] restic: add v0.17.3 (#47553) --- var/spack/repos/builtin/packages/restic/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/restic/package.py b/var/spack/repos/builtin/packages/restic/package.py index e6144806699c50..2c3489e98e3049 100644 --- a/var/spack/repos/builtin/packages/restic/package.py +++ b/var/spack/repos/builtin/packages/restic/package.py @@ -16,6 +16,7 @@ class Restic(GoPackage): license("BSD-2-Clause") + version("0.17.3", sha256="bf0dd73edfae531c24070e2e7833938613f7b179ed165e6b681098edfdf286c8") version("0.17.1", sha256="cba3a5759690d11dae4b5620c44f56be17a5688e32c9856776db8a9a93d6d59a") version("0.16.4", sha256="d736a57972bb7ee3398cf6b45f30e5455d51266f5305987534b45a4ef505f965") version("0.16.3", sha256="a94d6c1feb0034fcff3e8b4f2d65c0678f906fc21a1cf2d435341f69e7e7af52") From 1809b81e1d28e51e13846d1e981e8e49e2ea0c43 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Tue, 12 Nov 2024 14:04:47 -0800 Subject: [PATCH 198/208] parse_specs: special case for concretizing lookups quickly (#47556) We added unification semantics for parsing specs from the CLI, but there are a couple of special cases in which we can avoid calls to the concretizer for speed when the specs can all be resolved by lookups. - [x] special case 1: solving a single spec - [x] special case 2: all specs are either concrete (come from a file) or have an abstract hash. In this case if concretizer:unify:true we need an additional check to confirm the specs are compatible. 
- [x] add a parameterized test for unifying on the CI --------- Signed-off-by: Todd Gamblin Co-authored-by: Todd Gamblin --- lib/spack/spack/cmd/__init__.py | 38 +++++++ lib/spack/spack/spec.py | 6 +- lib/spack/spack/test/cmd/init_py_functions.py | 101 ++++++++++++++++++ lib/spack/spack/test/cmd/spec.py | 40 +++++++ 4 files changed, 182 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index c0efd5252153e7..9449face85a7fc 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -8,6 +8,7 @@ import os import re import sys +from collections import Counter from typing import List, Union import llnl.string @@ -189,6 +190,43 @@ def _concretize_spec_pairs(to_concretize, tests=False): rules from config.""" unify = spack.config.get("concretizer:unify", False) + # Special case for concretizing a single spec + if len(to_concretize) == 1: + abstract, concrete = to_concretize[0] + return [concrete or abstract.concretized()] + + # Special case if every spec is either concrete or has an abstract hash + if all( + concrete or abstract.concrete or abstract.abstract_hash + for abstract, concrete in to_concretize + ): + # Get all the concrete specs + ret = [ + concrete or (abstract if abstract.concrete else abstract.lookup_hash()) + for abstract, concrete in to_concretize + ] + + # If unify: true, check that specs don't conflict + # Since all concrete, "when_possible" is not relevant + if unify is True: # True, "when_possible", False are possible values + runtimes = spack.repo.PATH.packages_with_tags("runtime") + specs_per_name = Counter( + spec.name + for spec in traverse.traverse_nodes( + ret, deptype=("link", "run"), key=traverse.by_dag_hash + ) + if spec.name not in runtimes # runtimes are allowed multiple times + ) + + conflicts = sorted(name for name, count in specs_per_name.items() if count > 1) + if conflicts: + raise spack.error.SpecError( + "Specs conflict and `concretizer:unify` is configured true.", + f" specs depend on multiple versions of {', '.join(conflicts)}", + ) + return ret + + # Standard case concretize_method = spack.concretize.concretize_separately # unify: false if unify is True: concretize_method = spack.concretize.concretize_together diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 3d92879484f8a7..d87296a3fb7c79 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -59,7 +59,7 @@ import re import socket import warnings -from typing import Any, Callable, Dict, List, Match, Optional, Set, Tuple, Union +from typing import Any, Callable, Dict, Iterable, List, Match, Optional, Set, Tuple, Union import archspec.cpu @@ -2828,7 +2828,7 @@ def ensure_no_deprecated(root): msg += " For each package listed, choose another spec\n" raise SpecDeprecatedError(msg) - def concretize(self, tests: Union[bool, List[str]] = False) -> None: + def concretize(self, tests: Union[bool, Iterable[str]] = False) -> None: """Concretize the current spec. Args: @@ -2956,7 +2956,7 @@ def _finalize_concretization(self): for spec in self.traverse(): spec._cached_hash(ht.dag_hash) - def concretized(self, tests=False): + def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "spack.spec.Spec": """This is a non-destructive version of concretize(). 
First clones, then returns a concrete version of this package diff --git a/lib/spack/spack/test/cmd/init_py_functions.py b/lib/spack/spack/test/cmd/init_py_functions.py index 4dc000edb9434d..deb6222411b725 100644 --- a/lib/spack/spack/test/cmd/init_py_functions.py +++ b/lib/spack/spack/test/cmd/init_py_functions.py @@ -4,10 +4,15 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import pytest +import spack.environment as ev +import spack.error +import spack.solver.asp as asp from spack.cmd import ( CommandNameError, PythonNameError, cmd_name, + matching_specs_from_env, + parse_specs, python_name, require_cmd_name, require_python_name, @@ -34,3 +39,99 @@ def test_require_cmd_name(): with pytest.raises(CommandNameError): require_cmd_name("okey_dokey") require_cmd_name(cmd_name("okey_dokey")) + + +@pytest.mark.parametrize( + "unify,spec_strs,error", + [ + # single spec + (True, ["zmpi"], None), + (False, ["mpileaks"], None), + # multiple specs, some from hash some from file + (True, ["zmpi", "mpileaks^zmpi", "libelf"], None), + (True, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], spack.error.SpecError), + (False, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], None), + ], +) +def test_special_cases_concretization_parse_specs( + unify, spec_strs, error, monkeypatch, mutable_config, mutable_database, tmpdir +): + """Test that special cases in parse_specs(concretize=True) bypass solver""" + + # monkeypatch to ensure we do not call the actual concretizer + def _fail(*args, **kwargs): + assert False + + monkeypatch.setattr(asp.SpackSolverSetup, "setup", _fail) + + spack.config.set("concretizer:unify", unify) + + args = [f"/{spack.store.STORE.db.query(s)[0].dag_hash()}" for s in spec_strs] + if len(args) > 1: + # We convert the last one to a specfile input + filename = tmpdir.join("spec.json") + spec = parse_specs(args[-1], concretize=True)[0] + with open(filename, "w") as f: + spec.to_json(f) + args[-1] = str(filename) + + if error: + with pytest.raises(error): + parse_specs(args, concretize=True) + else: + # assertion error from monkeypatch above if test fails + parse_specs(args, concretize=True) + + +@pytest.mark.parametrize( + "unify,spec_strs,error", + [ + # single spec + (True, ["zmpi"], None), + (False, ["mpileaks"], None), + # multiple specs, some from hash some from file + (True, ["zmpi", "mpileaks^zmpi", "libelf"], None), + (True, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], spack.error.SpecError), + (False, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], None), + ], +) +def test_special_cases_concretization_matching_specs_from_env( + unify, + spec_strs, + error, + monkeypatch, + mutable_config, + mutable_database, + tmpdir, + mutable_mock_env_path, +): + """Test that special cases in parse_specs(concretize=True) bypass solver""" + + # monkeypatch to ensure we do not call the actual concretizer + def _fail(*args, **kwargs): + assert False + + monkeypatch.setattr(asp.SpackSolverSetup, "setup", _fail) + + spack.config.set("concretizer:unify", unify) + + ev.create("test") + env = ev.read("test") + + args = [f"/{spack.store.STORE.db.query(s)[0].dag_hash()}" for s in spec_strs] + if len(args) > 1: + # We convert the last one to a specfile input + filename = tmpdir.join("spec.json") + spec = parse_specs(args[-1], concretize=True)[0] + with open(filename, "w") as f: + spec.to_json(f) + args[-1] = str(filename) + + with env: + specs = parse_specs(args, concretize=False) + if error: + with pytest.raises(error): + matching_specs_from_env(specs) + else: + # assertion error from monkeypatch above if 
test fails
+            matching_specs_from_env(specs)
diff --git a/lib/spack/spack/test/cmd/spec.py b/lib/spack/spack/test/cmd/spec.py
index a57c40ec926823..1d0d08f494b595 100644
--- a/lib/spack/spack/test/cmd/spec.py
+++ b/lib/spack/spack/test/cmd/spec.py
@@ -179,3 +179,43 @@ def test_spec_version_assigned_git_ref_as_version(name, version, error):
     else:
         output = spec(name + "@" + version)
         assert version in output
+
+
+@pytest.mark.parametrize(
+    "unify, spec_hash_args, match, error",
+    [
+        # success cases with unify:true
+        (True, ["mpileaks_mpich"], "mpich", None),
+        (True, ["mpileaks_zmpi"], "zmpi", None),
+        (True, ["mpileaks_mpich", "dyninst"], "mpich", None),
+        (True, ["mpileaks_zmpi", "dyninst"], "zmpi", None),
+        # same success cases with unify:false
+        (False, ["mpileaks_mpich"], "mpich", None),
+        (False, ["mpileaks_zmpi"], "zmpi", None),
+        (False, ["mpileaks_mpich", "dyninst"], "mpich", None),
+        (False, ["mpileaks_zmpi", "dyninst"], "zmpi", None),
+        # conflicting specs: error with unify:true, success with unify:false
+        (True, ["mpileaks_mpich", "mpileaks_zmpi"], "callpath, mpileaks", spack.error.SpecError),
+        (False, ["mpileaks_mpich", "mpileaks_zmpi"], "zmpi", None),
+    ],
+)
+def test_spec_unification_from_cli(
+    install_mockery, mutable_config, mutable_database, unify, spec_hash_args, match, error
+):
+    """Ensure specs grouped together on the CLI are concretized together when unify:true."""
+    spack.config.set("concretizer:unify", unify)
+
+    db = spack.store.STORE.db
+    spec_lookup = {
+        "mpileaks_mpich": db.query_one("mpileaks ^mpich").dag_hash(),
+        "mpileaks_zmpi": db.query_one("mpileaks ^zmpi").dag_hash(),
+        "dyninst": db.query_one("dyninst").dag_hash(),
+    }
+
+    hashes = [f"/{spec_lookup[name]}" for name in spec_hash_args]
+    if error:
+        with pytest.raises(error, match=match):
+            output = spec(*hashes)
+    else:
+        output = spec(*hashes)
+        assert match in output

From a76e3f203025c1ab9ea12447d2852db1fd7eb326 Mon Sep 17 00:00:00 2001
From: SXS Bot <31972027+sxs-bot@users.noreply.github.com>
Date: Tue, 12 Nov 2024 17:16:27 -0500
Subject: [PATCH 199/208] spectre: add v2024.03.19 (#43275)

Co-authored-by: sxs-bot
---
 var/spack/repos/builtin/packages/spectre/package.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/var/spack/repos/builtin/packages/spectre/package.py b/var/spack/repos/builtin/packages/spectre/package.py
index 40282f7d5a4308..3d56114c420044 100644
--- a/var/spack/repos/builtin/packages/spectre/package.py
+++ b/var/spack/repos/builtin/packages/spectre/package.py
@@ -31,6 +31,9 @@ class Spectre(CMakePackage):
     license("MIT")
 
     version("develop", branch="develop")
+    version(
+        "2024.03.19", sha256="42a25c8827b56268d9826239cde521491be19318d83785b35cd0265a9f6a1f7c"
+    )
     version(
         "2024.09.29", sha256="b5e84b4564ad7cd2e069a24c6c472aab342753fe8393242eceba378b52226acb"
     )

From ad518d975c711c04bdc013363d8fc33a212e9194 Mon Sep 17 00:00:00 2001
From: v <39996356+vhewes@users.noreply.github.com>
Date: Tue, 12 Nov 2024 18:34:11 -0600
Subject: [PATCH 200/208] py-nugraph, ph5concat, py-numl: Add new nugraph packages (#47315)

---
 .../builtin/packages/ph5concat/package.py     | 35 ++++++++++++++++
 .../builtin/packages/py-nugraph/package.py    | 34 ++++++++++++++++
 .../repos/builtin/packages/py-numl/package.py | 40 +++++++++++++++++++
 3 files changed, 109 insertions(+)
 create mode 100644 var/spack/repos/builtin/packages/ph5concat/package.py
 create mode 100644 var/spack/repos/builtin/packages/py-nugraph/package.py
 create mode 100644 var/spack/repos/builtin/packages/py-numl/package.py

diff --git a/var/spack/repos/builtin/packages/ph5concat/package.py
b/var/spack/repos/builtin/packages/ph5concat/package.py new file mode 100644 index 00000000000000..949d86d24b66d9 --- /dev/null +++ b/var/spack/repos/builtin/packages/ph5concat/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Ph5concat(AutotoolsPackage): + """Parallel Data Concatenation for High Energy Physics Data Analysis""" + + homepage = "https://github.com/NU-CUCIS/ph5concat" + url = "https://github.com/NU-CUCIS/ph5concat/archive/v1.1.0.tar.gz" + + maintainers("vhewes") + + version("1.1.0", sha256="cecc22325a56771cda1fc186e6bd1f9bde2957beca3fa9a387d55462efd5254f") + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + + depends_on("zlib") + depends_on("hdf5+hl+mpi@1.10.4:1.12") + depends_on("mpich") + + variant("profiling", default=False, description="Enable profiling support") + + def setup_build_environment(self, env): + env.set("LIBS", "-ldl -lz") + + def configure_args(self): + args = [f"--with-{pkg}={self.spec[pkg].prefix}" for pkg in ("hdf5", "mpich")] + args.extend(self.enable_or_disable("profiling")) + return args diff --git a/var/spack/repos/builtin/packages/py-nugraph/package.py b/var/spack/repos/builtin/packages/py-nugraph/package.py new file mode 100644 index 00000000000000..83b14447a26412 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-nugraph/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyNugraph(PythonPackage): + """Graph Neural Network for neutrino physics event reconstruction""" + + pypi = "nugraph/nugraph-24.7.1.tar.gz" + + maintainers("vhewes") + + license("MIT", checked_by="vhewes") + + version("24.7.1", sha256="e1449e4a37049cc774ad026d4f2db339eb60bb59109a11920bb65a4061915de8") + version("24.7.0", sha256="b95d93a1cbcd280a3529ce4782ef778b982d9d4edcc19f522442c38144895f65") + version("24.4.0", sha256="5f888d065819b1ec7c33e7f829ad65eb963db2cf109a5d31b4caef49c004f86f") + version("24.2.0", sha256="4765ea73b384e95a38a598499e77d805541e415049da9f6f46193f8bc281208a") + version("23.11.1", sha256="b160996fca9615b2c7e6ed02fb780af5edaa97f6cdafd45abdf65ea0c7a6f2ca") + version("23.11.0", sha256="a1e01a8c3143fc8db2cf8a3584d192a738d89eb865b1d52cd2994b24bd4175ec") + version("23.10.0", sha256="8a0219318c6bd6d0d240e419ef88cdedd7e944276f0cce430d9ece423e06f1b8") + + depends_on("py-flit-core", type="build") + + depends_on("py-matplotlib") + depends_on("py-numl") + depends_on("py-pynvml") + depends_on("py-seaborn") + depends_on("py-pytorch-lightning") + + extends("python") diff --git a/var/spack/repos/builtin/packages/py-numl/package.py b/var/spack/repos/builtin/packages/py-numl/package.py new file mode 100644 index 00000000000000..1c61a9d53eb0b1 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-numl/package.py @@ -0,0 +1,40 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyNuml(PythonPackage):
+    """Standardised ML input processing for particle physics"""
+
+    pypi = "pynuml/pynuml-24.7.1.tar.gz"
+
+    maintainers("vhewes")
+
+    license("MIT", checked_by="vhewes")
+
+    version("24.7.1", sha256="20d2f1a07887473e67c79ecc3804b8012e22b78883199fdb0d07bb1b725b6ab0")
+    version("24.7.0", sha256="d47f71ead6861278595b79d04c554da4998d5c4c50587e4c90231f50db0f2e81")
+    version("24.6.0", sha256="357d2b0e0b9ca179514d177278620e5ac57bed37bfb6d145c172150126432613")
+    version("23.11.0", sha256="1a7e61864cfeb0b27c6a93646c33e3f457bbc384eb86aee4df76b5e02898d02f")
+    version("23.9.0", sha256="77ea8c9df541351adeb249594cce27d742973ee82a0d7f2ad8cdcffa9d3fa6b1")
+    version("23.8.0", sha256="0896797f3f70b3a6d3d74f7a3e7fe5eaf59a2000a47ffc7ac08b73be0aa15706")
+    version("23.7.0", sha256="5449dd09a7e046d036e12c7971e61d2862cdb79c7932144b038288fc05ca50a8")
+    version("23.6.1", sha256="fdb23a9d4f1b83b06cc35b07608fe4c2e55f8307ac47851cccc21a20b69ab674")
+    version("23.6.0", sha256="fcc1546b9489584f2635f6418c5e1a43f6bdf02dd5c46b7afa09ea5f247524a2")
+    version("23.5.2", sha256="d83576c8e25e22cc9ba68a35b9690ea861f7a4c09db65ca134849c89fba9b330")
+    version("23.5.1", sha256="73ef1bea1022b9ebddec35ac7d66c1394003aa5e63a4ec99bfa14d4f833e04a4")
+    version("23.5.0", sha256="dccb774932813ddc788b1d27e52e251d9db6ea16b303596bfa0955ae51098674")
+
+    depends_on("py-flit-core", type="build")
+
+    depends_on("mpich")
+    depends_on("py-h5py +mpi")
+    depends_on("py-pandas")
+    depends_on("py-particle")
+    depends_on("py-plotly")
+    depends_on("py-torch-geometric")
+
+    extends("python")

From bf16f0bf7424355976470cd4ae8813b9960b16fa Mon Sep 17 00:00:00 2001
From: John Gouwar
Date: Tue, 12 Nov 2024 23:51:19 -0500
Subject: [PATCH 201/208] Add solver capability for synthesizing splices of ABI compatible packages. (#46729)

This PR provides 2 complementary features:

1. An augmentation to the package language to express ABI compatibility
   relationships among packages.
2. An extension to the concretizer that can synthesize splices between
   ABI compatible packages.

1. The `can_splice` directive and ABI compatibility

We augment the package language with a single directive: `can_splice`.
Here is an example of a package `Foo` exercising the `can_splice` directive:

class Foo(Package):
    version("1.0")
    version("1.1")
    variant("compat", default=True)
    variant("json", default=False)
    variant("pic", default=False)
    can_splice("foo@1.0", when="@1.1")
    can_splice("bar@1.0", when="@1.0+compat")
    can_splice("baz@1.0+compat", when="@1.0+compat", match_variants="*")
    can_splice("quux@1.0", when="@1.1~compat", match_variants="json")

Explanations of the uses of each directive:

- `can_splice("foo@1.0", when="@1.1")`: If `foo@1.0` is the dependency of an
  already installed spec and `foo@1.1` could be a valid dependency for the
  parent spec, then `foo@1.1` can be spliced in for `foo@1.0` in the parent spec.
- `can_splice("bar@1.0", when="@1.0+compat")`: If `bar@1.0` is the dependency of an already installed spec and `foo@1.0+compat` could be a valid dependency for the parent spec, then `foo@1.0+compat` can be spliced in for `bar@1.0+compat` in the parent spec - `can_splice("baz@1.0", when="@1.0+compat", match_variants="*")`: If `baz@1.0+compat` is the dependency of an already installed spec and `foo@1.0+compat` could be a valid dependency for the parent spec, then `foo@1.0+compat` can be spliced in for `baz@1.0+compat` in the parent spec, provided that they have the same value for all other variants (regardless of what those values are). - `can_splice("quux@1.0", when=@1.1~compat", match_variants="json")`:If `quux@1.0` is the dependency of an already installed spec and `foo@1.1~compat` could be a valid dependency for the parent spec, then `foo@1.0~compat` can be spliced in for `quux@1.0` in the parent spec, provided that they have the same value for their `json` variant. 2. Augmenting the solver to synthesize splices ### Changes to the hash encoding in `asp.py` Previously, when including concrete specs in the solve, they would have the following form: installed_hash("foo", "xxxyyy") imposed_constraint("xxxyyy", "foo", "attr1", ...) imposed_constraint("xxxyyy", "foo", "attr2", ...) % etc. Concrete specs now have the following form: installed_hash("foo", "xxxyyy") hash_attr("xxxyyy", "foo", "attr1", ...) hash_attr("xxxyyy", "foo", "attr2", ...) This transformation allows us to control which constraints are imposed when we select a hash, to facilitate the splicing of dependencies. 2.1 Compiling `can_splice` directives in `asp.py` Consider the concrete spec: foo@2.72%gcc@11.4 arch=linux-ubuntu22.04-icelake build_system=autotools ^bar ... It will emit the following facts for reuse (below is a subset) installed_hash("foo", "xxxyyy") hash_attr("xxxyyy", "hash", "foo", "xxxyyy") hash_attr("xxxyyy", "version", "foo", "2.72") hash_attr("xxxyyy", "node_os", "ubuntu22.04") hash_attr("xxxyyy", "hash", "bar", "zzzqqq") hash_attr("xxxyyy", "depends_on", "foo", "bar", "link") Rules that derive abi_splice_conditions_hold will be generated from use of the `can_splice` directive. They will have the following form: can_splice("foo@1.0.0+a", when="@1.0.1+a", match_variants=["b"]) ---> abi_splice_conditions_hold(0, node(SID, "foo"), "foo", BaseHash) :- installed_hash("foo", BaseHash), attr("node", node(SID, SpliceName)), attr("node_version_satisfies", node(SID, "foo"), "1.0.1"), hash_attr("hash", "node_version_satisfies", "foo", "1.0.1"), attr("variant_value", node(SID, "foo"), "a", "True"), hash_attr("hash", "variant_value", "foo", "a", "True"), attr("variant_value", node(SID, "foo"), "b", VariVar0), hash_attr("hash", "variant_value", "foo", "b", VariVar0). 2.2 Synthesizing splices in `concretize.lp` and `splices.lp` The ASP solver generates "splice_at_hash" attrs to indicate that a particular node has a splice in one of its immediate dependencies. Splices can be introduced in the dependencies of concrete specs when `splices.lp` is conditionally loaded (based on the config option `concretizer:splice:True`. 2.3 Constructing spliced specs in `asp.py` The method `SpecBuilder._resolve_splices` implements a top-down memoized implementation of hybrid splicing. This is an optimization over the more general `Spec.splice`, since the solver gives a global view of exactly which specs can be shared, to ensure the minimal number of splicing operations. 
Misc changes to facilitate configuration and benchmarking - Added the method `Solver.solve_with_stats` to expose timers from the public interface for easier benchmarking - Added the boolean config option `concretizer:splice` to conditionally load splicing behavior Co-authored-by: Greg Becker --- etc/spack/defaults/concretizer.yaml | 10 +- lib/spack/docs/build_settings.rst | 32 +++ lib/spack/docs/packaging_guide.rst | 52 +++- lib/spack/spack/directives.py | 38 +++ lib/spack/spack/package_base.py | 1 + lib/spack/spack/schema/concretizer.py | 3 +- lib/spack/spack/solver/asp.py | 202 ++++++++++++--- lib/spack/spack/solver/concretize.lp | 71 +++++- lib/spack/spack/solver/core.py | 13 + lib/spack/spack/solver/display.lp | 1 - lib/spack/spack/solver/splices.lp | 56 +++++ lib/spack/spack/spec.py | 24 +- lib/spack/spack/test/abi_splicing.py | 234 ++++++++++++++++++ lib/spack/spack/test/cmd/pkg.py | 14 +- lib/spack/spack/test/spec_semantics.py | 4 +- .../depends-on-manyvariants/package.py | 25 ++ .../depends-on-virtual-with-abi/package.py | 19 ++ .../packages/manyvariants/package.py | 33 +++ .../builtin.mock/packages/splice-h/package.py | 9 +- .../builtin.mock/packages/splice-z/package.py | 8 +- .../packages/virtual-abi-1/package.py | 25 ++ .../packages/virtual-abi-2/package.py | 25 ++ .../packages/virtual-abi-multi/package.py | 29 +++ .../packages/virtual-with-abi/package.py | 16 ++ 24 files changed, 885 insertions(+), 59 deletions(-) create mode 100644 lib/spack/spack/solver/splices.lp create mode 100644 lib/spack/spack/test/abi_splicing.py create mode 100644 var/spack/repos/builtin.mock/packages/depends-on-manyvariants/package.py create mode 100644 var/spack/repos/builtin.mock/packages/depends-on-virtual-with-abi/package.py create mode 100644 var/spack/repos/builtin.mock/packages/manyvariants/package.py create mode 100644 var/spack/repos/builtin.mock/packages/virtual-abi-1/package.py create mode 100644 var/spack/repos/builtin.mock/packages/virtual-abi-2/package.py create mode 100644 var/spack/repos/builtin.mock/packages/virtual-abi-multi/package.py create mode 100644 var/spack/repos/builtin.mock/packages/virtual-with-abi/package.py diff --git a/etc/spack/defaults/concretizer.yaml b/etc/spack/defaults/concretizer.yaml index fef46967a84a58..8cce19ebab9317 100644 --- a/etc/spack/defaults/concretizer.yaml +++ b/etc/spack/defaults/concretizer.yaml @@ -39,7 +39,8 @@ concretizer: # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG. duplicates: # "none": allows a single node for any package in the DAG. - # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.) + # "minimal": allows the duplication of 'build-tools' nodes only + # (e.g. py-setuptools, cmake etc.) # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG) strategy: minimal # Option to specify compatibility between operating systems for reuse of compilers and packages @@ -47,3 +48,10 @@ concretizer: # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]} os_compatible: {} + + # Option to specify whether to support splicing. 
Splicing allows for
+  # the relinking of concrete package dependencies in order to better
+  # reuse already built packages with ABI compatible dependencies
+  splice:
+    explicit: []
+    automatic: false
diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst
index 97c81bf17a40ea..bdad8c8a51464c 100644
--- a/lib/spack/docs/build_settings.rst
+++ b/lib/spack/docs/build_settings.rst
@@ -237,3 +237,35 @@ is optional -- by default, splices will be transitive.
    ``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack will warn the user in
    this case, but will not fail the concretization.
+
+.. _automatic_splicing:
+
+^^^^^^^^^^^^^^^^^^
+Automatic Splicing
+^^^^^^^^^^^^^^^^^^
+
+The Spack solver can be configured to do automatic splicing for
+ABI-compatible packages. Automatic splices are enabled in the concretizer
+config section:
+
+.. code-block:: yaml
+
+   concretizer:
+     splice:
+       automatic: True
+
+Packages can include ABI-compatibility information using the
+``can_splice`` directive. See :ref:`the packaging
+guide <abi_compatibility>` for instructions on specifying ABI
+compatibility using the ``can_splice`` directive.
+
+.. note::
+
+   The ``can_splice`` directive is experimental and may be changed in
+   future versions.
+
+When automatic splicing is enabled, the concretizer will combine any
+number of ABI-compatible specs if possible to reuse installed packages
+and packages available from binary caches. The end result of these
+specs is equivalent to a series of transitive/intransitive splices,
+but the series may be non-obvious.
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index d9a37175b6c72d..87fb184c656d1f 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1267,7 +1267,7 @@ Git fetching supports the following parameters to ``version``:
    This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
    large repositories that have separate portions that can be built independently.
    If paths provided are directories then all the subdirectories and associated files
-   will also be cloned. 
+   will also be cloned.
 
 Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
 
@@ -1367,8 +1367,8 @@ Submodules
    git-submodule``.
 
 Sparse-Checkout
-   You can supply ``git_sparse_paths`` at the package or version level to utilize git's 
-   sparse-checkout feature. This will only clone the paths that are specified in the 
+   You can supply ``git_sparse_paths`` at the package or version level to utilize git's
+   sparse-checkout feature. This will only clone the paths that are specified in the
    ``git_sparse_paths`` attribute for the package along with the files in the top level directory.
    This feature allows you to only clone what you need from a large repository.
    Note that this is a newer feature in git and requries git ``2.25.0`` or greater.
@@ -2392,7 +2392,7 @@ by the ``--jobs`` option:
 .. code-block:: python
    :emphasize-lines: 7, 11
    :linenos:
-    
+
 class Xios(Package):
    ...
    def install(self, spec, prefix):
@@ -5420,7 +5420,7 @@ by build recipes. Examples of checking :ref:`variant settings ` and
 determine whether it needs to also set up build dependencies (see
 :ref:`test-build-tests`).
 
-The ``MyPackage`` package below provides two basic test examples: 
+The ``MyPackage`` package below provides two basic test examples:
 ``test_example`` and ``test_example2``. The first runs the installed
 ``example`` and ensures its output contains an expected string.
 The second runs ``example2`` without checking output so is only concerned with confirming
@@ -5737,7 +5737,7 @@ subdirectory of the installation prefix. They are automatically copied to
 the appropriate relative paths under the test stage directory prior
 to executing stand-alone tests.
 
-.. tip:: 
+.. tip::
 
    *Perform test-related conversions once when copying files.*
 
@@ -7113,6 +7113,46 @@ might write:
     CXXFLAGS += -I$DWARF_PREFIX/include
     CXXFLAGS += -L$DWARF_PREFIX/lib
 
+.. _abi_compatibility:
+
+----------------------------
+Specifying ABI Compatibility
+----------------------------
+
+Packages can include ABI-compatibility information using the
+``can_splice`` directive. For example, if ``Foo`` version 1.1 can
+always replace version 1.0, then the package could have:
+
+.. code-block:: python
+
+   can_splice("foo@1.0", when="@1.1")
+
+For virtual packages, packages can also specify ABI-compatibility with
+other packages providing the same virtual. For example, ``zlib-ng``
+could specify:
+
+.. code-block:: python
+
+   can_splice("zlib@1.3.1", when="@2.2+compat")
+
+Some packages have ABI-compatibility that is dependent on matching
+variant values, either for all variants or for some set of
+ABI-relevant variants. In those cases, it is not necessary to specify
+the full combinatorial explosion. The ``match_variants`` keyword can
+cover all single-value variants.
+
+.. code-block:: python
+
+   can_splice("foo@1.1", when="@1.2", match_variants=["bar"])  # any value for bar as long as they're the same
+   can_splice("foo@1.2", when="@1.3", match_variants="*")  # any variant values if all single-value variants match
+
+The concretizer will use ABI compatibility to determine automatic
+splices when :ref:`automatic splicing <automatic_splicing>` is enabled.
+
+.. note::
+
+   The ``can_splice`` directive is experimental, and may be replaced
+   by a higher-level interface in future versions of Spack.
 
 .. _package_class_structure:
 
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 0d6b66780c8921..0e3bb522cefe8b 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -77,6 +77,7 @@ class OpenMpi(Package):
     "build_system",
     "requires",
     "redistribute",
+    "can_splice",
 ]
 
 _patch_order_index = 0
@@ -505,6 +506,43 @@ def _execute_provides(pkg: "spack.package_base.PackageBase"):
     return _execute_provides
 
 
+@directive("splice_specs")
+def can_splice(
+    target: SpecType, *, when: SpecType, match_variants: Union[None, str, List[str]] = None
+):
+    """Packages can declare whether they are ABI-compatible with another package
+    and thus can be spliced into concrete versions of that package.
+
+    Args:
+        target: The spec that the current package is ABI-compatible with.
+
+        when: An anonymous spec constraining current package for when it is
+            ABI-compatible with target.
+
+        match_variants: A list of variants that must match
+            between target spec and current package, with special value '*'
+            which matches all variants. Example: a variant is defined on both
+            packages called json, and they are ABI-compatible whenever they agree on
+            the json variant (regardless of whether it is turned on or off). Note
+            that this cannot be applied to multi-valued variants and multi-valued
+            variants will be skipped by '*'.
+ """ + + def _execute_can_splice(pkg: "spack.package_base.PackageBase"): + when_spec = _make_when_spec(when) + if isinstance(match_variants, str) and match_variants != "*": + raise ValueError( + "* is the only valid string for match_variants " + "if looking to provide a single variant, use " + f"[{match_variants}] instead" + ) + if when_spec is None: + return + pkg.splice_specs[when_spec] = (spack.spec.Spec(target), match_variants) + + return _execute_can_splice + + @directive("patches") def patch( url_or_filename: str, diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py index ef2f27cca63ab2..943c4eb0a4f666 100644 --- a/lib/spack/spack/package_base.py +++ b/lib/spack/spack/package_base.py @@ -622,6 +622,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass patches: Dict["spack.spec.Spec", List["spack.patch.Patch"]] variants: Dict["spack.spec.Spec", Dict[str, "spack.variant.Variant"]] languages: Dict["spack.spec.Spec", Set[str]] + splice_specs: Dict["spack.spec.Spec", Tuple["spack.spec.Spec", Union[None, str, List[str]]]] #: By default, packages are not virtual #: Virtual packages override this attribute diff --git a/lib/spack/spack/schema/concretizer.py b/lib/spack/spack/schema/concretizer.py index 86e58de2580fe6..4fba79fece55ed 100644 --- a/lib/spack/spack/schema/concretizer.py +++ b/lib/spack/spack/schema/concretizer.py @@ -78,7 +78,8 @@ "transitive": {"type": "boolean", "default": False}, }, }, - } + }, + "automatic": {"type": "boolean"}, }, }, "duplicates": { diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 24b7aeb4ff17a5..32db03f5cf906b 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -52,6 +52,7 @@ from .core import ( AspFunction, + AspVar, NodeArgument, ast_sym, ast_type, @@ -524,12 +525,14 @@ def _compute_specs_from_answer_set(self): node = SpecBuilder.make_node(pkg=providers[0]) candidate = answer.get(node) - if candidate and candidate.build_spec.satisfies(input_spec): - if not candidate.satisfies(input_spec): - tty.warn( - "explicit splice configuration has caused the concretized spec" - f" {candidate} not to satisfy the input spec {input_spec}" - ) + if candidate and candidate.satisfies(input_spec): + self._concrete_specs.append(answer[node]) + self._concrete_specs_by_input[input_spec] = answer[node] + elif candidate and candidate.build_spec.satisfies(input_spec): + tty.warn( + "explicit splice configuration has caused the concretized spec" + f" {candidate} not to satisfy the input spec {input_spec}" + ) self._concrete_specs.append(answer[node]) self._concrete_specs_by_input[input_spec] = answer[node] else: @@ -854,6 +857,8 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre self.control.load(os.path.join(parent_dir, "libc_compatibility.lp")) else: self.control.load(os.path.join(parent_dir, "os_compatibility.lp")) + if setup.enable_splicing: + self.control.load(os.path.join(parent_dir, "splices.lp")) timer.stop("load") @@ -1166,6 +1171,9 @@ def __init__(self, tests: bool = False): # list of unique libc specs targeted by compilers (or an educated guess if no compiler) self.libcs: List[spack.spec.Spec] = [] + # If true, we have to load the code for synthesizing splices + self.enable_splicing: bool = spack.config.CONFIG.get("concretizer:splice:automatic") + def pkg_version_rules(self, pkg): """Output declared versions of a package. 
@@ -1336,6 +1344,10 @@ def pkg_rules(self, pkg, tests): # dependencies self.package_dependencies_rules(pkg) + # splices + if self.enable_splicing: + self.package_splice_rules(pkg) + # virtual preferences self.virtual_preferences( pkg.name, @@ -1674,6 +1686,94 @@ def dependency_holds(input_spec, requirements): self.gen.newline() + def _gen_match_variant_splice_constraints( + self, + pkg, + cond_spec: "spack.spec.Spec", + splice_spec: "spack.spec.Spec", + hash_asp_var: "AspVar", + splice_node, + match_variants: List[str], + ): + # If there are no variants to match, no constraints are needed + variant_constraints = [] + for i, variant_name in enumerate(match_variants): + vari_defs = pkg.variant_definitions(variant_name) + # the spliceable config of the package always includes the variant + if vari_defs != [] and any(cond_spec.satisfies(s) for (s, _) in vari_defs): + variant = vari_defs[0][1] + if variant.multi: + continue # cannot automatically match multi-valued variants + value_var = AspVar(f"VariValue{i}") + attr_constraint = fn.attr("variant_value", splice_node, variant_name, value_var) + hash_attr_constraint = fn.hash_attr( + hash_asp_var, "variant_value", splice_spec.name, variant_name, value_var + ) + variant_constraints.append(attr_constraint) + variant_constraints.append(hash_attr_constraint) + return variant_constraints + + def package_splice_rules(self, pkg): + self.gen.h2("Splice rules") + for i, (cond, (spec_to_splice, match_variants)) in enumerate( + sorted(pkg.splice_specs.items()) + ): + with named_spec(cond, pkg.name): + self.version_constraints.add((cond.name, cond.versions)) + self.version_constraints.add((spec_to_splice.name, spec_to_splice.versions)) + hash_var = AspVar("Hash") + splice_node = fn.node(AspVar("NID"), cond.name) + when_spec_attrs = [ + fn.attr(c.args[0], splice_node, *(c.args[2:])) + for c in self.spec_clauses(cond, body=True, required_from=None) + if c.args[0] != "node" + ] + splice_spec_hash_attrs = [ + fn.hash_attr(hash_var, *(c.args)) + for c in self.spec_clauses(spec_to_splice, body=True, required_from=None) + if c.args[0] != "node" + ] + if match_variants is None: + variant_constraints = [] + elif match_variants == "*": + filt_match_variants = set() + for map in pkg.variants.values(): + for k in map: + filt_match_variants.add(k) + filt_match_variants = list(filt_match_variants) + variant_constraints = self._gen_match_variant_splice_constraints( + pkg, cond, spec_to_splice, hash_var, splice_node, filt_match_variants + ) + else: + if any( + v in cond.variants or v in spec_to_splice.variants for v in match_variants + ): + raise Exception( + "Overlap between match_variants and explicitly set variants" + ) + variant_constraints = self._gen_match_variant_splice_constraints( + pkg, cond, spec_to_splice, hash_var, splice_node, match_variants + ) + + rule_head = fn.abi_splice_conditions_hold( + i, splice_node, spec_to_splice.name, hash_var + ) + rule_body_components = ( + [ + # splice_set_fact, + fn.attr("node", splice_node), + fn.installed_hash(spec_to_splice.name, hash_var), + ] + + when_spec_attrs + + splice_spec_hash_attrs + + variant_constraints + ) + rule_body = ",\n ".join(str(r) for r in rule_body_components) + rule = f"{rule_head} :-\n {rule_body}." 
+ self.gen.append(rule) + + self.gen.newline() + def virtual_preferences(self, pkg_name, func): """Call func(vspec, provider, i) for each of pkg's provider prefs.""" config = spack.config.get("packages") @@ -2536,8 +2636,9 @@ def concrete_specs(self): for h, spec in self.reusable_and_possible.explicit_items(): # this indicates that there is a spec like this installed self.gen.fact(fn.installed_hash(spec.name, h)) - # this describes what constraints it imposes on the solve - self.impose(h, spec, body=True) + # indirection layer between hash constraints and imposition to allow for splicing + for pred in self.spec_clauses(spec, body=True, required_from=None): + self.gen.fact(fn.hash_attr(h, *pred.args)) self.gen.newline() # Declare as possible parts of specs that are not in package.py # - Add versions to possible versions @@ -3478,6 +3579,14 @@ def consume_facts(self): self._setup.effect_rules() +# This should be a dataclass, but dataclasses don't work on Python 3.6 +class Splice: + def __init__(self, splice_node: NodeArgument, child_name: str, child_hash: str): + self.splice_node = splice_node + self.child_name = child_name + self.child_hash = child_hash + + class SpecBuilder: """Class with actions to rebuild a spec from ASP results.""" @@ -3513,10 +3622,11 @@ def make_node(*, pkg: str) -> NodeArgument: """ return NodeArgument(id="0", pkg=pkg) - def __init__( - self, specs: List[spack.spec.Spec], *, hash_lookup: Optional[ConcreteSpecsByHash] = None - ): + def __init__(self, specs, hash_lookup=None): self._specs: Dict[NodeArgument, spack.spec.Spec] = {} + + # Matches parent nodes to splice node + self._splices: Dict[NodeArgument, List[Splice]] = {} self._result = None self._command_line_specs = specs self._flag_sources: Dict[Tuple[NodeArgument, str], Set[str]] = collections.defaultdict( @@ -3600,16 +3710,8 @@ def external_spec_selected(self, node, idx): def depends_on(self, parent_node, dependency_node, type): dependency_spec = self._specs[dependency_node] - edges = self._specs[parent_node].edges_to_dependencies(name=dependency_spec.name) - edges = [x for x in edges if id(x.spec) == id(dependency_spec)] depflag = dt.flag_from_string(type) - - if not edges: - self._specs[parent_node].add_dependency_edge( - self._specs[dependency_node], depflag=depflag, virtuals=() - ) - else: - edges[0].update_deptypes(depflag=depflag) + self._specs[parent_node].add_dependency_edge(dependency_spec, depflag=depflag, virtuals=()) def virtual_on_edge(self, parent_node, provider_node, virtual): dependencies = self._specs[parent_node].edges_to_dependencies(name=(provider_node.pkg)) @@ -3726,6 +3828,48 @@ def _order_index(flag_group): def deprecated(self, node: NodeArgument, version: str) -> None: tty.warn(f'using "{node.pkg}@{version}" which is a deprecated version') + def splice_at_hash( + self, + parent_node: NodeArgument, + splice_node: NodeArgument, + child_name: str, + child_hash: str, + ): + splice = Splice(splice_node, child_name=child_name, child_hash=child_hash) + self._splices.setdefault(parent_node, []).append(splice) + + def _resolve_automatic_splices(self): + """After all of the specs have been concretized, apply all immediate + splices in size order. This ensures that all dependencies are resolved + before their parents, allowing for maximal sharing and minimal copying. 
+ """ + fixed_specs = {} + for node, spec in sorted(self._specs.items(), key=lambda x: len(x[1])): + immediate = self._splices.get(node, []) + if not immediate and not any( + edge.spec in fixed_specs for edge in spec.edges_to_dependencies() + ): + continue + new_spec = spec.copy(deps=False) + new_spec.build_spec = spec + for edge in spec.edges_to_dependencies(): + depflag = edge.depflag & ~dt.BUILD + if any(edge.spec.dag_hash() == splice.child_hash for splice in immediate): + splice = [s for s in immediate if s.child_hash == edge.spec.dag_hash()][0] + new_spec.add_dependency_edge( + self._specs[splice.splice_node], depflag=depflag, virtuals=edge.virtuals + ) + elif edge.spec in fixed_specs: + new_spec.add_dependency_edge( + fixed_specs[edge.spec], depflag=depflag, virtuals=edge.virtuals + ) + else: + new_spec.add_dependency_edge( + edge.spec, depflag=depflag, virtuals=edge.virtuals + ) + self._specs[node] = new_spec + fixed_specs[spec] = new_spec + @staticmethod def sort_fn(function_tuple) -> Tuple[int, int]: """Ensure attributes are evaluated in the correct order. @@ -3755,7 +3899,6 @@ def build_specs(self, function_tuples): # them here so that directives that build objects (like node and # node_compiler) are called in the right order. self.function_tuples = sorted(set(function_tuples), key=self.sort_fn) - self._specs = {} for name, args in self.function_tuples: if SpecBuilder.ignored_attributes.match(name): @@ -3785,10 +3928,14 @@ def build_specs(self, function_tuples): continue # if we've already gotten a concrete spec for this pkg, - # do not bother calling actions on it + # do not bother calling actions on it except for node_flag_source, + # since node_flag_source is tracking information not in the spec itself + # we also need to keep track of splicing information. spec = self._specs.get(args[0]) if spec and spec.concrete: - continue + do_not_ignore_attrs = ["node_flag_source", "splice_at_hash"] + if name not in do_not_ignore_attrs: + continue action(*args) @@ -3798,7 +3945,7 @@ def build_specs(self, function_tuples): # inject patches -- note that we' can't use set() to unique the # roots here, because the specs aren't complete, and the hash # function will loop forever. 
- roots = [spec.root for spec in self._specs.values() if not spec.root.installed] + roots = [spec.root for spec in self._specs.values()] roots = dict((id(r), r) for r in roots) for root in roots.values(): spack.spec.Spec.inject_patches_variant(root) @@ -3814,6 +3961,8 @@ def build_specs(self, function_tuples): for root in roots.values(): root._finalize_concretization() + self._resolve_automatic_splices() + for s in self._specs.values(): spack.spec.Spec.ensure_no_deprecated(s) @@ -3828,7 +3977,6 @@ def build_specs(self, function_tuples): ) specs = self.execute_explicit_splices() - return specs def execute_explicit_splices(self): @@ -4165,7 +4313,6 @@ def reusable_specs(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]: result = [] for reuse_source in self.reuse_sources: result.extend(reuse_source.selected_specs()) - # If we only want to reuse dependencies, remove the root specs if self.reuse_strategy == ReuseStrategy.DEPENDENCIES: result = [spec for spec in result if not any(root in spec for root in specs)] @@ -4335,11 +4482,10 @@ def __init__(self, provided, conflicts): super().__init__(msg) - self.provided = provided - # Add attribute expected of the superclass interface self.required = None self.constraint_type = None + self.provided = provided class InvalidSpliceError(spack.error.SpackError): diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index f4695be9b90fbf..63a5a711758120 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -1449,25 +1449,71 @@ attr("node_flag", PackageNode, NodeFlag) :- attr("node_flag_set", PackageNode, N %----------------------------------------------------------------------------- -% Installed packages +% Installed Packages %----------------------------------------------------------------------------- -% the solver is free to choose at most one installed hash for each package -{ attr("hash", node(ID, Package), Hash) : installed_hash(Package, Hash) } 1 - :- attr("node", node(ID, Package)), internal_error("Package must resolve to at most one hash"). +#defined installed_hash/2. +#defined abi_splice_conditions_hold/4. + +% These are the previously concretized attributes of the installed package as +% a hash. It has the general form: +% hash_attr(Hash, Attribute, PackageName, Args*) +#defined hash_attr/3. +#defined hash_attr/4. +#defined hash_attr/5. +#defined hash_attr/6. +#defined hash_attr/7. + +{ attr("hash", node(ID, PackageName), Hash): installed_hash(PackageName, Hash) } 1 :- + attr("node", node(ID, PackageName)), + internal_error("Package must resolve to at most 1 hash"). % you can't choose an installed hash for a dev spec :- attr("hash", PackageNode, Hash), attr("variant_value", PackageNode, "dev_path", _). - % You can't install a hash, if it is not installed :- attr("hash", node(ID, Package), Hash), not installed_hash(Package, Hash). -% This should be redundant given the constraint above -:- attr("node", PackageNode), 2 { attr("hash", PackageNode, Hash) }. -% if a hash is selected, we impose all the constraints that implies -impose(Hash, PackageNode) :- attr("hash", PackageNode, Hash). +% hash_attrs are versions, but can_splice_attr are usually node_version_satisfies +hash_attr(Hash, "node_version_satisfies", PackageName, Constraint) :- + hash_attr(Hash, "version", PackageName, Version), + pkg_fact(PackageName, version_satisfies(Constraint, Version)). 
+
+% This recovers the exact semantics for hash reuse. hash and depends_on are where
+% splices are decided, and virtual_on_edge can result in name-changes, which is
+% why they are all treated separately.
+imposed_constraint(Hash, Attr, PackageName) :-
+    hash_attr(Hash, Attr, PackageName).
+imposed_constraint(Hash, Attr, PackageName, A1) :-
+    hash_attr(Hash, Attr, PackageName, A1), Attr != "hash".
+imposed_constraint(Hash, Attr, PackageName, Arg1, Arg2) :-
+    hash_attr(Hash, Attr, PackageName, Arg1, Arg2),
+    Attr != "depends_on",
+    Attr != "virtual_on_edge".
+imposed_constraint(Hash, Attr, PackageName, A1, A2, A3) :-
+    hash_attr(Hash, Attr, PackageName, A1, A2, A3).
+imposed_constraint(Hash, "hash", PackageName, Hash) :- installed_hash(PackageName, Hash).
+% Without splicing, we simply recover the exact semantics
+imposed_constraint(ParentHash, "hash", ChildName, ChildHash) :-
+    hash_attr(ParentHash, "hash", ChildName, ChildHash),
+    ChildHash != ParentHash,
+    not abi_splice_conditions_hold(_, _, ChildName, ChildHash).
+
+imposed_constraint(Hash, "depends_on", PackageName, DepName, Type) :-
+    hash_attr(Hash, "depends_on", PackageName, DepName, Type),
+    hash_attr(Hash, "hash", DepName, DepHash),
+    not attr("splice_at_hash", _, _, DepName, DepHash).
+
+imposed_constraint(Hash, "virtual_on_edge", PackageName, DepName, VirtName) :-
+    hash_attr(Hash, "virtual_on_edge", PackageName, DepName, VirtName),
+    not attr("splice_at_hash", _, _, DepName,_).
+
+% Rules pertaining to attr("splice_at_hash") and abi_splice_conditions_hold will
+% be conditionally loaded from splices.lp
+
+impose(Hash, PackageNode) :- attr("hash", PackageNode, Hash), attr("node", PackageNode).
+
+% If there is not a hash for a package, we build it.
+build(PackageNode) :- attr("node", PackageNode), not concrete(PackageNode).
 
 % Minimizing builds is tricky. We want a minimizing criterion
 % criteria for built specs -- so that they take precedence over the otherwise
 % topmost-priority criterion to reuse what is installed.
 %
+
 % The priority ranges are:
 % 1000+ Optimizations for concretization errors
 % 300 - 1000 Highest priority optimizations for valid solutions
 % 200 - 299 Shifted priorities for build nodes; correspond to priorities 0 - 99.
 % 100 - 199 Unshifted priorities. Currently only includes minimizing #builds.
 % 0 - 99 Priorities for non-built nodes.
 build_priority(PackageNode, 200) :- build(PackageNode), attr("node", PackageNode),
     optimize_for_reuse().
 
 build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", PackageNode),
     pkg_fact(Package, version_declared(Version, Weight, "installed")),
     not optimize_for_reuse().
 
-#defined installed_hash/2.
 
 % This statement, which is a hidden feature of clingo, let us avoid cycles in the DAG
 #edge (A, B) : depends_on(A, B).
-
 %-----------------------------------------------------------------
 % Optimization to avoid errors
 %-----------------------------------------------------------------
diff --git a/lib/spack/spack/solver/core.py b/lib/spack/spack/solver/core.py
index 2530981a21dda6..ba257173a502f1 100644
--- a/lib/spack/spack/solver/core.py
+++ b/lib/spack/spack/solver/core.py
@@ -44,6 +44,17 @@ def _id(thing: Any) -> Union[str, AspObject]:
     return f'"{str(thing)}"'
 
 
+class AspVar(AspObject):
+    """Represents a variable in an ASP rule, allows for conditionally generating
+    rules"""
+
+    def __init__(self, name: str):
+        self.name = name
+
+    def __str__(self) -> str:
+        return str(self.name)
+
+
 @lang.key_ordering
 class AspFunction(AspObject):
     """A term in the ASP logic program"""
@@ -88,6 +99,8 @@ def _argify(self, arg: Any) -> Any:
             return clingo().Number(arg)
         elif isinstance(arg, AspFunction):
             return clingo().Function(arg.name, [self._argify(x) for x in arg.args], positive=True)
+        elif isinstance(arg, AspVar):
+            return clingo().Variable(arg.name)
         return clingo().String(str(arg))
 
     def symbol(self):
diff --git a/lib/spack/spack/solver/display.lp b/lib/spack/spack/solver/display.lp
index 675a9d17d278ee..61d96b25b5ac2c 100644
--- a/lib/spack/spack/solver/display.lp
+++ b/lib/spack/spack/solver/display.lp
@@ -15,7 +15,6 @@
 #show attr/4.
 #show attr/5.
 #show attr/6.
-
 % names of optimization criteria
 #show opt_criterion/2.
 
diff --git a/lib/spack/spack/solver/splices.lp b/lib/spack/spack/solver/splices.lp
new file mode 100644
index 00000000000000..96762c456c18db
--- /dev/null
+++ b/lib/spack/spack/solver/splices.lp
@@ -0,0 +1,56 @@
+% Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+% Spack Project Developers. See the top-level COPYRIGHT file for details.
+%
+% SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+%=============================================================================
+% These rules are conditionally loaded to handle the synthesis of spliced
+% packages.
+% ============================================================================
+% Consider the concrete spec:
+% foo@2.72%gcc@11.4 arch=linux-ubuntu22.04-icelake build_system=autotools ^bar ...
+% It will emit the following facts for reuse (below is a subset)
+% installed_hash("foo", "xxxyyy")
+% hash_attr("xxxyyy", "hash", "foo", "xxxyyy")
+% hash_attr("xxxyyy", "version", "foo", "2.72")
+% hash_attr("xxxyyy", "node_os", "foo", "ubuntu22.04")
+% hash_attr("xxxyyy", "hash", "bar", "zzzqqq")
+% hash_attr("xxxyyy", "depends_on", "foo", "bar", "link")
+% Rules that derive abi_splice_conditions_hold will be generated from
+% use of the `can_splice` directive. They will have the following form:
+% can_splice("foo@1.0.0+a", when="@1.0.1+a", match_variants=["b"]) --->
+% abi_splice_conditions_hold(0, node(SID, "foo"), "foo", BaseHash) :-
+%  installed_hash("foo", BaseHash),
+%  attr("node", node(SID, SpliceName)),
+%  attr("node_version_satisfies", node(SID, "foo"), "1.0.1"),
+%  hash_attr(BaseHash, "node_version_satisfies", "foo", "1.0.0"),
+%  attr("variant_value", node(SID, "foo"), "a", "True"),
+%  hash_attr(BaseHash, "variant_value", "foo", "a", "True"),
+%  attr("variant_value", node(SID, "foo"), "b", VariVar0),
+%  hash_attr(BaseHash, "variant_value", "foo", "b", VariVar0),
+
+% If the splice is valid (i.e. abi_splice_conditions_hold is derived) in the
+% dependency of a concrete spec, the solver is free to choose whether to continue
+% with the exact hash semantics by simply imposing the child hash, or introducing
+% a spliced node as the dependency instead
+{ imposed_constraint(ParentHash, "hash", ChildName, ChildHash) } :-
+    hash_attr(ParentHash, "hash", ChildName, ChildHash),
+    abi_splice_conditions_hold(_, node(SID, SpliceName), ChildName, ChildHash).
+
+attr("splice_at_hash", ParentNode, node(SID, SpliceName), ChildName, ChildHash) :-
+    attr("hash", ParentNode, ParentHash),
+    hash_attr(ParentHash, "hash", ChildName, ChildHash),
+    abi_splice_conditions_hold(_, node(SID, SpliceName), ChildName, ChildHash),
+    ParentHash != ChildHash,
+    not imposed_constraint(ParentHash, "hash", ChildName, ChildHash).
+
+% Names and virtual providers may change when a dependency is spliced in
+imposed_constraint(Hash, "dependency_holds", ParentName, SpliceName, Type) :-
+    hash_attr(Hash, "depends_on", ParentName, DepName, Type),
+    hash_attr(Hash, "hash", DepName, DepHash),
+    attr("splice_at_hash", node(ID, ParentName), node(SID, SpliceName), DepName, DepHash).
+
+imposed_constraint(Hash, "virtual_on_edge", ParentName, SpliceName, VirtName) :-
+    hash_attr(Hash, "virtual_on_edge", ParentName, DepName, VirtName),
+    attr("splice_at_hash", node(ID, ParentName), node(SID, SpliceName), DepName, DepHash).
+
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index d87296a3fb7c79..03a372be4e5284 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -1431,6 +1431,8 @@ def tree(
 class Spec:
     #: Cache for spec's prefix, computed lazily in the corresponding property
     _prefix = None
+    #: Cache for spec's length, computed lazily in the corresponding property
+    _length = None
     abstract_hash = None
 
     @staticmethod
@@ -2907,7 +2909,7 @@ def _mark_concrete(self, value=True):
             if (not value) and s.concrete and s.installed:
                 continue
             elif not value:
-                s.clear_cached_hashes()
+                s.clear_caches()
             s._mark_root_concrete(value)
 
     def _finalize_concretization(self):
@@ -3700,6 +3702,18 @@ def __getitem__(self, name: str):
 
         return child
 
+    def __len__(self):
+        if not self.concrete:
+            raise spack.error.SpecError(f"Cannot get length of abstract spec: {self}")
+
+        if not self._length:
+            self._length = 1 + sum(len(dep) for dep in self.dependencies())
+        return self._length
+
+    def __bool__(self):
+        # Need to define this so __len__ isn't used by default
+        return True
+
     def __contains__(self, spec):
         """True if this spec or some dependency satisfies the spec.
 
@@ -4256,7 +4270,7 @@ def _splice_detach_and_add_dependents(self, replacement, context):
         for ancestor in ancestors_in_context:
             # Only set it if it hasn't been spliced before
             ancestor._build_spec = ancestor._build_spec or ancestor.copy()
-            ancestor.clear_cached_hashes(ignore=(ht.package_hash.attr,))
+            ancestor.clear_caches(ignore=(ht.package_hash.attr,))
         for edge in ancestor.edges_to_dependencies(depflag=dt.BUILD):
             if edge.depflag & ~dt.BUILD:
                 edge.depflag &= ~dt.BUILD
@@ -4450,7 +4464,7 @@ def mask_build_deps(in_spec):
 
         return spec
 
-    def clear_cached_hashes(self, ignore=()):
+    def clear_caches(self, ignore=()):
         """
         Clears all cached hashes in a Spec, while preserving other properties.
""" @@ -4458,7 +4472,9 @@ def clear_cached_hashes(self, ignore=()): if h.attr not in ignore: if hasattr(self, h.attr): setattr(self, h.attr, None) - self._dunder_hash = None + for attr in ("_dunder_hash", "_prefix", "_length"): + if attr not in ignore: + setattr(self, attr, None) def __hash__(self): # If the spec is concrete, we leverage the process hash and just use diff --git a/lib/spack/spack/test/abi_splicing.py b/lib/spack/spack/test/abi_splicing.py new file mode 100644 index 00000000000000..97601c578a0577 --- /dev/null +++ b/lib/spack/spack/test/abi_splicing.py @@ -0,0 +1,234 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +""" Test ABI-based splicing of dependencies """ + +from typing import List + +import pytest + +import spack.config +import spack.deptypes as dt +import spack.package_base +import spack.paths +import spack.repo +import spack.solver.asp +from spack.installer import PackageInstaller +from spack.spec import Spec + + +class CacheManager: + def __init__(self, specs: List[str]) -> None: + self.req_specs = specs + self.concr_specs: List[Spec] + self.concr_specs = [] + + def __enter__(self): + self.concr_specs = [Spec(s).concretized() for s in self.req_specs] + for s in self.concr_specs: + PackageInstaller([s.package], fake=True, explicit=True).install() + + def __exit__(self, exc_type, exc_val, exc_tb): + for s in self.concr_specs: + s.package.do_uninstall() + + +# MacOS and Windows only work if you pass this function pointer rather than a +# closure +def _mock_has_runtime_dependencies(_x): + return True + + +def _make_specs_non_buildable(specs: List[str]): + output_config = {} + for spec in specs: + output_config[spec] = {"buildable": False} + return output_config + + +@pytest.fixture +def splicing_setup(mutable_database, mock_packages, monkeypatch): + spack.config.set("concretizer:reuse", True) + monkeypatch.setattr( + spack.solver.asp, "_has_runtime_dependencies", _mock_has_runtime_dependencies + ) + + +def _enable_splicing(): + spack.config.set("concretizer:splice", {"automatic": True}) + + +def _has_build_dependency(spec: Spec, name: str): + return any(s.name == name for s in spec.dependencies(None, dt.BUILD)) + + +def test_simple_reuse(splicing_setup): + with CacheManager(["splice-z@1.0.0+compat"]): + spack.config.set("packages", _make_specs_non_buildable(["splice-z"])) + assert Spec("splice-z").concretized().satisfies(Spec("splice-z")) + + +def test_simple_dep_reuse(splicing_setup): + with CacheManager(["splice-z@1.0.0+compat"]): + spack.config.set("packages", _make_specs_non_buildable(["splice-z"])) + assert Spec("splice-h@1").concretized().satisfies(Spec("splice-h@1")) + + +def test_splice_installed_hash(splicing_setup): + cache = [ + "splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0", + "splice-h@1.0.2+compat ^splice-z@1.0.0", + ] + with CacheManager(cache): + packages_config = _make_specs_non_buildable(["splice-t", "splice-h"]) + spack.config.set("packages", packages_config) + goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0") + with pytest.raises(Exception): + goal_spec.concretized() + _enable_splicing() + assert goal_spec.concretized().satisfies(goal_spec) + + +def test_splice_build_splice_node(splicing_setup): + with CacheManager(["splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat"]): + spack.config.set("packages", _make_specs_non_buildable(["splice-t"])) + goal_spec = 
Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0+compat") + with pytest.raises(Exception): + goal_spec.concretized() + _enable_splicing() + assert goal_spec.concretized().satisfies(goal_spec) + + +def test_double_splice(splicing_setup): + cache = [ + "splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat", + "splice-h@1.0.2+compat ^splice-z@1.0.1+compat", + "splice-z@1.0.2+compat", + ] + with CacheManager(cache): + freeze_builds_config = _make_specs_non_buildable(["splice-t", "splice-h", "splice-z"]) + spack.config.set("packages", freeze_builds_config) + goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.2+compat") + with pytest.raises(Exception): + goal_spec.concretized() + _enable_splicing() + assert goal_spec.concretized().satisfies(goal_spec) + + +# The next two tests are mirrors of one another +def test_virtual_multi_splices_in(splicing_setup): + cache = [ + "depends-on-virtual-with-abi ^virtual-abi-1", + "depends-on-virtual-with-abi ^virtual-abi-2", + ] + goal_specs = [ + "depends-on-virtual-with-abi ^virtual-abi-multi abi=one", + "depends-on-virtual-with-abi ^virtual-abi-multi abi=two", + ] + with CacheManager(cache): + spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"])) + for gs in goal_specs: + with pytest.raises(Exception): + Spec(gs).concretized() + _enable_splicing() + for gs in goal_specs: + assert Spec(gs).concretized().satisfies(gs) + + +def test_virtual_multi_can_be_spliced(splicing_setup): + cache = [ + "depends-on-virtual-with-abi ^virtual-abi-multi abi=one", + "depends-on-virtual-with-abi ^virtual-abi-multi abi=two", + ] + goal_specs = [ + "depends-on-virtual-with-abi ^virtual-abi-1", + "depends-on-virtual-with-abi ^virtual-abi-2", + ] + with CacheManager(cache): + spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"])) + with pytest.raises(Exception): + for gs in goal_specs: + Spec(gs).concretized() + _enable_splicing() + for gs in goal_specs: + assert Spec(gs).concretized().satisfies(gs) + + +def test_manyvariant_star_matching_variant_splice(splicing_setup): + cache = [ + # can_splice("manyvariants@1.0.0", when="@1.0.1", match_variants="*") + "depends-on-manyvariants ^manyvariants@1.0.0+a+b c=v1 d=v2", + "depends-on-manyvariants ^manyvariants@1.0.0~a~b c=v3 d=v3", + ] + goal_specs = [ + Spec("depends-on-manyvariants ^manyvariants@1.0.1+a+b c=v1 d=v2"), + Spec("depends-on-manyvariants ^manyvariants@1.0.1~a~b c=v3 d=v3"), + ] + with CacheManager(cache): + freeze_build_config = {"depends-on-manyvariants": {"buildable": False}} + spack.config.set("packages", freeze_build_config) + for goal in goal_specs: + with pytest.raises(Exception): + goal.concretized() + _enable_splicing() + for goal in goal_specs: + assert goal.concretized().satisfies(goal) + + +def test_manyvariant_limited_matching(splicing_setup): + cache = [ + # can_splice("manyvariants@2.0.0+a~b", when="@2.0.1~a+b", match_variants=["c", "d"]) + "depends-on-manyvariants@2.0 ^manyvariants@2.0.0+a~b c=v3 d=v2", + # can_splice("manyvariants@2.0.0 c=v1 d=v1", when="@2.0.1+a+b") + "depends-on-manyvariants@2.0 ^manyvariants@2.0.0~a~b c=v1 d=v1", + ] + goal_specs = [ + Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1~a+b c=v3 d=v2"), + Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1+a+b c=v3 d=v3"), + ] + with CacheManager(cache): + freeze_build_config = {"depends-on-manyvariants": {"buildable": False}} + spack.config.set("packages", freeze_build_config) + for s in goal_specs: + with pytest.raises(Exception): + 
s.concretized() + _enable_splicing() + for s in goal_specs: + assert s.concretized().satisfies(s) + + +def test_external_splice_same_name(splicing_setup): + cache = [ + "splice-h@1.0.0 ^splice-z@1.0.0+compat", + "splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.1+compat", + ] + packages_yaml = { + "splice-z": {"externals": [{"spec": "splice-z@1.0.2+compat", "prefix": "/usr"}]} + } + goal_specs = [ + Spec("splice-h@1.0.0 ^splice-z@1.0.2"), + Spec("splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.2"), + ] + with CacheManager(cache): + spack.config.set("packages", packages_yaml) + _enable_splicing() + for s in goal_specs: + assert s.concretized().satisfies(s) + + +def test_spliced_build_deps_only_in_build_spec(splicing_setup): + cache = ["splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.0"] + goal_spec = Spec("splice-t@1.0 ^splice-h@1.0.2 ^splice-z@1.0.0") + + with CacheManager(cache): + _enable_splicing() + concr_goal = goal_spec.concretized() + build_spec = concr_goal._build_spec + # Spec has been spliced + assert build_spec is not None + # Build spec has spliced build dependencies + assert _has_build_dependency(build_spec, "splice-h") + assert _has_build_dependency(build_spec, "splice-z") + # Spliced build dependencies are removed + assert len(concr_goal.dependencies(None, dt.BUILD)) == 0 diff --git a/lib/spack/spack/test/cmd/pkg.py b/lib/spack/spack/test/cmd/pkg.py index d1f0ed139ecac3..1811b1a617fddd 100644 --- a/lib/spack/spack/test/cmd/pkg.py +++ b/lib/spack/spack/test/cmd/pkg.py @@ -311,7 +311,19 @@ def test_pkg_grep(mock_packages, capfd): output, _ = capfd.readouterr() assert output.strip() == "\n".join( spack.repo.PATH.get_pkg_class(name).module.__file__ - for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-vt", "splice-z"] + for name in [ + "depends-on-manyvariants", + "manyvariants", + "splice-a", + "splice-h", + "splice-t", + "splice-vh", + "splice-vt", + "splice-z", + "virtual-abi-1", + "virtual-abi-2", + "virtual-abi-multi", + ] ) # ensure that this string isn't fouhnd diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 38424e951c55cc..9f94d11a08a37a 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -1763,8 +1763,8 @@ def test_package_hash_affects_dunder_and_dag_hash(mock_packages, default_mock_co assert a1.dag_hash() == a2.dag_hash() assert a1.process_hash() == a2.process_hash() - a1.clear_cached_hashes() - a2.clear_cached_hashes() + a1.clear_caches() + a2.clear_caches() # tweak the dag hash of one of these specs new_hash = "00000000000000000000000000000000" diff --git a/var/spack/repos/builtin.mock/packages/depends-on-manyvariants/package.py b/var/spack/repos/builtin.mock/packages/depends-on-manyvariants/package.py new file mode 100644 index 00000000000000..f1314471f917b2 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/depends-on-manyvariants/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class DependsOnManyvariants(Package): + """ + A package with a dependency on `manyvariants`, so that `manyvariants` can + be spliced in tests. 
+ """ + + homepage = "https://www.test.com" + has_code = False + + version("1.0") + version("2.0") + + depends_on("manyvariants@1.0", when="@1.0") + depends_on("manyvariants@2.0", when="@2.0") + + def install(self, spec, prefix): + touch(prefix.bar) diff --git a/var/spack/repos/builtin.mock/packages/depends-on-virtual-with-abi/package.py b/var/spack/repos/builtin.mock/packages/depends-on-virtual-with-abi/package.py new file mode 100644 index 00000000000000..9f281f337b2d35 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/depends-on-virtual-with-abi/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class DependsOnVirtualWithAbi(Package): + """ + This has a virtual dependency on `virtual-with-abi`, mostly for testing + automatic splicing of providers. + """ + + homepage = "https://www.example.com" + has_code = False + + version("1.0") + depends_on("virtual-with-abi") diff --git a/var/spack/repos/builtin.mock/packages/manyvariants/package.py b/var/spack/repos/builtin.mock/packages/manyvariants/package.py new file mode 100644 index 00000000000000..4747fab53f8d3d --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/manyvariants/package.py @@ -0,0 +1,33 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Manyvariants(Package): + """ + A package with 4 different variants of different arities to test the + `match_variants` argument to `can_splice` + """ + + homepage = "https://www.test.com" + has_code = False + + version("2.0.1") + version("2.0.0") + version("1.0.1") + version("1.0.0") + + variant("a", default=True) + variant("b", default=False) + variant("c", values=("v1", "v2", "v3"), multi=False, default="v1") + variant("d", values=("v1", "v2", "v3"), multi=False, default="v1") + + can_splice("manyvariants@1.0.0", when="@1.0.1", match_variants="*") + can_splice("manyvariants@2.0.0+a~b", when="@2.0.1~a+b", match_variants=["c", "d"]) + can_splice("manyvariants@2.0.0 c=v1 d=v1", when="@2.0.1+a+b") + + def install(self, spec, prefix): + touch(prefix.bar) diff --git a/var/spack/repos/builtin.mock/packages/splice-h/package.py b/var/spack/repos/builtin.mock/packages/splice-h/package.py index a54f1e7f7d683a..6f86f09f92e16d 100644 --- a/var/spack/repos/builtin.mock/packages/splice-h/package.py +++ b/var/spack/repos/builtin.mock/packages/splice-h/package.py @@ -12,17 +12,24 @@ class SpliceH(Package): homepage = "http://www.example.com" url = "http://www.example.com/splice-h-1.0.tar.gz" - version("1.0", md5="0123456789abcdef0123456789abcdef") + version("1.0.2") + version("1.0.1") + version("1.0.0") variant("foo", default=False, description="nope") variant("bar", default=False, description="nope") variant("baz", default=False, description="nope") + variant("compat", default=True, description="nope") depends_on("splice-z") depends_on("splice-z+foo", when="+foo") provides("something") provides("somethingelse") + provides("virtual-abi") + + can_splice("splice-h@1.0.0 +compat", when="@1.0.1 +compat") + can_splice("splice-h@1.0.0:1.0.1 +compat", when="@1.0.2 +compat") def install(self, spec, prefix): with open(prefix.join("splice-h"), "w") as f: diff --git 
a/var/spack/repos/builtin.mock/packages/splice-z/package.py b/var/spack/repos/builtin.mock/packages/splice-z/package.py index ff73fbaa03701e..bac33be6000927 100644 --- a/var/spack/repos/builtin.mock/packages/splice-z/package.py +++ b/var/spack/repos/builtin.mock/packages/splice-z/package.py @@ -12,10 +12,16 @@ class SpliceZ(Package): homepage = "http://www.example.com" url = "http://www.example.com/splice-z-1.0.tar.gz" - version("1.0", md5="0123456789abcdef0123456789abcdef") + version("1.0.2") + version("1.0.1") + version("1.0.0") variant("foo", default=False, description="nope") variant("bar", default=False, description="nope") + variant("compat", default=True, description="nope") + + can_splice("splice-z@1.0.0 +compat", when="@1.0.1 +compat") + can_splice("splice-z@1.0.0:1.0.1 +compat", when="@1.0.2 +compat") def install(self, spec, prefix): with open(prefix.join("splice-z"), "w") as f: diff --git a/var/spack/repos/builtin.mock/packages/virtual-abi-1/package.py b/var/spack/repos/builtin.mock/packages/virtual-abi-1/package.py new file mode 100644 index 00000000000000..60a4c64f9e298a --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/virtual-abi-1/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class VirtualAbi1(Package): + """ + This package provides `virtual-with-abi` and is conditionally ABI + compatible with `virtual-abi-multi` + """ + + homepage = "https://www.example.com" + has_code = False + + version("1.0") + + provides("virtual-with-abi") + + can_splice("virtual-abi-multi@1.0 abi=one", when="@1.0") + + def install(self, spec, prefix): + touch(prefix.foo) diff --git a/var/spack/repos/builtin.mock/packages/virtual-abi-2/package.py b/var/spack/repos/builtin.mock/packages/virtual-abi-2/package.py new file mode 100644 index 00000000000000..5725bf504c433d --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/virtual-abi-2/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class VirtualAbi2(Package): + """ + This package provides `virtual-with-abi` and is conditionally ABI + compatible with `virtual-abi-multi` + """ + + homepage = "https://www.example.com" + has_code = False + + version("1.0") + + provides("virtual-with-abi") + + can_splice("virtual-abi-multi@1.0 abi=two", when="@1.0") + + def install(self, spec, prefix): + touch(prefix.foo) diff --git a/var/spack/repos/builtin.mock/packages/virtual-abi-multi/package.py b/var/spack/repos/builtin.mock/packages/virtual-abi-multi/package.py new file mode 100644 index 00000000000000..87cfd31544aeef --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/virtual-abi-multi/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class VirtualAbiMulti(Package):
+    """
+    This package provides `virtual-with-abi` and is ABI compatible with either
+    `virtual-abi-1` or `virtual-abi-2` depending on the value of its `abi`
+    variant
+    """
+
+    homepage = "https://www.example.com"
+    has_code = False
+
+    version("1.0")
+
+    variant("abi", default="custom", multi=False, values=("one", "two", "custom"))
+
+    provides("virtual-with-abi")
+
+    can_splice("virtual-abi-1@1.0", when="@1.0 abi=one")
+    can_splice("virtual-abi-2@1.0", when="@1.0 abi=two")
+
+    def install(self, spec, prefix):
+        touch(prefix.foo)
diff --git a/var/spack/repos/builtin.mock/packages/virtual-with-abi/package.py b/var/spack/repos/builtin.mock/packages/virtual-with-abi/package.py
new file mode 100644
index 00000000000000..1147efd20219e5
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/virtual-with-abi/package.py
@@ -0,0 +1,16 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class VirtualWithAbi(Package):
+    """Virtual package for mocking an interface with stable ABI."""
+
+    homepage = "https://www.abi.org/"
+    virtual = True
+
+    def test_hello(self):
+        print("Hello there!")

From 9642b045130974a0195ad308ad20effb5c467f4d Mon Sep 17 00:00:00 2001
From: dslarm <38504854+dslarm@users.noreply.github.com>
Date: Wed, 13 Nov 2024 05:09:05 +0000
Subject: [PATCH 202/208] Add SVE as a variant for Neoverse N2. Default to
 true, but should be (#47567)

benchmarked to test if that is a correct decision.

---
 var/spack/repos/builtin/packages/gromacs/package.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py
index d273f42299d0f1..5703587a6584f4 100644
--- a/var/spack/repos/builtin/packages/gromacs/package.py
+++ b/var/spack/repos/builtin/packages/gromacs/package.py
@@ -183,7 +183,7 @@ class Gromacs(CMakePackage, CudaPackage):
         "sve",
         default=True,
         description="Enable SVE on aarch64 if available",
-        when="target=neoverse_v1:,neoverse_v2:",
+        when="target=neoverse_v1:,neoverse_v2:,neoverse_n2:",
     )
     variant(
         "sve", default=True, description="Enable SVE on aarch64 if available", when="target=a64fx"

From 181c404af5761170ad512be6f530df4729cf9c37 Mon Sep 17 00:00:00 2001
From: Harmen Stoppels
Date: Wed, 13 Nov 2024 13:03:09 +0100
Subject: [PATCH 203/208] missing and redundant imports (#47577)

---
 lib/spack/spack/cmd/__init__.py               | 1 +
 lib/spack/spack/mirror.py                     | 2 +-
 lib/spack/spack/test/abi_splicing.py          | 3 ---
 lib/spack/spack/test/cmd/init_py_functions.py | 2 ++
 lib/spack/spack/test/cmd/spec.py              | 1 +
 5 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 9449face85a7fc..11569a34c70b44 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -25,6 +25,7 @@
 import spack.extensions
 import spack.parser
 import spack.paths
+import spack.repo
 import spack.spec
 import spack.store
 import spack.traverse as traverse
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index 328a456fc3cb17..fe38d1f963afeb 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -180,7 +180,7 @@ def ensure_mirror_usable(self, direction: str = "push"):
         if errors:
             msg = f"invalid 
{direction} configuration for mirror {self.name}: " msg += "\n ".join(errors) - raise spack.mirror.MirrorError(msg) + raise MirrorError(msg) def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool): # Only allow one to exist in the config diff --git a/lib/spack/spack/test/abi_splicing.py b/lib/spack/spack/test/abi_splicing.py index 97601c578a0577..b36c285b134923 100644 --- a/lib/spack/spack/test/abi_splicing.py +++ b/lib/spack/spack/test/abi_splicing.py @@ -10,9 +10,6 @@ import spack.config import spack.deptypes as dt -import spack.package_base -import spack.paths -import spack.repo import spack.solver.asp from spack.installer import PackageInstaller from spack.spec import Spec diff --git a/lib/spack/spack/test/cmd/init_py_functions.py b/lib/spack/spack/test/cmd/init_py_functions.py index deb6222411b725..d7245c6ecce824 100644 --- a/lib/spack/spack/test/cmd/init_py_functions.py +++ b/lib/spack/spack/test/cmd/init_py_functions.py @@ -4,9 +4,11 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import pytest +import spack.config import spack.environment as ev import spack.error import spack.solver.asp as asp +import spack.store from spack.cmd import ( CommandNameError, PythonNameError, diff --git a/lib/spack/spack/test/cmd/spec.py b/lib/spack/spack/test/cmd/spec.py index 1d0d08f494b595..dda48de4df0599 100644 --- a/lib/spack/spack/test/cmd/spec.py +++ b/lib/spack/spack/test/cmd/spec.py @@ -7,6 +7,7 @@ import pytest +import spack.config import spack.environment as ev import spack.error import spack.spec From 020e30f3e675a0339d88a3a87ca75a9528d35e9d Mon Sep 17 00:00:00 2001 From: psakievich Date: Thu, 14 Nov 2024 00:15:11 -0700 Subject: [PATCH 204/208] Update tutorial version (#47593) --- lib/spack/spack/cmd/tutorial.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/tutorial.py b/lib/spack/spack/cmd/tutorial.py index 478ca52b7f565b..678a9262369c3f 100644 --- a/lib/spack/spack/cmd/tutorial.py +++ b/lib/spack/spack/cmd/tutorial.py @@ -24,7 +24,7 @@ # tutorial configuration parameters -tutorial_branch = "releases/v0.22" +tutorial_branch = "releases/v0.23" tutorial_mirror = "file:///mirror" tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub") From e5c411d8f011c9d72ba0971e1f0df816997d4b39 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 13 Nov 2024 07:21:16 -0800 Subject: [PATCH 205/208] `spack spec`: simplify and unify output (#47574) `spack spec` output has looked like this for a while: ```console > spack spec /v5fn6xo /wd2p2v7 Input spec -------------------------------- - /v5fn6xo Concretized -------------------------------- [+] openssl@3.3.1%apple-clang@16.0.0~docs+shared build_system=generic certs=mozilla arch=darwin-sequoia-m1 [+] ^ca-certificates-mozilla@2023-05-30%apple-clang@16.0.0 build_system=generic arch=darwin-sequoia-m1 ... Input spec -------------------------------- - /wd2p2v7 Concretized -------------------------------- [+] py-six@1.16.0%apple-clang@16.0.0 build_system=python_pip arch=darwin-sequoia-m1 [+] ^py-pip@23.1.2%apple-clang@16.0.0 build_system=generic arch=darwin-sequoia-m1 ``` But the input spec is right there on the CLI, and it doesn't add anything to the output. Also, since #44843, specs concretized in the CLI line can be unified, so it makes sense to display them as we did in #44489 -- as one multi-root tree instead of as multiple single-root trees. 
With this PR, concretize output now looks like this: ```console > spack spec /v5fn6xo /wd2p2v7 [+] openssl@3.3.1%apple-clang@16.0.0~docs+shared build_system=generic certs=mozilla arch=darwin-sequoia-m1 [+] ^ca-certificates-mozilla@2023-05-30%apple-clang@16.0.0 build_system=generic arch=darwin-sequoia-m1 [+] ^gmake@4.4.1%apple-clang@16.0.0~guile build_system=generic arch=darwin-sequoia-m1 [+] ^perl@5.40.0%apple-clang@16.0.0+cpanm+opcode+open+shared+threads build_system=generic arch=darwin-sequoia-m1 [+] ^berkeley-db@18.1.40%apple-clang@16.0.0+cxx~docs+stl build_system=autotools patches=26090f4,b231fcc arch=darwin-sequoia-m1 [+] ^bzip2@1.0.8%apple-clang@16.0.0~debug~pic+shared build_system=generic arch=darwin-sequoia-m1 [+] ^diffutils@3.10%apple-clang@16.0.0 build_system=autotools arch=darwin-sequoia-m1 [+] ^libiconv@1.17%apple-clang@16.0.0 build_system=autotools libs=shared,static arch=darwin-sequoia-m1 [+] ^gdbm@1.23%apple-clang@16.0.0 build_system=autotools arch=darwin-sequoia-m1 [+] ^readline@8.2%apple-clang@16.0.0 build_system=autotools patches=bbf97f1 arch=darwin-sequoia-m1 [+] ^ncurses@6.5%apple-clang@16.0.0~symlinks+termlib abi=none build_system=autotools patches=7a351bc arch=darwin-sequoia-m1 [+] ^pkgconf@2.2.0%apple-clang@16.0.0 build_system=autotools arch=darwin-sequoia-m1 [+] ^zlib-ng@2.2.1%apple-clang@16.0.0+compat+new_strategies+opt+pic+shared build_system=autotools arch=darwin-sequoia-m1 [+] ^gnuconfig@2022-09-17%apple-clang@16.0.0 build_system=generic arch=darwin-sequoia-m1 [+] py-six@1.16.0%apple-clang@16.0.0 build_system=python_pip arch=darwin-sequoia-m1 [+] ^py-pip@23.1.2%apple-clang@16.0.0 build_system=generic arch=darwin-sequoia-m1 [+] ^py-setuptools@69.2.0%apple-clang@16.0.0 build_system=generic arch=darwin-sequoia-m1 [-] ^py-wheel@0.41.2%apple-clang@16.0.0 build_system=generic arch=darwin-sequoia-m1 ... ``` With no input spec displayed -- just the concretization output shown as one consolidated tree and multiple roots. - [x] remove "Input Spec" section and "Concretized" header from `spack spec` output - [x] print concretized specs as one BFS tree instead of multiple --------- Signed-off-by: Todd Gamblin Co-authored-by: Harmen Stoppels --- lib/spack/spack/cmd/spec.py | 65 +++++++++++++------------------------ 1 file changed, 23 insertions(+), 42 deletions(-) diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py index 188e5360886856..d1278a71753970 100644 --- a/lib/spack/spack/cmd/spec.py +++ b/lib/spack/spack/cmd/spec.py @@ -82,14 +82,6 @@ def spec(parser, args): if args.namespaces: fmt = "{namespace}." + fmt - tree_kwargs = { - "cover": args.cover, - "format": fmt, - "hashlen": None if args.very_long else 7, - "show_types": args.types, - "status_fn": install_status_fn if args.install_status else None, - } - # use a read transaction if we are getting install status for every # spec in the DAG. This avoids repeatedly querying the DB. tree_context = lang.nullcontext @@ -99,46 +91,35 @@ def spec(parser, args): env = ev.active_environment() if args.specs: - input_specs = spack.cmd.parse_specs(args.specs) - concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True) - specs = list(zip(input_specs, concretized_specs)) + concrete_specs = spack.cmd.parse_specs(args.specs, concretize=True) elif env: env.concretize() - specs = env.concretized_specs() - - if not args.format: - # environments are printed together in a combined tree() invocation, - # except when using --yaml or --json, which we print spec by spec below. 
- tree_kwargs["key"] = spack.traverse.by_dag_hash - tree_kwargs["hashes"] = args.long or args.very_long - print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs)) - return + concrete_specs = env.concrete_roots() else: tty.die("spack spec requires at least one spec or an active environment") - for input, output in specs: - # With --yaml or --json, just print the raw specs to output - if args.format: + # With --yaml, --json, or --format, just print the raw specs to output + if args.format: + for spec in concrete_specs: if args.format == "yaml": # use write because to_yaml already has a newline. - sys.stdout.write(output.to_yaml(hash=ht.dag_hash)) + sys.stdout.write(spec.to_yaml(hash=ht.dag_hash)) elif args.format == "json": - print(output.to_json(hash=ht.dag_hash)) + print(spec.to_json(hash=ht.dag_hash)) else: - print(output.format(args.format)) - continue - - with tree_context(): - # Only show the headers for input specs that are not concrete to avoid - # repeated output. This happens because parse_specs outputs concrete - # specs for `/hash` inputs. - if not input.concrete: - tree_kwargs["hashes"] = False # Always False for input spec - print("Input spec") - print("--------------------------------") - print(input.tree(**tree_kwargs)) - print("Concretized") - print("--------------------------------") - - tree_kwargs["hashes"] = args.long or args.very_long - print(output.tree(**tree_kwargs)) + print(spec.format(args.format)) + return + + with tree_context(): + print( + spack.spec.tree( + concrete_specs, + cover=args.cover, + format=fmt, + hashlen=None if args.very_long else 7, + show_types=args.types, + status_fn=install_status_fn if args.install_status else None, + hashes=args.long or args.very_long, + key=spack.traverse.by_dag_hash, + ) + ) From 48dfa3c95e812e2668a70a1ba48e4089e48d33c0 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 13 Nov 2024 23:20:03 -0800 Subject: [PATCH 206/208] Spec: prefer a splice-specific method to `__len__` (#47585) Automatic splicing say `Spec` grow a `__len__` method but it's only used in one place and it's not clear the semantics are useful elsewhere. It also runs the risk of Specs one day being confused for other types of containers. Rather than introduce a new function for one algorithm, let's use a more specific method in the splice code. - [x] Use topological ordering in `_resolve_automatic_splices` instead of sorting by node count - [x] delete `Spec.__len__()` and `Spec.__bool__()` --------- Signed-off-by: Todd Gamblin Co-authored-by: Greg Becker Co-authored-by: Massimiliano Culpo --- lib/spack/spack/solver/asp.py | 15 ++++++++++--- lib/spack/spack/solver/concretize.lp | 14 ++++++------ lib/spack/spack/spec.py | 16 +------------- lib/spack/spack/test/abi_splicing.py | 16 ++++++++++++++ lib/spack/spack/test/cmd/pkg.py | 1 + .../packages/splice-depends-on-t/package.py | 22 +++++++++++++++++++ 6 files changed, 59 insertions(+), 25 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/splice-depends-on-t/package.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 32db03f5cf906b..5310c4e3a84c89 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -3839,12 +3839,21 @@ def splice_at_hash( self._splices.setdefault(parent_node, []).append(splice) def _resolve_automatic_splices(self): - """After all of the specs have been concretized, apply all immediate - splices in size order. 
This ensures that all dependencies are resolved + """After all of the specs have been concretized, apply all immediate splices. + + Use reverse topological order to ensure that all dependencies are resolved before their parents, allowing for maximal sharing and minimal copying. + """ fixed_specs = {} - for node, spec in sorted(self._specs.items(), key=lambda x: len(x[1])): + + # create a mapping from dag hash to an integer representing position in reverse topo order. + specs = self._specs.values() + topo_order = list(traverse.traverse_nodes(specs, order="topo", key=traverse.by_dag_hash)) + topo_lookup = {spec.dag_hash(): index for index, spec in enumerate(reversed(topo_order))} + + # iterate over specs, children before parents + for node, spec in sorted(self._specs.items(), key=lambda x: topo_lookup[x[1].dag_hash()]): immediate = self._splices.get(node, []) if not immediate and not any( edge.spec in fixed_specs for edge in spec.edges_to_dependencies() diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 63a5a711758120..9bb237754e1d88 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -1455,7 +1455,7 @@ attr("node_flag", PackageNode, NodeFlag) :- attr("node_flag_set", PackageNode, N #defined installed_hash/2. #defined abi_splice_conditions_hold/4. -% These are the previously concretized attributes of the installed package as +% These are the previously concretized attributes of the installed package as % a hash. It has the general form: % hash_attr(Hash, Attribute, PackageName, Args*) #defined hash_attr/3. @@ -1479,12 +1479,12 @@ hash_attr(Hash, "node_version_satisfies", PackageName, Constraint) :- % This recovers the exact semantics for hash reuse hash and depends_on are where % splices are decided, and virtual_on_edge can result in name-changes, which is -% why they are all treated separately. +% why they are all treated separately. imposed_constraint(Hash, Attr, PackageName) :- hash_attr(Hash, Attr, PackageName). imposed_constraint(Hash, Attr, PackageName, A1) :- hash_attr(Hash, Attr, PackageName, A1), Attr != "hash". -imposed_constraint(Hash, Attr, PackageName, Arg1, Arg2) :- +imposed_constraint(Hash, Attr, PackageName, Arg1, Arg2) :- hash_attr(Hash, Attr, PackageName, Arg1, Arg2), Attr != "depends_on", Attr != "virtual_on_edge". @@ -1492,16 +1492,16 @@ imposed_constraint(Hash, Attr, PackageName, A1, A2, A3) :- hash_attr(Hash, Attr, PackageName, A1, A2, A3). imposed_constraint(Hash, "hash", PackageName, Hash) :- installed_hash(PackageName, Hash). % Without splicing, we simply recover the exact semantics -imposed_constraint(ParentHash, "hash", ChildName, ChildHash) :- +imposed_constraint(ParentHash, "hash", ChildName, ChildHash) :- hash_attr(ParentHash, "hash", ChildName, ChildHash), ChildHash != ParentHash, not abi_splice_conditions_hold(_, _, ChildName, ChildHash). - + imposed_constraint(Hash, "depends_on", PackageName, DepName, Type) :- hash_attr(Hash, "depends_on", PackageName, DepName, Type), hash_attr(Hash, "hash", DepName, DepHash), not attr("splice_at_hash", _, _, DepName, DepHash). - + imposed_constraint(Hash, "virtual_on_edge", PackageName, DepName, VirtName) :- hash_attr(Hash, "virtual_on_edge", PackageName, DepName, VirtName), not attr("splice_at_hash", _, _, DepName,_). @@ -1511,7 +1511,7 @@ imposed_constraint(Hash, "virtual_on_edge", PackageName, DepName, VirtName) :- impose(Hash, PackageNode) :- attr("hash", PackageNode, Hash), attr("node", PackageNode). 
-% If there is not a hash for a package, we build it. +% If there is not a hash for a package, we build it. build(PackageNode) :- attr("node", PackageNode), not concrete(PackageNode). diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 03a372be4e5284..926f35a70451c2 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1431,8 +1431,6 @@ def tree( class Spec: #: Cache for spec's prefix, computed lazily in the corresponding property _prefix = None - #: Cache for spec's length, computed lazily in the corresponding property - _length = None abstract_hash = None @staticmethod @@ -3702,18 +3700,6 @@ def __getitem__(self, name: str): return child - def __len__(self): - if not self.concrete: - raise spack.error.SpecError(f"Cannot get length of abstract spec: {self}") - - if not self._length: - self._length = 1 + sum(len(dep) for dep in self.dependencies()) - return self._length - - def __bool__(self): - # Need to define this so __len__ isn't used by default - return True - def __contains__(self, spec): """True if this spec or some dependency satisfies the spec. @@ -4472,7 +4458,7 @@ def clear_caches(self, ignore=()): if h.attr not in ignore: if hasattr(self, h.attr): setattr(self, h.attr, None) - for attr in ("_dunder_hash", "_prefix", "_length"): + for attr in ("_dunder_hash", "_prefix"): if attr not in ignore: setattr(self, attr, None) diff --git a/lib/spack/spack/test/abi_splicing.py b/lib/spack/spack/test/abi_splicing.py index b36c285b134923..d647647797571f 100644 --- a/lib/spack/spack/test/abi_splicing.py +++ b/lib/spack/spack/test/abi_splicing.py @@ -229,3 +229,19 @@ def test_spliced_build_deps_only_in_build_spec(splicing_setup): assert _has_build_dependency(build_spec, "splice-z") # Spliced build dependencies are removed assert len(concr_goal.dependencies(None, dt.BUILD)) == 0 + + +def test_spliced_transitive_dependency(splicing_setup): + cache = ["splice-depends-on-t@1.0 ^splice-h@1.0.1"] + goal_spec = Spec("splice-depends-on-t^splice-h@1.0.2") + + with CacheManager(cache): + spack.config.set("packages", _make_specs_non_buildable(["splice-depends-on-t"])) + _enable_splicing() + concr_goal = goal_spec.concretized() + # Spec has been spliced + assert concr_goal._build_spec is not None + assert concr_goal["splice-t"]._build_spec is not None + assert concr_goal.satisfies(goal_spec) + # Spliced build dependencies are removed + assert len(concr_goal.dependencies(None, dt.BUILD)) == 0 diff --git a/lib/spack/spack/test/cmd/pkg.py b/lib/spack/spack/test/cmd/pkg.py index 1811b1a617fddd..8dfdf4773de3f3 100644 --- a/lib/spack/spack/test/cmd/pkg.py +++ b/lib/spack/spack/test/cmd/pkg.py @@ -315,6 +315,7 @@ def test_pkg_grep(mock_packages, capfd): "depends-on-manyvariants", "manyvariants", "splice-a", + "splice-depends-on-t", "splice-h", "splice-t", "splice-vh", diff --git a/var/spack/repos/builtin.mock/packages/splice-depends-on-t/package.py b/var/spack/repos/builtin.mock/packages/splice-depends-on-t/package.py new file mode 100644 index 00000000000000..9f38da0daa04f0 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/splice-depends-on-t/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class SpliceDependsOnT(Package): + """Package that depends on splice-t""" + + homepage = "http://www.example.com" + url = "http://www.example.com/splice-depends-on-t-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + depends_on("splice-t") + + def install(self, spec, prefix): + with open(prefix.join("splice-depends-on-t"), "w") as f: + f.write("splice-depends-on-t: {0}".format(prefix)) + f.write("splice-t: {0}".format(spec["splice-t"].prefix)) From 08f1cf9ae2e67bdbd104e45690d27d27e3c156d8 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Wed, 13 Nov 2024 11:38:18 -0800 Subject: [PATCH 207/208] Update CHANGELOG.md for v0.23.0 --- CHANGELOG.md | 364 ++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 363 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d17e49d59656e..4f18c386cb796c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,365 @@ +# v0.23.0 (2024-11-13) + +`v0.23.0` is a major feature release. + +We are planning to make this the last major release before Spack `v1.0` +in June 2025. Alongside `v0.23`, we will be making pre-releases (alpha, +beta, etc.) of `v1.0`, and we encourage users to try them and send us +feedback, either on GitHub or on Slack. You can track the road to +`v1.0` here: + + * https://github.com/spack/spack/releases + * https://github.com/spack/spack/discussions/30634 + +## Features in this Release + +1. **Language virtuals** + + Your packages can now explicitly depend on the languages they require. + Historically, Spack has considered C, C++, and Fortran compiler + dependencies to be implicit. In `v0.23`, you should ensure that + new packages add relevant C, C++, and Fortran dependencies like this: + + ```python + depends_on("c", type="build") + depends_on("cxx", type="build") + depends_on("fortran", type="build") + ``` + + We encourage you to add these annotations to your packages now, to prepare + for Spack `v1.0.0`. In `v1.0.0`, these annotations will be necessary for + your package to use C, C++, and Fortran compilers. Note that you should + *not* add language dependencies to packages that don't need them, e.g., + pure python packages. + + We have already auto-generated these dependencies for packages in the + `builtin` repository (see #45217), based on the types of source files + present in each package's source code. We *may* have added too many or too + few language dependencies, so please submit pull requests to correct + packages if you find that the language dependencies are incorrect. + + Note that we have also backported support for these dependencies to + `v0.21.3` and `v0.22.2`, to make all of them forward-compatible with + `v0.23`. This should allow you to move easily between older and newer Spack + releases without breaking your packages. + +2. **Spec splicing** + + We are working to make binary installation more seamless in Spack. `v0.23` + introduces "splicing", which allows users to deploy binaries using local, + optimized versions of a binary interface, even if they were not built with + that interface. For example, this would allow you to build binaries in the + cloud using `mpich` and install them on a system using a local, optimized + version of `mvapich2` *without rebuilding*. Spack preserves full provenance + for the installed packages and knows that they were built one way but + deployed another. + + Our intent is to leverage this across many key HPC binary packages, + e.g. 
MPI, CUDA, ROCm, and libfabric. + + Fundamentally, splicing allows Spack to redeploy an existing spec with + different dependencies than how it was built. There are two interfaces to + splicing. + + a. Explicit Splicing + + #39136 introduced the explicit splicing interface. In the + concretizer config, you can specify a target spec and a replacement + by hash. + + ```yaml + concretizer: + splice: + explicit: + - target: mpi + replacement: mpich/abcdef + ``` + + Here, every installation that would normally use the target spec will + instead use its replacement. Above, any spec using *any* `mpi` will be + spliced to depend on the specific `mpich` installation requested. This + *can* go wrong if you try to replace something built with, e.g., + `openmpi` with `mpich`, and it is on the user to ensure ABI + compatibility between target and replacement specs. This currently + requires some expertise to use, but it will allow users to reuse the + binaries they create across more machines and environments. + + b. Automatic Splicing (experimental) + + #46729 introduced automatic splicing. In the concretizer config, enable + automatic splicing: + + ```yaml + concretizer: + splice: + automatic: true + ``` + + or run: + + ```console + spack config add concretizer:splice:automatic:true + ``` + + The concretizer will select splices for ABI compatibility to maximize + package reuse. Packages can denote ABI compatibility using the + `can_splice` directive. No packages in Spack yet use this directive, so + if you want to use this feature you will need to add `can_splice` + annotations to your packages. We are working on ways to add more ABI + compatibility information to the Spack package repository, and this + directive may change in the future. + + See the documentation for more details: + * https://spack.readthedocs.io/en/latest/build_settings.html#splicing + * https://spack.readthedocs.io/en/latest/packaging_guide.html#specifying-abi-compatibility + +3. Broader variant propagation + + Since #42931, you can specify propagated variants like `hdf5 + build_type==RelWithDebInfo` or `trilinos ++openmp` to propagate a variant + to all dependencies for which it is relevant. This is valid *even* if the + variant does not exist on the package or its dependencies. + + See https://spack.readthedocs.io/en/latest/basic_usage.html#variants. + +4. Query specs by namespace + + #45416 allows a package's namespace (indicating the repository it came from) + to be treated like a variant. You can request packages from particular repos + like this: + + ```console + spack find zlib namespace=builtin + spack find zlib namespace=myrepo + ``` + + Previously, the spec syntax only allowed namespaces to be prefixes of spec + names, e.g. `builtin.zlib`. The previous syntax still works. + +5. `spack spec` respects environment settings and `unify:true` + + `spack spec` did not previously respect environment lockfiles or + unification settings, which made it difficult to see exactly how a spec + would concretize within an environment. Now it does, so the output you get + with `spack spec` will be *the same* as what your environment will + concretize to when you run `spack concretize`. Similarly, if you provide + multiple specs on the command line with `spack spec`, it will concretize + them together if `unify:true` is set. + + See #47556 and #44843. + +6. 
Less noisy `spack spec` output
+
+   `spack spec` previously showed output like this:
+
+   ```console
+   > spack spec /v5fn6xo
+   Input spec
+   --------------------------------
+   - /v5fn6xo
+
+   Concretized
+   --------------------------------
+   [+] openssl@3.3.1%apple-clang@16.0.0~docs+shared arch=darwin-sequoia-m1
+   ...
+   ```
+
+   But the input spec is redundant, and we know we run `spack spec` to concretize
+   the input spec. `spack spec` now *only* shows the concretized spec. See #47574.
+
+7. Better output for `spack find -c`
+
+   In an environment, `spack find -c` lets you search the concretized, but not
+   yet installed, specs, just as you would the installed ones. As with `spack
+   spec`, this should make it easier for you to see what *will* be built
+   before building and installing it. See #44713.
+
+8. `spack -C <env>`: use an environment's configuration without activation
+
+   Spack environments allow you to associate:
+   1. a set of (possibly concretized) specs, and
+   2. configuration
+
+   When you activate an environment, you're using both of these. Previously, we
+   supported:
+   * `spack -e <env>` to run spack in the context of a specific environment, and
+   * `spack -C <directory>` to run spack using a directory with configuration files.
+
+   You can now also pass an environment to `spack -C` to use *only* the environment's
+   configuration, but not the specs or lockfile. See #45046.
+
+## New commands, options, and directives
+
+* The new `spack env track` command (#41897) takes a non-managed Spack
+  environment and adds a symlink to Spack's `$environments_root` directory, so
+  that it will be included for reference counting for commands like `spack
+  uninstall` and `spack gc`. If you use free-standing directory environments,
+  this is useful for preventing Spack from removing things required by your
+  environments. You can undo this tracking with the `spack env untrack`
+  command.
+
+* Add `-t` short option for `spack --backtrace` (#47227)
+
+  `spack -d / --debug` enables backtraces on error, but it can be very
+  verbose, and sometimes you just want the backtrace. `spack -t / --backtrace`
+  provides that option.
+
+* `gc`: restrict to specific specs (#46790)
+
+  If you only want to garbage-collect specific packages, you can now provide
+  them on the command line. This gives users finer-grained control over what
+  is uninstalled.
+
+* oci buildcaches now support `--only=package`. You can now push *just* a
+  package and not its dependencies to an OCI registry. This allows dependents
+  of non-redistributable specs to be stored in OCI registries without an
+  error. See #45775.
+
+## Notable refactors
+* Variants are now fully conditional
+
+  The `variants` dictionary on packages was previously keyed by variant name,
+  and allowed only one definition of any given variant. Spack is now smart
+  enough to understand that variants may have different values and defaults
+  for different versions. For example, `warpx` prior to `23.06` only supported
+  builds for one dimensionality, and newer `warpx` versions could be built
+  with support for many different dimensions:
+
+  ```python
+  variant(
+      "dims",
+      default="3",
+      values=("1", "2", "3", "rz"),
+      multi=False,
+      description="Number of spatial dimensions",
+      when="@:23.05",
+  )
+  variant(
+      "dims",
+      default="1,2,rz,3",
+      values=("1", "2", "3", "rz"),
+      multi=True,
+      description="Number of spatial dimensions",
+      when="@23.06:",
+  )
+  ```
+
+  Previously, the default for the old version of `warpx` was not respected and
+  had to be specified manually. Now, Spack will select the right variant
+  definition for each version at concretization time. This allows variants to
+  evolve more smoothly over time. See #44425 for details.
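+
+A note tying back to the automatic splicing feature (item 2b under the
+features above): no built-in packages carry `can_splice` annotations yet, so
+the recipe below is a purely illustrative sketch. The `mylib` package, its
+versions, and its `compat` variant are hypothetical; only the directive
+syntax, which mirrors the mock `splice-z` package in Spack's test suite, is
+real.
+
+```python
+from spack.package import *
+
+
+class Mylib(Package):
+    """Hypothetical library illustrating the can_splice directive."""
+
+    homepage = "https://www.example.com"
+    has_code = False
+
+    version("1.0.1")
+    version("1.0.0")
+
+    variant("compat", default=True, description="Stable ABI mode")
+
+    # Assumption for this sketch: 1.0.0 and 1.0.1 are ABI-compatible when
+    # +compat is set, so an installed mylib@1.0.0+compat may be spliced in
+    # wherever a build of mylib@1.0.1+compat was requested.
+    can_splice("mylib@1.0.0 +compat", when="@1.0.1 +compat")
+
+    def install(self, spec, prefix):
+        touch(prefix.bar)
+```
+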
+## Highlighted bugfixes
+
+1. Externals no longer override the preferred provider (#45025).
+
+   External definitions could interfere with package preferences. Now, if
+   `openmpi` is the preferred `mpi`, and an external `mpich` is defined, a new
+   `openmpi` *will* be built if building it is possible. Previously we would
+   prefer `mpich` despite the preference.
+
+2. Composable `cflags` (#41049).
+
+   This release fixes a longstanding bug where concretization would fail if
+   different `cflags` were specified in `packages.yaml`, `compilers.yaml`, or
+   on the CLI. Flags and their ordering are now tracked in the concretizer,
+   and flags from multiple sources will be merged.
+
+3. Fix concretizer unification for included environments (#45139).
+
+## Deprecations, removals, and syntax changes
+
+1. The old concretizer has been removed from Spack, along with the
+   `config:concretizer` config option. Spack will emit a warning if the option
+   is present in user configuration, since it now has no effect. Spack now
+   uses a simpler bootstrapping mechanism, where a JSON prototype is tweaked
+   slightly to get an initial concrete spec to download. See #45215.
+
+2. Best-effort expansion of spec matrices has been removed. This feature did
+   not work with the "new" ASP-based concretizer, and did not work with
+   `unify: True` or `unify: when_possible`. Use the
+   [exclude key](https://spack.readthedocs.io/en/latest/environments.html#spec-matrices)
+   for the environment to exclude invalid components, or use multiple spec
+   matrices to combine the list of specs for which the constraint is valid and
+   the list of specs for which it is not (see the sketch after this list).
+   See #40792.
+
+3. The old Cray `platform` (based on Cray PE modules) has been removed, and
+   `platform=cray` is no longer supported. Since `v0.19`, Spack has handled
+   Cray machines like Linux clusters with extra packages, and we have
+   encouraged using this option to support Cray. The new approach allows us to
+   correctly handle Cray machines with non-SLES operating systems, and it is
+   much more reliable than making assumptions about Cray modules. See the
+   `v0.19` release notes and #43796 for more details.
+
+4. The `config:install_missing_compilers` config option has been deprecated,
+   and it is a no-op when set in `v0.23`. Our new compiler dependency model
+   will replace it with a much more reliable and robust mechanism in `v1.0`.
+   See #46237.
+
+5. Config options that were deprecated in `v0.21` have been removed in
+   `v0.23`. You can now only specify preferences for `compilers`, `targets`,
+   and `providers` globally via the `packages:all:` section. Similarly, you
+   can only specify `versions:` locally for a specific package. See #44061 and
+   #31261 for details.
+
+6. Spack's old test interface has been removed (#45752), having been
+   deprecated in `v0.22.0` (#34236). All `builtin` packages have been updated
+   to use the new interface. See the [stand-alone test documentation](
+   https://spack.readthedocs.io/en/latest/packaging_guide.html#stand-alone-tests).
+
+7. The `spack versions --safe-only` option, deprecated since `v0.21.0`, has
+   been removed. See #45765.
+
+* The `--dependencies` and `--optimize` arguments to `spack ci` have been
+  deprecated. See #45005.
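+
+To illustrate the `exclude` key mentioned in item 2 above, here is a minimal
+sketch; the package and compiler names are placeholders, not recommendations:
+
+```yaml
+spack:
+  specs:
+    - matrix:
+        # cross product: {hdf5, zlib} x {%gcc, %clang}
+        - [hdf5, zlib]
+        - ["%gcc", "%clang"]
+      # drop combinations that are invalid for your site
+      exclude:
+        - hdf5%clang
+```
+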
+## Binary caches
+1. Public binary caches now include an ML stack for Linux/aarch64 (#39666)
+
+   We now build an ML stack for Linux/aarch64 for all pull requests and on
+   develop. The ML stack includes both CPU-only and CUDA builds for Horovod,
+   Hugging Face, JAX, Keras, PyTorch, scikit-learn, TensorBoard, and
+   TensorFlow, and related packages. The CPU-only stack also includes XGBoost.
+   See https://cache.spack.io/tag/develop/?stack=ml-linux-aarch64-cuda.
+
+2. There is also now a stack of developer tools for macOS (#46910), which is
+   analogous to the Linux devtools stack. You can use this to avoid building
+   many common build dependencies. See
+   https://cache.spack.io/tag/develop/?stack=developer-tools-darwin.
+
+## Architecture support
+* archspec has been updated to `v0.2.5`, with support for `zen5`
+* Spack's CUDA package now supports the Grace Hopper `9.0a` compute capability (#45540)
+
+## Windows
+* Windows bootstrapping: `file` and `gpg` (#41810)
+* `scripts` directory added to PATH on Windows for python extensions (#45427)
+* Fix `spack load --list` and `spack unload` on Windows (#35720)
+
+## Other notable changes
+* Bugfix: `spack find -x` in environments (#46798)
+* Spec splices are now robust to duplicate nodes with the same name in a spec (#46382)
+* Cache per-compiler libc calculations for performance (#47213)
+* Fixed a bug in external detection for openmpi (#47541)
+* Mirror configuration allows username/password as environment variables (#46549)
+* Default library search caps maximum depth (#41945)
+* Unify interface for `spack spec` and `spack solve` commands (#47182)
+* Spack no longer RPATHs directories in the default library search path (#44686)
+* Improved performance of Spack database (#46554)
+* Enable package reuse for packages with versions from git refs (#43859)
+* Improved handling for `uuid` virtual on macos (#43002)
+* Improved tracking of task queueing/requeueing in the installer (#46293)
+
+## Spack community stats
+
+* Over 2,000 pull requests updated package recipes
+* 8,307 total packages, 329 new since `v0.22.0`
+  * 140 new Python packages
+  * 14 new R packages
+* 373 people contributed to this release
+  * 357 committers to packages
+  * 60 committers to core
+
+
 # v0.22.2 (2024-09-21)
 
 ## Bugfixes
@@ -419,7 +781,7 @@
 - spack graph: fix coloring with environments (#41240)
 - spack info: sort variants in --variants-by-name (#41389)
 - Spec.format: error on old style format strings (#41934)
-- ASP-based solver: 
+- ASP-based solver:
   - fix infinite recursion when computing concretization errors (#41061)
   - don't error for type mismatch on preferences (#41138)
   - don't emit spurious debug output (#41218)

From c6d4037758140fe15913c29e80cd1547f388ae51 Mon Sep 17 00:00:00 2001
From: Gregory Becker
Date: Wed, 13 Nov 2024 01:04:58 -0800
Subject: [PATCH 208/208] update version number to 0.23.0

---
 lib/spack/spack/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 9a83564db7daa9..8c35e70370f25d 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -11,7 +11,7 @@
 import spack.util.git
 
 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.23.0.dev0"
+__version__ = "0.23.0"
 spack_version = __version__