From 254afc22dbdbbd7d1850e1572956579750442c8f Mon Sep 17 00:00:00 2001 From: bcumming Date: Fri, 20 Mar 2026 15:35:01 +0100 Subject: [PATCH] fix build cache configuration --- stackinator/builder.py | 6 +- stackinator/mirror.py | 80 ++++++++++++--------- stackinator/recipe.py | 6 +- stackinator/templates/Makefile.compilers | 4 +- stackinator/templates/Makefile.environments | 4 +- 5 files changed, 55 insertions(+), 45 deletions(-) diff --git a/stackinator/builder.py b/stackinator/builder.py index 47a73b05..2dc69233 100644 --- a/stackinator/builder.py +++ b/stackinator/builder.py @@ -168,10 +168,12 @@ def generate(self, recipe): # make the paths, in case bwrap is not used, directly write to recipe.mount store_path = self.path / "store" if not recipe.no_bwrap else pathlib.Path(recipe.mount) tmp_path = self.path / "tmp" + config_path = self.path / "config" self.path.mkdir(exist_ok=True, parents=True) store_path.mkdir(exist_ok=True) tmp_path.mkdir(exist_ok=True) + config_path.mkdir(exist_ok=True) # check out the version of spack spack_version = recipe.spack_version @@ -232,7 +234,7 @@ def generate(self, recipe): pre_install_hook=recipe.pre_install_hook, spack_version=spack_version, spack_meta=spack_meta, - gpg_keys=recipe.mirrors.keys, + gpg_keys=recipe.mirrors.key_files(config_path), cache=recipe.build_cache_mirror, exclude_from_cache=["nvhpc", "cuda", "perl"], verbose=False, @@ -301,8 +303,6 @@ def generate(self, recipe): # Generate the system configuration: the compilers, environments, etc. # that are defined for the target cluster. 
- config_path = self.path / "config" - config_path.mkdir(exist_ok=True) packages_path = config_path / "packages.yaml" # the packages.yaml configuration that will be used when building all environments diff --git a/stackinator/mirror.py b/stackinator/mirror.py index e6ba268b..66fa1d12 100644 --- a/stackinator/mirror.py +++ b/stackinator/mirror.py @@ -1,13 +1,15 @@ -from typing import Optional, List, Dict +from typing import Optional, Dict import base64 import io -import magic import os import pathlib +import shutil import urllib.error import urllib.request import yaml +import magic + from . import schema, root_logger @@ -50,7 +52,8 @@ def __init__( self.bootstrap_mirrors = [name for name, mirror in self.mirrors.items() if mirror.get("bootstrap", False)] # Will hold a list of all the gpg keys (public and private) - self._keys: Optional[List[pathlib.Path]] = [] + # Key metadata is collected eagerly at construction; key files are written later by _key_setup(). + self._keys = self._key_init() def _load_mirrors(self, cmdline_cache: Optional[pathlib.Path]) -> Dict[str, Dict]: """Load the mirrors file, if one exists.""" @@ -157,14 +160,12 @@ def _check_mirrors(self): f"Check the url listed in mirrors.yaml in system config. 
\n{e.reason}" ) - @property - def keys(self): + def key_files(self, config_root: pathlib.Path): """Return the list of public and private key file paths.""" - if self._keys is None: raise RuntimeError("The mirror.keys method was accessed before setup_configs() was called.") - - return self._keys + key_dir = config_root / self.KEY_STORE_DIR + return [key_dir / info["path"] for info in self._keys] def setup_configs(self, config_root: pathlib.Path): """Setup all mirror configs in the given config_root.""" @@ -229,34 +230,27 @@ def _create_bootstrap_configs(self, config_root: pathlib.Path): with (config_root / "bootstrap.yaml").open("w") as file: yaml.dump(bootstrap_yaml, file, default_flow_style=False) - def _key_setup(self, key_store: pathlib.Path): - """Validate mirror keys, relocate to key_store, and update mirror config with new key paths.""" - - self._keys = [] - key_store.mkdir(exist_ok=True) + def _key_init(self): + key_info = [] for name, mirror in self.mirrors.items(): - if mirror.get("public_key") is None: + if mirror.get("private_key") is None: continue - key = mirror["public_key"] - - # key will be saved under key_store/mirror_name.gpg - - dest = pathlib.Path(key_store / f"{name}.gpg") + key = mirror["private_key"] # if path, check if abs path, if not, append sys config path in front and check again path = pathlib.Path(os.path.expandvars(key)) + if not path.is_absolute(): # try prepending system config path path = self._system_config_root / path if path.is_file(): - with open(path, "rb") as reader: - binary_key = reader.read() - - # convert base64 key to binary + # use the user-provided file + key_info.append({"path": pathlib.Path(f"{name}.pgp"), "source": path}) else: + # convert base64 key to binary try: binary_key = base64.b64decode(key) except ValueError: @@ -266,16 +260,34 @@ def _key_setup(self, key_store: pathlib.Path): f"Check the key listed in mirrors.yaml in system config." 
) - file_type = magic.from_buffer(binary_key, mime=True) - if file_type not in ("application/x-gnupg-keyring", "application/pgp-keys"): - raise MirrorError( - f"Key for mirror {name} is not a valid GPG key. \n" - f"The file (or base64) was readable, but the data itself was not a PGP key.\n" - f"Check the key listed in mirrors.yaml in system config." - ) + file_type = magic.from_buffer(binary_key, mime=True) + if file_type not in ("application/x-gnupg-keyring", "application/pgp-keys"): + raise MirrorError( + f"Key for mirror {name} is not a valid GPG key. \n" + f"The file (or base64) was readable, but the data itself was not a PGP key.\n" + f"Check the key listed in mirrors.yaml in system config." + ) + + key_info.append({"path": pathlib.Path(f"{name}.pgp"), "source": binary_key}) - # copy key to new destination in key store - with open(dest, "wb") as writer: - writer.write(binary_key) + return key_info + + def _key_setup(self, key_store: pathlib.Path): + """Copy the keys gathered and validated by _key_init() into the key_store directory.""" + + key_store.mkdir(exist_ok=True) - self._keys.append(dest) + for key_info in self._keys: + path = key_store / key_info["path"] + source = key_info["source"] + + match source: + case pathlib.Path(): + # copy source -> path + shutil.copy2(source, path) + case bytes(): + # open path and copy in bytes + with open(path, "wb") as writer: + writer.write(source) + case _: + raise TypeError(f"Expected Path or bytes, got {type(source).__name__}") diff --git a/stackinator/recipe.py b/stackinator/recipe.py index ff3d8e27..0688d0d6 100644 --- a/stackinator/recipe.py +++ b/stackinator/recipe.py @@ -517,10 +517,9 @@ def compiler_files(self): ) makefile_template = env.get_template("Makefile.compilers") - push_to_cache = self.build_cache_mirror is not None files["makefile"] = makefile_template.render( compilers=self.compilers, - push_to_cache=push_to_cache, + buildcache=self.build_cache_mirror, spack_version=self.spack_version, ) @@ 
-548,10 +547,9 @@ def environment_files(self): jenv.filters["py2yaml"] = schema.py2yaml makefile_template = jenv.get_template("Makefile.environments") - push_to_cache = self.build_cache_mirror is not None files["makefile"] = makefile_template.render( environments=self.environments, - push_to_cache=push_to_cache, + buildcache=self.build_cache_mirror, spack_version=self.spack_version, ) diff --git a/stackinator/templates/Makefile.compilers b/stackinator/templates/Makefile.compilers index f9520bd6..d534b310 100644 --- a/stackinator/templates/Makefile.compilers +++ b/stackinator/templates/Makefile.compilers @@ -18,8 +18,8 @@ all:{% for compiler in compilers %} {{ compiler }}/generated/build_cache{% endfo {% for compiler, config in compilers.items() %} {{ compiler }}/generated/build_cache: {{ compiler }}/generated/env -{% if push_to_cache %} - $(SPACK) -e ./{{ compiler }} buildcache create --rebuild-index --only=package alpscache \ +{% if buildcache %} + $(SPACK) -e ./{{ compiler }} buildcache create --rebuild-index --only=package {{ buildcache }} \ $$($(SPACK_HELPER) -e ./{{ compiler }} find --format '{name};{/hash}' \ | grep -v -E '^({% for p in config.exclude_from_cache %}{{ pipejoiner() }}{{ p }}{% endfor %});'\ | cut -d ';' -f2) diff --git a/stackinator/templates/Makefile.environments b/stackinator/templates/Makefile.environments index 5a530232..929e40bc 100644 --- a/stackinator/templates/Makefile.environments +++ b/stackinator/templates/Makefile.environments @@ -17,8 +17,8 @@ all:{% for env in environments %} {{ env }}/generated/build_cache{% endfor %} # Push built packages to a binary cache if a key has been provided {% for env, config in environments.items() %} {{ env }}/generated/build_cache: {{ env }}/generated/view_config -{% if push_to_cache %} - $(SPACK) --color=never -e ./{{ env }} buildcache create --rebuild-index --only=package alpscache \ +{% if buildcache %} + $(SPACK) --color=never -e ./{{ env }} buildcache create --rebuild-index --only=package {{ 
buildcache }} \ $$($(SPACK_HELPER) -e ./{{ env }} find --format '{name};{/hash};version={version}' \ | grep -v -E '^({% for p in config.exclude_from_cache %}{{ pipejoiner() }}{{ p }}{% endfor %});'\ | grep -v -E 'version=git\.'\