From 23837f04e6a2dd730afd50166528f1d9d7305e91 Mon Sep 17 00:00:00 2001
From: Martin Basti
Date: Fri, 18 Oct 2024 11:33:27 +0200
Subject: [PATCH] cachi2: postprocess

Postprocessing plugin that takes the cachi2-fetched dependencies,
generates the metadata expected by OSBS, and prepares the sources in
the build dirs.

Signed-off-by: Martin Basti
---
 atomic_reactor/cli/parser.py                 |   7 +
 atomic_reactor/cli/task.py                   |  12 +-
 atomic_reactor/constants.py                  |   1 +
 atomic_reactor/plugins/cachi2_postprocess.py | 243 +++
 atomic_reactor/tasks/binary.py               |  10 +
 tekton/tasks/binary-container-cachi2.yaml    |  20 +
 tests/cli/test_parser.py                     |   4 +
 tests/cli/test_task.py                       |   6 +
 tests/plugins/test_cachi2_postprocess.py     | 487 +++++++++++++++++++
 9 files changed, 789 insertions(+), 1 deletion(-)
 create mode 100644 atomic_reactor/plugins/cachi2_postprocess.py
 create mode 100644 tests/plugins/test_cachi2_postprocess.py

diff --git a/atomic_reactor/cli/parser.py b/atomic_reactor/cli/parser.py
index a38cd88ba..22ebdbc5f 100644
--- a/atomic_reactor/cli/parser.py
+++ b/atomic_reactor/cli/parser.py
@@ -101,6 +101,13 @@ def parse_args(args: Optional[Sequence[str]] = None) -> dict:
     )
     binary_container_cachi2_init.set_defaults(func=task.binary_container_cachi2_init)
 
+    binary_container_cachi2_postprocess = tasks.add_parser(
+        "binary-container-cachi2-postprocess",
+        help="binary container cachi2 postprocess step",
+        description="Execute binary container cachi2 postprocess step.",
+    )
+    binary_container_cachi2_postprocess.set_defaults(func=task.binary_container_cachi2_postprocess)
+
     binary_container_prebuild = tasks.add_parser(
         "binary-container-prebuild",
         help="binary container pre-build step",
diff --git a/atomic_reactor/cli/task.py b/atomic_reactor/cli/task.py
index d9003a5dd..cc64bae91 100644
--- a/atomic_reactor/cli/task.py
+++ b/atomic_reactor/cli/task.py
@@ -7,7 +7,7 @@
 """
 from atomic_reactor.tasks.binary import (BinaryExitTask, BinaryPostBuildTask, BinaryPreBuildTask,
                                          BinaryInitTask, BinaryCachitoTask,
-                                         BinaryCachi2InitTask,
+                                         BinaryCachi2InitTask, BinaryCachi2PostprocessTask,
                                          InitTaskParams, BinaryExitTaskParams)
 from atomic_reactor.tasks.binary_container_build import BinaryBuildTask, BinaryBuildTaskParams
 from atomic_reactor.tasks.clone import CloneTask
@@ -76,6 +76,16 @@ def binary_container_cachi2_init(task_args: dict):
     return task.run(init_build_dirs=True)
 
 
+def binary_container_cachi2_postprocess(task_args: dict):
+    """Run binary container Cachi2 postprocess step.
+
+    :param task_args: CLI arguments for a binary-container-cachi2-postprocess task
+    """
+    params = TaskParams.from_cli_args(task_args)
+    task = BinaryCachi2PostprocessTask(params)
+    return task.run(init_build_dirs=True)
+
+
 def binary_container_prebuild(task_args: dict):
     """Run binary container pre-build steps.
 
diff --git a/atomic_reactor/constants.py b/atomic_reactor/constants.py
index 0eb4877db..61c96ba6b 100644
--- a/atomic_reactor/constants.py
+++ b/atomic_reactor/constants.py
@@ -113,6 +113,7 @@
 PLUGIN_GENERATE_SBOM = 'generate_sbom'
 PLUGIN_RPMQA = 'all_rpm_packages'
 PLUGIN_CACHI2_INIT = "cachi2_init"
+PLUGIN_CACHI2_POSTPROCESS = "cachi2_postprocess"
 
 # some shared dict keys for build metadata that gets recorded with koji.
 # for consistency of metadata in historical builds, these values basically cannot change.
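For orientation, here is a sketch of the contract the new plugin implements: one entry of the cachi2_init plugin results (this plugin's input) and the matching entry of this plugin's own results, with the shapes taken from the tests in this patch. The concrete paths and values are illustrative only.

    # Input: one entry of workflow.data.plugins_results[PLUGIN_CACHI2_INIT]
    init_result = {
        "name": "first",  # None when a single remote_source is configured
        # Directory prepared by the init step; contains app/, deps/,
        # bom.json, cachi2.env.json and remote-source.tar.gz
        # (the path below is illustrative):
        "source_path": "/build/cachi2_remote_sources/first",
        "remote_source": {
            "repo": "https://git.example.com/team/repo.git",
            "ref": "b55c00f45ec3dfee0c766cea3d395d6e21cc2e5a",
        },
    }

    # Output: the matching entry of this plugin's results
    postprocess_result = {
        "name": "first",
        "remote_source_json": {
            "json": {},  # Cachito-style request JSON from generate_request_json()
            "filename": "remote-source-first.json",
        },
        "remote_source_json_env": {
            "json": [{"name": "GOCACHE", "value": "/remote-source/deps/gomod"}],
            "filename": "remote-source-first.env.json",
        },
        "remote_source_tarball": {
            "filename": "remote-source-first.tar.gz",
            "path": "/build/cachi2_remote_sources/first/remote-source.tar.gz",
        },
    }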
diff --git a/atomic_reactor/plugins/cachi2_postprocess.py b/atomic_reactor/plugins/cachi2_postprocess.py
new file mode 100644
index 000000000..bc0c4da4e
--- /dev/null
+++ b/atomic_reactor/plugins/cachi2_postprocess.py
@@ -0,0 +1,243 @@
+"""
+Copyright (c) 2024 Red Hat, Inc
+All rights reserved.
+
+This software may be modified and distributed under the terms
+of the BSD license. See the LICENSE file for details.
+"""
+import functools
+import json
+import os.path
+import shlex
+from dataclasses import dataclass
+from pathlib import Path
+from shutil import copytree
+from typing import Any, Optional, List, Dict
+
+from atomic_reactor.constants import (
+    CACHITO_ENV_ARG_ALIAS,
+    CACHITO_ENV_FILENAME,
+    PLUGIN_CACHI2_INIT,
+    PLUGIN_CACHI2_POSTPROCESS,
+    REMOTE_SOURCE_DIR,
+    REMOTE_SOURCE_JSON_FILENAME,
+    REMOTE_SOURCE_TARBALL_FILENAME,
+    REMOTE_SOURCE_JSON_ENV_FILENAME,
+)
+from atomic_reactor.dirs import BuildDir
+from atomic_reactor.plugin import Plugin
+
+from atomic_reactor.utils.cachi2 import generate_request_json
+
+
+@dataclass(frozen=True)
+class Cachi2RemoteSource:
+    """Represents a processed remote source.
+
+    name: the name that identifies this remote source (if multiple remote sources were used)
+    json_data: Cachito-compatible JSON representation of the request (generate_request_json)
+    json_env_data: environment variables for this remote source, as read from cachi2.env.json
+    tarball_path: the path of the tarball produced by the cachi2 init step
+    sources_path: the path of the directory holding the fetched app and deps
+    """
+
+    name: Optional[str]
+    json_data: dict
+    json_env_data: List[Dict[str, str]]
+    tarball_path: Path
+    sources_path: Path
+
+    @classmethod
+    def tarball_filename(cls, name: Optional[str]):
+        if name:
+            return f"remote-source-{name}.tar.gz"
+        else:
+            return REMOTE_SOURCE_TARBALL_FILENAME
+
+    @classmethod
+    def json_filename(cls, name: Optional[str]):
+        if name:
+            return f"remote-source-{name}.json"
+        else:
+            return REMOTE_SOURCE_JSON_FILENAME
+
+    @classmethod
+    def json_env_filename(cls, name: Optional[str]):
+        if name:
+            return f"remote-source-{name}.env.json"
+        else:
+            return REMOTE_SOURCE_JSON_ENV_FILENAME
+
+    @property
+    def build_args(self) -> Dict[str, str]:
+        return {
+            env_var['name']: env_var['value']
+            for env_var in self.json_env_data
+        }
+
+
+class Cachi2PostprocessPlugin(Plugin):
+    """Postprocess cachi2 results.
+
+    Take the results of the cachi2_init plugin, generate the remote source
+    metadata expected by OSBS and inject the fetched sources into the build dirs.
+    """
+
+    key = PLUGIN_CACHI2_POSTPROCESS
+    is_allowed_to_fail = False
+    REMOTE_SOURCE = "unpacked_remote_sources"
+
+    def __init__(self, workflow):
+        """
+        :param workflow: DockerBuildWorkflow instance
+        """
+        super(Cachi2PostprocessPlugin, self).__init__(workflow)
+        self._osbs = None
+        self.single_remote_source_params = self.workflow.source.config.remote_source
+        self.multiple_remote_sources_params = self.workflow.source.config.remote_sources
+        self.init_plugin_data = self.workflow.data.plugins_results.get(PLUGIN_CACHI2_INIT)
+
+    def run(self) -> Optional[List[Dict[str, Any]]]:
+        if not self.init_plugin_data:
+            self.log.info('Aborting plugin execution: no cachi2 data provided')
+            return None
+
+        if not (self.single_remote_source_params or self.multiple_remote_sources_params):
+            self.log.info('Aborting plugin execution: missing remote source configuration')
+            return None
+
+        processed_remote_sources = self.postprocess_remote_sources()
+        self.inject_remote_sources(processed_remote_sources)
+
+        return [
+            self.remote_source_to_output(remote_source)
+            for remote_source in processed_remote_sources
+        ]
+
+    def postprocess_remote_sources(self) -> List[Cachi2RemoteSource]:
"""Process remote source requests and return information about the processed sources.""" + + processed_remote_sources = [] + + for remote_source in self.init_plugin_data: + + json_env_path = os.path.join(remote_source['source_path'], 'cachi2.env.json') + with open(json_env_path, 'r') as json_f: + json_env_data = json.load(json_f) + + sbom_path = os.path.join(remote_source['source_path'], 'bom.json') + with open(sbom_path, 'r') as sbom_f: + sbom_data = json.load(sbom_f) + + remote_source_obj = Cachi2RemoteSource( + name=remote_source['name'], + tarball_path=Path(remote_source['source_path'], 'remote-source.tar.gz'), + sources_path=Path(remote_source['source_path']), + json_data=generate_request_json( + remote_source['remote_source'], sbom_data, json_env_data), + json_env_data=json_env_data, + ) + processed_remote_sources.append(remote_source_obj) + return processed_remote_sources + + def inject_remote_sources(self, remote_sources: List[Cachi2RemoteSource]) -> None: + """Inject processed remote sources into build dirs and add build args to workflow.""" + inject_sources = functools.partial(self.inject_into_build_dir, remote_sources) + self.workflow.build_dir.for_all_platforms_copy(inject_sources) + + # For single remote_source workflow, inject all build args directly + if self.single_remote_source_params: + self.workflow.data.buildargs.update(remote_sources[0].build_args) + + self.add_general_buildargs() + + def inject_into_build_dir( + self, remote_sources: List[Cachi2RemoteSource], build_dir: BuildDir, + ) -> List[Path]: + """Inject processed remote sources into a build directory. + + For each remote source, create a dedicated directory, unpack the downloaded tarball + into it and inject the configuration files and an environment file. + + Return a list of the newly created directories. + """ + created_dirs = [] + + for remote_source in remote_sources: + dest_dir = build_dir.path.joinpath(self.REMOTE_SOURCE, remote_source.name or "") + + if dest_dir.exists(): + raise RuntimeError( + f"Conflicting path {dest_dir.relative_to(build_dir.path)} already exists " + "in the dist-git repository" + ) + + dest_dir.mkdir(parents=True) + created_dirs.append(dest_dir) + + # copy app and deps generated by cachito into build_dir + # TODO: reflink? + copytree(remote_source.sources_path/'app', dest_dir/'app', symlinks=True) + copytree(remote_source.sources_path/'deps', dest_dir/'deps', symlinks=True) + + # Create cachito.env file with environment variables received from cachito request + self.generate_cachito_env_file(dest_dir, remote_source.build_args) + + return created_dirs + + def remote_source_to_output(self, remote_source: Cachi2RemoteSource) -> Dict[str, Any]: + """Convert a processed remote source to a dict to be used as output of this plugin.""" + + return { + "name": remote_source.name, + "remote_source_json": { + "json": remote_source.json_data, + "filename": Cachi2RemoteSource.json_filename(remote_source.name), + }, + "remote_source_json_env": { + "json": remote_source.json_env_data, + "filename": Cachi2RemoteSource.json_env_filename(remote_source.name), + }, + "remote_source_tarball": { + "filename": Cachi2RemoteSource.tarball_filename(remote_source.name), + "path": str(remote_source.tarball_path), + }, + } + + def generate_cachito_env_file(self, dest_dir: Path, build_args: Dict[str, str]) -> None: + """ + Generate cachito.env file with exported environment variables received from + cachito request. 
+
+        :param dest_dir: destination directory for the env file
+        :param build_args: build arguments to set
+        """
+        self.log.info('Creating %s file with environment variables '
+                      'received from cachi2', CACHITO_ENV_FILENAME)
+
+        # Use dedicated dir in container build workdir for cachito.env
+        abs_path = dest_dir / CACHITO_ENV_FILENAME
+        with open(abs_path, 'w') as f:
+            f.write('#!/bin/bash\n')
+            for env_var, value in build_args.items():
+                f.write('export {}={}\n'.format(env_var, shlex.quote(value)))
+
+    def add_general_buildargs(self) -> None:
+        """Add general build arguments.
+
+        To copy the sources into the build image, the Dockerfile should contain
+        COPY $REMOTE_SOURCE $REMOTE_SOURCE_DIR
+        or COPY $REMOTE_SOURCES $REMOTE_SOURCES_DIR
+        """
+        if self.multiple_remote_sources_params:
+            args_for_dockerfile_to_add = {
+                'REMOTE_SOURCES': self.REMOTE_SOURCE,
+                'REMOTE_SOURCES_DIR': REMOTE_SOURCE_DIR,
+            }
+        else:
+            args_for_dockerfile_to_add = {
+                'REMOTE_SOURCE': self.REMOTE_SOURCE,
+                'REMOTE_SOURCE_DIR': REMOTE_SOURCE_DIR,
+                CACHITO_ENV_ARG_ALIAS: os.path.join(REMOTE_SOURCE_DIR, CACHITO_ENV_FILENAME),
+            }
+        self.workflow.data.buildargs.update(args_for_dockerfile_to_add)
diff --git a/atomic_reactor/tasks/binary.py b/atomic_reactor/tasks/binary.py
index 3b7e9b076..6289169a4 100644
--- a/atomic_reactor/tasks/binary.py
+++ b/atomic_reactor/tasks/binary.py
@@ -14,6 +14,7 @@
 from atomic_reactor import inner
 from atomic_reactor.constants import (
     PLUGIN_CACHI2_INIT,
+    PLUGIN_CACHI2_POSTPROCESS,
     DOCKERFILE_FILENAME,
 )
 from atomic_reactor.tasks import plugin_based
@@ -89,6 +90,15 @@
     ]
 
 
+class BinaryCachi2PostprocessTask(plugin_based.PluginBasedTask[TaskParams]):
+    """Binary container Cachi2 postprocess task."""
+
+    task_name = 'binary_container_cachi2_postprocess'
+    plugins_conf = [
+        {"name": PLUGIN_CACHI2_POSTPROCESS},
+    ]
+
+
 class BinaryPreBuildTask(plugin_based.PluginBasedTask[TaskParams]):
     """Binary container pre-build task."""
 
diff --git a/tekton/tasks/binary-container-cachi2.yaml b/tekton/tasks/binary-container-cachi2.yaml
index 456cb4f5a..5b43d2b25 100644
--- a/tekton/tasks/binary-container-cachi2.yaml
+++ b/tekton/tasks/binary-container-cachi2.yaml
@@ -121,3 +121,23 @@ spec:
         # single SBOM is the final SBOM
         cp "${SBOMS[0]}" "${CACHI2_DIR}/bom.json"
       fi
+  - name: binary-container-cachi2-postprocess
+    image: $(params.osbs-image)
+    workingDir: $(workspaces.ws-home-dir.path)
+    resources:
+      requests:
+        memory: 512Mi
+        cpu: 250m
+      limits:
+        memory: 1Gi
+        cpu: 395m
+    script: |
+      set -x
+      atomic-reactor -v task \
+        --user-params="$(params.user-params)" \
+        --build-dir="$(workspaces.ws-build-dir.path)" \
+        --context-dir="$(workspaces.ws-context-dir.path)" \
+        --config-file="$(workspaces.ws-reactor-config-map.path)/config.yaml" \
+        --namespace="$(context.taskRun.namespace)" \
+        --pipeline-run-name="$(params.pipeline-run-name)" \
+        binary-container-cachi2-postprocess
diff --git a/tests/cli/test_parser.py b/tests/cli/test_parser.py
index 7fe6eb0a5..8fd8ba0a7 100644
--- a/tests/cli/test_parser.py
+++ b/tests/cli/test_parser.py
@@ -91,6 +91,10 @@ def test_parse_args_version(capsys):
         ["task", *REQUIRED_COMMON_ARGS, "binary-container-cachi2-init"],
         {**EXPECTED_ARGS, "func": task.binary_container_cachi2_init},
     ),
+    (
+        ["task", *REQUIRED_COMMON_ARGS, "binary-container-cachi2-postprocess"],
+        {**EXPECTED_ARGS, "func": task.binary_container_cachi2_postprocess},
+    ),
     (
         ["task", *REQUIRED_COMMON_ARGS, "binary-container-prebuild"],
         {**EXPECTED_ARGS, "func": task.binary_container_prebuild},
diff --git a/tests/cli/test_task.py b/tests/cli/test_task.py
index f26616331..69d750e13 100644
--- a/tests/cli/test_task.py
+++ b/tests/cli/test_task.py
@@ -73,6 +73,12 @@ def test_binary_container_cachi2_init():
     mock(binary.BinaryCachi2InitTask, task_args=TASK_ARGS)
     assert task.binary_container_cachi2_init(TASK_ARGS) == TASK_RESULT
 
+
+def test_binary_container_cachi2_postprocess():
+    mock(binary.BinaryCachi2PostprocessTask, task_args=TASK_ARGS)
+    assert task.binary_container_cachi2_postprocess(TASK_ARGS) == TASK_RESULT
+
+
 def test_binary_container_prebuild():
     mock(binary.BinaryPreBuildTask, task_args=TASK_ARGS)
     assert task.binary_container_prebuild(TASK_ARGS) == TASK_RESULT
diff --git a/tests/plugins/test_cachi2_postprocess.py b/tests/plugins/test_cachi2_postprocess.py
new file mode 100644
index 000000000..662b7e409
--- /dev/null
+++ b/tests/plugins/test_cachi2_postprocess.py
@@ -0,0 +1,487 @@
+"""
+Copyright (c) 2024 Red Hat, Inc
+All rights reserved.
+
+This software may be modified and distributed under the terms
+of the BSD license. See the LICENSE file for details.
+"""
+
+import json
+import io
+import tarfile
+from collections import namedtuple
+from pathlib import Path
+from textwrap import dedent
+from typing import Callable, Dict
+
+import pytest
+import yaml
+
+from atomic_reactor.dirs import BuildDir
+from atomic_reactor.inner import DockerBuildWorkflow
+from atomic_reactor.constants import (
+    CACHI2_BUILD_DIR,
+    CACHI2_BUILD_APP_DIR,
+    CACHI2_SINGLE_REMOTE_SOURCE_NAME,
+    CACHITO_ENV_ARG_ALIAS,
+    CACHITO_ENV_FILENAME,
+    REMOTE_SOURCE_DIR,
+    REMOTE_SOURCE_TARBALL_FILENAME,
+    REMOTE_SOURCE_JSON_FILENAME,
+    REMOTE_SOURCE_JSON_ENV_FILENAME,
+    PLUGIN_CACHI2_INIT,
+)
+from atomic_reactor.plugin import PluginFailedException
+from atomic_reactor.plugins.cachi2_postprocess import (
+    Cachi2PostprocessPlugin,
+    Cachi2RemoteSource,
+)
+from atomic_reactor.source import SourceConfig
+from atomic_reactor.utils.cachi2 import generate_request_json
+
+from tests.mock_env import MockEnv
+from tests.stubs import StubSource
+
+
+FIRST_REMOTE_SOURCE_NAME = "first"
+SECOND_REMOTE_SOURCE_NAME = "second"
+REMOTE_SOURCE_REPO = 'https://git.example.com/team/repo.git'
+REMOTE_SOURCE_REF = 'b55c00f45ec3dfee0c766cea3d395d6e21cc2e5a'
+SECOND_REMOTE_SOURCE_REPO = 'https://git.example.com/other-team/other-repo.git'
+SECOND_REMOTE_SOURCE_REF = 'd55c00f45ec3dfee0c766cea3d395d6e21cc2e5c'
+
+
+RemoteSourceInitResult = namedtuple('RemoteSourceInitResult', ['result', 'env_vars', 'sbom'])
+
+
+def mock_cachi2_init_and_run_plugin(
+        workflow, *args: RemoteSourceInitResult):
+
+    plugin_result = []
+
+    for arg in args:
+        plugin_result.append(arg.result)
+
+        source_root_path = Path(arg.result["source_path"])
+        source_root_path.mkdir(parents=True)
+
+        app_dir = source_root_path / CACHI2_BUILD_APP_DIR
+        app_dir.mkdir()
+
+        name = arg.result["name"] or "single source"
+        with open(app_dir / "app.txt", "w") as f:
+            f.write(f"test app {name}")
+            f.flush()
+
+        deps_dir = source_root_path / "deps"
+        deps_dir.mkdir()
+        with open(deps_dir / "dep.txt", "w") as f:
+            f.write(f"dependency for {name}")
+            f.flush()
+
+        with open(source_root_path / "cachi2.env.json", "w") as f:
+            json.dump(arg.env_vars, f)
+
+        with open(source_root_path / "bom.json", "w") as f:
+            json.dump(arg.sbom, f)
+
+        mock_cachi2_output_tarball(source_root_path / "remote-source.tar.gz")
+
+    workflow.data.plugins_results[PLUGIN_CACHI2_INIT] = plugin_result
+
+
+def mock_reactor_config(workflow, data=None):
+    config = yaml.safe_load(data)
+    workflow.conf.conf = config
+
+
+def mock_repo_config(workflow, data=None):
+    if data is None:
+        data = dedent("""\
+            remote_source:
+              repo: {}
+              ref: {}
+            """.format(REMOTE_SOURCE_REPO, REMOTE_SOURCE_REF))
+
+    workflow._tmpdir.joinpath('container.yaml').write_text(data, "utf-8")
+
+    # The repo config is read when SourceConfig is initialized. Force
+    # reloading here to make usage easier.
+    workflow.source.config = SourceConfig(str(workflow._tmpdir))
+
+
+@pytest.fixture
+def workflow(workflow: DockerBuildWorkflow, source_dir):
+    # Stash the tmpdir in workflow so it can be used later
+    workflow._tmpdir = source_dir
+
+    class MockSource(StubSource):
+
+        def __init__(self, workdir):
+            super(MockSource, self).__init__()
+            self.workdir = workdir
+            self.path = workdir
+
+    workflow.source = MockSource(str(source_dir))
+
+    mock_repo_config(workflow)
+
+    workflow.build_dir.init_build_dirs(["x86_64", "ppc64le"], workflow.source)
+
+    return workflow
+
+
+def expected_build_dir(workflow) -> str:
+    """The primary build_dir that the plugin is expected to work with."""
+    return str(workflow.build_dir.any_platform.path)
+
+
+def mock_cachi2_output_tarball(create_at_path) -> str:
+    """Create a mocked tarball for a remote source at the specified path."""
+    create_at_path = Path(create_at_path)
+    file_content = f"Content of {create_at_path.name}".encode("utf-8")
+
+    readme = tarfile.TarInfo("app/app.txt")
+    readme.size = len(file_content)
+
+    with tarfile.open(create_at_path, 'w:gz') as tar:
+        tar.addfile(readme, io.BytesIO(file_content))
+
+    return str(create_at_path)
+
+
+def check_injected_files(expected_files: Dict[str, str]) -> Callable[[BuildDir], None]:
+    """Make a callable that checks expected files in a BuildDir."""
+
+    def check_files(build_dir: BuildDir) -> None:
+        """Check the presence and content of files in the unpacked_remote_sources directory."""
+        unpacked_remote_sources = build_dir.path / Cachi2PostprocessPlugin.REMOTE_SOURCE
+
+        for path, expected_content in expected_files.items():
+            abspath = unpacked_remote_sources / path
+            assert abspath.read_text() == expected_content
+
+    return check_files
+
+
+def test_skip_when_no_results_from_init(workflow):
+    """Plugin should skip if there are no results from the cachi2_init plugin."""
+    assert run_plugin_with_args(workflow) is None
+
+
+def test_resolve_remote_source_single(workflow):
+
+    remote_source_sbom = {
+        "bomFormat": "CycloneDX",
+        "components": [
+            {
+                "name": "bytes",
+                "purl": "pkg:golang/bytes?type=package",
+                "properties": [
+                    {
+                        "name": "cachi2:found_by",
+                        "value": "cachi2"
+                    }
+                ],
+                "type": "library"
+            },
+        ],
+    }
+
+    remote_source_env_json = [
+        {
+            "name": "GOCACHE",
+            "value": "/remote-source/deps/gomod",
+        },
+    ]
+
+    single_source = {
+        "name": None,
+        "source_path": str(
+            workflow.build_dir.path / CACHI2_BUILD_DIR / CACHI2_SINGLE_REMOTE_SOURCE_NAME),
+        "remote_source": {
+            "repo": REMOTE_SOURCE_REPO,
+            "ref": REMOTE_SOURCE_REF,
+        }
+    }
+
+    mock_cachi2_init_and_run_plugin(
+        workflow,
+        RemoteSourceInitResult(
+            single_source, remote_source_env_json, remote_source_sbom
+        )
+    )
+    expected_plugin_results = [
+        {
+            "name": None,
+            "remote_source_json": {
+                "json": generate_request_json(
+                    single_source["remote_source"], remote_source_sbom,
+                    remote_source_env_json),
+                "filename": REMOTE_SOURCE_JSON_FILENAME,
+            },
+            "remote_source_json_env": {
+                "json": remote_source_env_json,
+                "filename": REMOTE_SOURCE_JSON_ENV_FILENAME,
+            },
+            "remote_source_tarball": {
+                "filename": REMOTE_SOURCE_TARBALL_FILENAME,
"path": str(Path(single_source["source_path"]) / "remote-source.tar.gz"), + }, + }, + ] + + run_plugin_with_args( + workflow, + expected_plugin_results=expected_plugin_results, + ) + + cachito_env_content = dedent( + """\ + #!/bin/bash + export GOCACHE=/remote-source/deps/gomod + """ + ) + + workflow.build_dir.for_each_platform( + check_injected_files( + { + "cachito.env": cachito_env_content, + "app/app.txt": "test app single source", + "deps/dep.txt": "dependency for single source", + }, + ) + ) + + assert workflow.data.buildargs == { + "GOCACHE": "/remote-source/deps/gomod", + "REMOTE_SOURCE": Cachi2PostprocessPlugin.REMOTE_SOURCE, + "REMOTE_SOURCE_DIR": REMOTE_SOURCE_DIR, + CACHITO_ENV_ARG_ALIAS: str(Path(REMOTE_SOURCE_DIR, CACHITO_ENV_FILENAME)), + } + + +def test_multiple_remote_sources(workflow): + + container_yaml_config = dedent( + f"""\ + remote_sources: + - name: {FIRST_REMOTE_SOURCE_NAME} + remote_source: + repo: {REMOTE_SOURCE_REPO} + ref: {REMOTE_SOURCE_REF} + - name: {SECOND_REMOTE_SOURCE_NAME} + remote_source: + repo: {REMOTE_SOURCE_REPO} + ref: {REMOTE_SOURCE_REF} + """ + ) + + reactor_config = dedent("""\ + version: 1 + allow_multiple_remote_sources: true + """) + + first_remote_source_sbom = { + "bomFormat": "CycloneDX", + "components": [ + { + "name": "bytes", + "purl": "pkg:golang/bytes?type=package", + "properties": [ + { + "name": "cachi2:found_by", + "value": "cachi2" + } + ], + "type": "library" + }, + ], + } + + second_remote_source_sbom = { + "bomFormat": "CycloneDX", + "components": [ + { + "name": "bytes", + "purl": "pkg:pip/bytes?type=package", + "properties": [ + { + "name": "cachi2:found_by", + "value": "cachi2" + } + ], + "type": "library" + }, + ], + } + + first_remote_source_env_json = [ + { + "name": "GOCACHE", + "value": "/remote-source/deps/gomod", + }, + ] + + second_remote_source_env_json = [ + { + "name": "PIP_INDEX", + "value": "/remote-source/deps/somewhere-here", + }, + ] + + first_source = { + "name": FIRST_REMOTE_SOURCE_NAME, + "source_path": str(workflow.build_dir.path / CACHI2_BUILD_DIR / FIRST_REMOTE_SOURCE_NAME), + "remote_source": { + "repo": REMOTE_SOURCE_REPO, + "ref": REMOTE_SOURCE_REF, + "pkg_managers": ["gomod"], + "flags": ["gomod-vendor"], + } + } + + second_source = { + "name": SECOND_REMOTE_SOURCE_NAME, + "source_path": str(workflow.build_dir.path / CACHI2_BUILD_DIR / SECOND_REMOTE_SOURCE_NAME), + "remote_source": { + "repo": SECOND_REMOTE_SOURCE_REPO, + "ref": SECOND_REMOTE_SOURCE_REF, + } + } + + mock_repo_config(workflow, data=container_yaml_config) + mock_reactor_config(workflow, reactor_config) + mock_cachi2_init_and_run_plugin( + workflow, + RemoteSourceInitResult( + first_source, first_remote_source_env_json, first_remote_source_sbom), + RemoteSourceInitResult( + second_source, second_remote_source_env_json, second_remote_source_sbom), + ) + expected_plugin_results = [ + { + "name": FIRST_REMOTE_SOURCE_NAME, + "remote_source_json": { + "json": generate_request_json( + first_source["remote_source"], first_remote_source_sbom, + first_remote_source_env_json), + "filename": "remote-source-first.json", + }, + "remote_source_json_env": { + "json": first_remote_source_env_json, + "filename": "remote-source-first.env.json", + }, + "remote_source_tarball": { + "filename": "remote-source-first.tar.gz", + "path": str(Path(first_source["source_path"]) / "remote-source.tar.gz"), + }, + }, + { + "name": SECOND_REMOTE_SOURCE_NAME, + "remote_source_json": { + "json": generate_request_json( + second_source["remote_source"], 
+                    second_remote_source_sbom,
+                    second_remote_source_env_json),
+                "filename": "remote-source-second.json",
+            },
+            "remote_source_json_env": {
+                "json": second_remote_source_env_json,
+                "filename": "remote-source-second.env.json",
+            },
+            "remote_source_tarball": {
+                "filename": "remote-source-second.tar.gz",
+                "path": str(Path(second_source["source_path"]) / "remote-source.tar.gz"),
+            },
+        },
+    ]
+
+    run_plugin_with_args(workflow, expected_plugin_results=expected_plugin_results)
+
+    first_cachito_env = dedent(
+        """\
+        #!/bin/bash
+        export GOCACHE=/remote-source/deps/gomod
+        """
+    )
+    second_cachito_env = dedent(
+        """\
+        #!/bin/bash
+        export PIP_INDEX=/remote-source/deps/somewhere-here
+        """
+    )
+
+    workflow.build_dir.for_each_platform(
+        check_injected_files(
+            {
+                f"{FIRST_REMOTE_SOURCE_NAME}/cachito.env": first_cachito_env,
+                f"{FIRST_REMOTE_SOURCE_NAME}/app/app.txt": f"test app {FIRST_REMOTE_SOURCE_NAME}",
+                f"{FIRST_REMOTE_SOURCE_NAME}/deps/dep.txt": (
+                    f"dependency for {FIRST_REMOTE_SOURCE_NAME}"),
+                f"{SECOND_REMOTE_SOURCE_NAME}/cachito.env": second_cachito_env,
+                f"{SECOND_REMOTE_SOURCE_NAME}/app/app.txt": f"test app {SECOND_REMOTE_SOURCE_NAME}",
+                f"{SECOND_REMOTE_SOURCE_NAME}/deps/dep.txt": (
+                    f"dependency for {SECOND_REMOTE_SOURCE_NAME}"),
+            },
+        )
+    )
+
+    assert workflow.data.buildargs == {
+        "REMOTE_SOURCES": Cachi2PostprocessPlugin.REMOTE_SOURCE,
+        "REMOTE_SOURCES_DIR": REMOTE_SOURCE_DIR,
+    }
+
+
+def run_plugin_with_args(workflow, expect_error=None,
+                         expect_result=True, expected_plugin_results=None):
+    runner = (MockEnv(workflow)
+              .for_plugin(Cachi2PostprocessPlugin.key)
+              .create_runner())
+
+    if expect_error:
+        with pytest.raises(PluginFailedException, match=expect_error):
+            runner.run()
+        return
+
+    results = runner.run()[Cachi2PostprocessPlugin.key]
+
+    if expect_result:
+        assert results == expected_plugin_results
+
+    return results
+
+
+def test_inject_remote_sources_dest_already_exists(workflow):
+    plugin = Cachi2PostprocessPlugin(workflow)
+
+    processed_remote_sources = [
+        Cachi2RemoteSource(
+            name=None,
+            json_data={},
+            json_env_data=[],
+            tarball_path=Path("/does/not/matter"),
+            sources_path=Path("/"),
+        ),
+    ]
+
+    builddir_path = Path(expected_build_dir(workflow))
+    builddir_path.joinpath(Cachi2PostprocessPlugin.REMOTE_SOURCE).mkdir()
+
+    err_msg = "Conflicting path unpacked_remote_sources already exists"
+    with pytest.raises(RuntimeError, match=err_msg):
+        plugin.inject_remote_sources(processed_remote_sources)
+
+
+def test_generate_cachito_env_file_shell_quoting(workflow):
+    plugin = Cachi2PostprocessPlugin(workflow)
+
+    dest_dir = Path(expected_build_dir(workflow))
+    plugin.generate_cachito_env_file(dest_dir, {"foo": "somefile; rm -rf ~"})
+
+    cachito_env = dest_dir / "cachito.env"
+    assert cachito_env.read_text() == dedent(
+        """\
+        #!/bin/bash
+        export foo='somefile; rm -rf ~'
+        """
+    )
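For context, the build args added by add_general_buildargs are meant to be consumed by the Dockerfile, following the COPY pattern quoted in that method's docstring. A hypothetical Dockerfile for the single remote source case; the base image and the build command are illustrative only:

    FROM registry.example.com/golang:latest
    ARG REMOTE_SOURCE
    ARG REMOTE_SOURCE_DIR
    # Copy the unpacked sources injected by this plugin into the image
    COPY $REMOTE_SOURCE $REMOTE_SOURCE_DIR
    WORKDIR $REMOTE_SOURCE_DIR/app
    # cachito.env exports the same variables that are also injected as build args
    RUN . "$REMOTE_SOURCE_DIR/cachito.env" && go build ./...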