diff --git a/.github/workflows/security-scan.yaml b/.github/workflows/security-scan.yaml
new file mode 100644
index 00000000..35b2bed9
--- /dev/null
+++ b/.github/workflows/security-scan.yaml
@@ -0,0 +1,15 @@
+name: Security scan
+on:
+ pull_request:
+ push:
+ branches:
+ - main
+ - hotfix/*
+ - work/secscan # For development
+
+jobs:
+ python-scans:
+ name: Scan Python project
+ uses: canonical/starflow/.github/workflows/scan-python.yaml@main
+ with:
+ packages: python-apt-dev
diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml
index a402e28b..ad9d5328 100644
--- a/.github/workflows/tests.yaml
+++ b/.github/workflows/tests.yaml
@@ -60,10 +60,6 @@ jobs:
3.10
3.12
cache: 'pip'
- - name: Setup LXD
- uses: canonical/setup-lxd@v0.1.1
- with:
- channel: latest/stable
- name: Configure environment
run: |
echo "::group::apt-get"
@@ -111,10 +107,12 @@ jobs:
cache: 'pip'
- name: Setup LXD
uses: canonical/setup-lxd@v0.1.1
- with:
- channel: latest/stable
- name: Configure environment
run: |
+ echo "::group::Begin snap install"
+ echo "Installing snaps in the background while running apt and pip..."
+ sudo snap install --no-wait --channel=beta fetch-service
+ echo "::endgroup::"
echo "::group::apt-get"
sudo apt update
sudo apt-get install -y libapt-pkg-dev
@@ -123,6 +121,9 @@ jobs:
python -m pip install tox
echo "::endgroup::"
mkdir -p results
+ echo "::group::Wait for snap to complete"
+ snap watch --last=install
+ echo "::endgroup::"
- name: Setup Tox environments
run: tox run -e integration-${{ matrix.python }} --notest
- name: Integration tests
diff --git a/craft_application/application.py b/craft_application/application.py
index 90900a01..8a51ab31 100644
--- a/craft_application/application.py
+++ b/craft_application/application.py
@@ -156,6 +156,11 @@ def __init__(
else:
self._work_dir = pathlib.Path.cwd()
+ # Whether the command execution should use the fetch-service
+ self._use_fetch_service = False
+    # The kind of sessions that the fetch-service should create
+ self._fetch_service_policy = "strict"
+
@property
def app_config(self) -> dict[str, Any]:
"""Get the configuration passed to dispatcher.load_command().
@@ -238,6 +243,11 @@ def _configure_services(self, provider_name: str | None) -> None:
build_plan=self._build_plan,
provider_name=provider_name,
)
+ self.services.update_kwargs(
+ "fetch",
+ build_plan=self._build_plan,
+ session_policy=self._fetch_service_policy,
+ )
def _resolve_project_path(self, project_dir: pathlib.Path | None) -> pathlib.Path:
"""Find the project file for the current project.
@@ -368,8 +378,14 @@ def run_managed(self, platform: str | None, build_for: str | None) -> None:
instance_path = pathlib.PosixPath("/root/project")
with self.services.provider.instance(
- build_info, work_dir=self._work_dir
+ build_info,
+ work_dir=self._work_dir,
+ clean_existing=self._use_fetch_service,
) as instance:
+ if self._use_fetch_service:
+ session_env = self.services.fetch.create_session(instance)
+ env.update(session_env)
+
cmd = [self.app.name, *sys.argv[1:]]
craft_cli.emit.debug(
f"Executing {cmd} in instance location {instance_path} with {extra_args}."
@@ -387,6 +403,12 @@ def run_managed(self, platform: str | None, build_for: str | None) -> None:
raise craft_providers.ProviderError(
f"Failed to execute {self.app.name} in instance."
) from exc
+ finally:
+ if self._use_fetch_service:
+ self.services.fetch.teardown_session()
+
+ if self._use_fetch_service:
+ self.services.fetch.shutdown(force=True)
def configure(self, global_args: dict[str, Any]) -> None:
"""Configure the application using any global arguments."""
@@ -482,12 +504,14 @@ def _pre_run(self, dispatcher: craft_cli.Dispatcher) -> None:
At the time this is run, the command is loaded in the dispatcher, but
the project has not yet been loaded.
"""
+ args = dispatcher.parsed_args()
+
# Some commands might have a project_dir parameter. Those commands and
# only those commands should get a project directory, but only when
# not managed.
if self.is_managed():
self.project_dir = pathlib.Path("/root/project")
- elif project_dir := getattr(dispatcher.parsed_args(), "project_dir", None):
+ elif project_dir := getattr(args, "project_dir", None):
self.project_dir = pathlib.Path(project_dir).expanduser().resolve()
if self.project_dir.exists() and not self.project_dir.is_dir():
raise errors.ProjectFileMissingError(
@@ -496,6 +520,11 @@ def _pre_run(self, dispatcher: craft_cli.Dispatcher) -> None:
resolution="Ensure the path entered is correct.",
)
+ fetch_service_policy: str | None = getattr(args, "fetch_service_policy", None)
+ if fetch_service_policy:
+ self._use_fetch_service = True
+ self._fetch_service_policy = fetch_service_policy
+
def get_arg_or_config(
self, parsed_args: argparse.Namespace, item: str
) -> Any: # noqa: ANN401
@@ -682,6 +711,7 @@ def _expand_environment(self, yaml_data: dict[str, Any], build_for: str) -> None
application_name=self.app.name, # not used in environment expansion
cache_dir=pathlib.Path(), # not used in environment expansion
arch=build_for_arch,
+ parallel_build_count=util.get_parallel_build_count(self.app.name),
project_name=yaml_data.get("name", ""),
project_dirs=project_dirs,
project_vars=environment_vars,
diff --git a/craft_application/commands/lifecycle.py b/craft_application/commands/lifecycle.py
index 2d77285c..e1ec5a34 100644
--- a/craft_application/commands/lifecycle.py
+++ b/craft_application/commands/lifecycle.py
@@ -14,11 +14,12 @@
"""Basic lifecycle commands for a Craft Application."""
from __future__ import annotations
+import argparse
import os
import pathlib
import subprocess
import textwrap
-from typing import TYPE_CHECKING, Any
+from typing import Any
from craft_cli import CommandGroup, emit
from craft_parts.features import Features
@@ -26,9 +27,6 @@
from craft_application.commands import base
-if TYPE_CHECKING: # pragma: no cover
- import argparse
-
def get_lifecycle_command_group() -> CommandGroup:
"""Return the lifecycle related command group."""
@@ -355,6 +353,14 @@ def _fill_parser(self, parser: argparse.ArgumentParser) -> None:
help="Output directory for created packages.",
)
+ parser.add_argument(
+ "--use-fetch-service",
+ help=argparse.SUPPRESS,
+ choices=("strict", "permissive"),
+ metavar="policy",
+ dest="fetch_service_policy",
+ )
+
@override
def _run(
self,
@@ -392,6 +398,9 @@ def _run(
_launch_shell()
raise
+ if parsed_args.fetch_service_policy and packages:
+ self._services.fetch.create_project_manifest(packages)
+
if not packages:
emit.progress("No packages created.", permanent=True)
elif len(packages) == 1:
@@ -427,6 +436,12 @@ def _fill_parser(self, parser: argparse.ArgumentParser) -> None:
nargs="*",
help="Optional list of parts to process",
)
+ parser.add_argument(
+ "--platform",
+ type=str,
+ metavar="name",
+ help="Platform to clean",
+ )
@override
def _run(
diff --git a/craft_application/errors.py b/craft_application/errors.py
index cb209a18..0c4a74af 100644
--- a/craft_application/errors.py
+++ b/craft_application/errors.py
@@ -242,3 +242,7 @@ def __init__( # (too many arguments)
reportable=reportable,
retcode=retcode,
)
+
+
+class FetchServiceError(CraftError):
+ """Errors related to the fetch-service."""
diff --git a/craft_application/fetch.py b/craft_application/fetch.py
new file mode 100644
index 00000000..4f78472e
--- /dev/null
+++ b/craft_application/fetch.py
@@ -0,0 +1,523 @@
+# This file is part of craft_application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Utilities to interact with the fetch-service."""
+import contextlib
+import io
+import os
+import pathlib
+import shlex
+import signal
+import subprocess
+import time
+from dataclasses import dataclass
+from typing import Any, cast
+
+import craft_providers
+import requests
+from craft_cli import emit
+from pydantic import Field
+from requests.auth import HTTPBasicAuth
+
+from craft_application import errors, util
+from craft_application.models import CraftBaseModel
+from craft_application.util import retry
+
+
+@dataclass(frozen=True)
+class FetchServiceConfig:
+ """Dataclass for the ports that a fetch-service instance uses."""
+
+ proxy: int
+ """The proxy port, to be passed to the applications to be proxied."""
+ control: int
+ """The control port, to create/terminate sessions, get status, etc."""
+ username: str
+ """The username for auth."""
+ password: str
+ """The password for auth."""
+
+ @property
+ def auth(self) -> str:
+ """Authentication in user:passwd format."""
+ return f"{self.username}:{self.password}"
+
+
+_FETCH_BINARY = "/snap/bin/fetch-service"
+
+_DEFAULT_CONFIG = FetchServiceConfig(
+ proxy=13444,
+ control=13555,
+ username="craft",
+ password="craft", # noqa: S106 (hardcoded-password-func-arg)
+)
+
+# The path to the fetch-service's certificate inside the build instance.
+_FETCH_CERT_INSTANCE_PATH = pathlib.Path(
+ "/usr/local/share/ca-certificates/local-ca.crt"
+)
+
+
+class SessionData(CraftBaseModel):
+ """Fetch service session data."""
+
+ session_id: str = Field(alias="id")
+ token: str
+
+
+class NetInfo:
+ """Network and proxy info linking a fetch-service session and a build instance."""
+
+ def __init__(
+ self, instance: craft_providers.Executor, session_data: SessionData
+ ) -> None:
+ self._gateway = _get_gateway(instance)
+ self._session_data = session_data
+
+ @property
+ def http_proxy(self) -> str:
+ """Proxy string in the 'http://:@:/."""
+ session = self._session_data
+ port = _DEFAULT_CONFIG.proxy
+ gw = self._gateway
+ return f"http://{session.session_id}:{session.token}@{gw}:{port}/"
+
+ @property
+ def env(self) -> dict[str, str]:
+ """Environment variables to use for the proxy."""
+ return {
+ "http_proxy": self.http_proxy,
+ "https_proxy": self.http_proxy,
+ # This makes the requests lib take our cert into account.
+ "REQUESTS_CA_BUNDLE": str(_FETCH_CERT_INSTANCE_PATH),
+ # Same, but for cargo.
+ "CARGO_HTTP_CAINFO": str(_FETCH_CERT_INSTANCE_PATH),
+ # Have go download directly from repositories
+ "GOPROXY": "direct",
+ }
+
+
+def is_service_online() -> bool:
+ """Whether the fetch-service is up and listening."""
+ try:
+ status = get_service_status()
+ except errors.FetchServiceError:
+ return False
+ return "uptime" in status
+
+
+def get_service_status() -> dict[str, Any]:
+ """Get the JSON status of the fetch-service.
+
+ :raises errors.FetchServiceError: if a connection error happens.
+ """
+ response = _service_request("get", "status")
+ return cast(dict[str, Any], response.json())
+
+
+def start_service() -> subprocess.Popen[str] | None:
+ """Start the fetch-service with default ports and auth."""
+ if is_service_online():
+ # Nothing to do, service is already up.
+ return None
+
+ # Check that the fetch service is actually installed
+ if not _check_installed():
+ raise errors.FetchServiceError(
+ "The 'fetch-service' snap is not installed.",
+ resolution=(
+ "Install the fetch-service snap via "
+ "'snap install --channel=beta fetch-service'."
+ ),
+ )
+
+ cmd = [_FETCH_BINARY]
+
+ env = {"FETCH_SERVICE_AUTH": _DEFAULT_CONFIG.auth}
+
+ # Add the public key for the Ubuntu archives
+ archive_keyring = (
+ "/snap/fetch-service/current/usr/share/keyrings/ubuntu-archive-keyring.gpg"
+ )
+ archive_key_id = "F6ECB3762474EDA9D21B7022871920D1991BC93C"
+ archive_key = subprocess.check_output(
+ [
+ "gpg",
+ "--export",
+ "--armor",
+ "--no-default-keyring",
+ "--keyring",
+ archive_keyring,
+ archive_key_id,
+ ],
+ text=True,
+ )
+ env["FETCH_APT_RELEASE_PUBLIC_KEY"] = archive_key
+
+ # Add the ports
+ cmd.append(f"--control-port={_DEFAULT_CONFIG.control}")
+ cmd.append(f"--proxy-port={_DEFAULT_CONFIG.proxy}")
+
+ # Set config and spool directories
+ base_dir = _get_service_base_dir()
+
+ for dir_name in ("config", "spool"):
+ dir_path = base_dir / dir_name
+ dir_path.mkdir(exist_ok=True)
+ cmd.append(f"--{dir_name}={dir_path}")
+
+ cert, cert_key = _obtain_certificate()
+
+ cmd.append(f"--cert={cert}")
+ cmd.append(f"--key={cert_key}")
+
+ # Accept permissive sessions
+ cmd.append("--permissive-mode")
+
+ # Shutdown after 5 minutes with no live sessions
+ cmd.append("--idle-shutdown=300")
+
+ log_filepath = _get_log_filepath()
+ log_filepath.parent.mkdir(parents=True, exist_ok=True)
+
+ str_cmd = f"{shlex.join(cmd)} > {log_filepath.absolute()}"
+ emit.debug(f"Launching fetch-service with '{str_cmd}'")
+
+ fetch_process = subprocess.Popen(
+ ["bash", "-c", str_cmd],
+ env=env,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ text=True,
+ # Start a new session because when killing the service we need to kill
+ # both 'bash' and the 'fetch' it spawns.
+ start_new_session=True,
+ )
+
+ # Wait a bit for the service to come online
+ with contextlib.suppress(subprocess.TimeoutExpired):
+ fetch_process.wait(0.1)
+
+ if fetch_process.poll() is not None:
+ # fetch-service already exited, something is wrong
+ log = log_filepath.read_text()
+ lines = log.splitlines()
+ error_lines = [line for line in lines if "ERROR:" in line]
+ error_text = "\n".join(error_lines)
+
+ if "bind: address already in use" in error_text:
+ proxy, control = _DEFAULT_CONFIG.proxy, _DEFAULT_CONFIG.control
+ message = f"fetch-service ports {proxy} and {control} are already in use."
+ details = None
+ else:
+ message = "Error spawning the fetch-service."
+ details = error_text
+ raise errors.FetchServiceError(message, details=details)
+
+ status = retry(
+ "wait for fetch-service to come online",
+ errors.FetchServiceError,
+ get_service_status, # pyright: ignore[reportArgumentType]
+ )
+ if "uptime" not in status:
+ stop_service(fetch_process)
+ raise errors.FetchServiceError(
+ f"Fetch service did not start correctly: {status}"
+ )
+
+ return fetch_process
+
+
+def stop_service(fetch_process: subprocess.Popen[str]) -> None:
+ """Stop the fetch-service.
+
+ This function first calls terminate(), and then kill() after a short time.
+ """
+ try:
+ os.killpg(os.getpgid(fetch_process.pid), signal.SIGTERM)
+ except ProcessLookupError:
+ return
+
+ # Give the shell and fetch-service a chance to terminate
+ time.sleep(0.2)
+
+ with contextlib.suppress(ProcessLookupError):
+ os.killpg(os.getpgid(fetch_process.pid), signal.SIGKILL)
+
+
+def create_session(*, strict: bool) -> SessionData:
+ """Create a new fetch-service session.
+
+ :param strict: Whether the created session should be strict.
+ :return: a SessionData object containing the session's id and token.
+ """
+ json = {"policy": "strict" if strict else "permissive"}
+ data = _service_request("post", "session", json=json).json()
+
+ return SessionData.unmarshal(data=data)
+
+
+def teardown_session(session_data: SessionData) -> dict[str, Any]:
+ """Stop and cleanup a running fetch-service session.
+
+    :param session_data: the data of a previously-created session.
+ :return: A dict containing the session's report (the contents and format
+ of this dict are still subject to change).
+ """
+ session_id = session_data.session_id
+ session_token = session_data.token
+
+ # Revoke token
+ _revoke_data = _service_request(
+ "delete", f"session/{session_id}/token", json={"token": session_token}
+ ).json()
+
+ # Get session report
+ session_report = _service_request("get", f"session/{session_id}", json={}).json()
+
+ # Delete session
+ _service_request("delete", f"session/{session_id}")
+
+ # Delete session resources
+ _service_request("delete", f"resources/{session_id}")
+
+ return cast(dict[str, Any], session_report)
+
+
+def configure_instance(
+ instance: craft_providers.Executor, session_data: SessionData
+) -> dict[str, str]:
+ """Configure a build instance to use a given fetch-service session."""
+ net_info = NetInfo(instance, session_data)
+
+ _install_certificate(instance)
+ _configure_pip(instance)
+ _configure_snapd(instance, net_info)
+ _configure_apt(instance, net_info)
+
+ return net_info.env
+
+
+def _service_request(
+ verb: str, endpoint: str, json: dict[str, Any] | None = None
+) -> requests.Response:
+ headers = {
+ "Content-type": "application/json",
+ }
+ auth = HTTPBasicAuth(_DEFAULT_CONFIG.username, _DEFAULT_CONFIG.password)
+ try:
+ response = requests.request(
+ verb,
+ f"http://localhost:{_DEFAULT_CONFIG.control}/{endpoint}",
+ auth=auth,
+ headers=headers,
+ json=json, # Use defaults
+ timeout=0.1,
+ )
+ response.raise_for_status()
+ except requests.RequestException as err:
+ message = f"Error with fetch-service {verb.upper()}: {str(err)}"
+ raise errors.FetchServiceError(message)
+
+ return response
+
+
+def _get_service_base_dir() -> pathlib.Path:
+ """Get the base directory to contain the fetch-service's runtime files."""
+ input_line = "sh -c 'echo $SNAP_USER_COMMON'"
+ output = subprocess.check_output(
+ ["snap", "run", "--shell", "fetch-service"], text=True, input=input_line
+ )
+ return pathlib.Path(output.strip())
+
+
+def _install_certificate(instance: craft_providers.Executor) -> None:
+
+ # Push the local certificate
+ cert, _key = _obtain_certificate()
+ instance.push_file(
+ source=cert,
+ destination=_FETCH_CERT_INSTANCE_PATH,
+ )
+ # Update the certificates db
+ instance.execute_run( # pyright: ignore[reportUnknownMemberType]
+ ["/bin/sh", "-c", "/usr/sbin/update-ca-certificates > /dev/null"],
+ check=True,
+ )
+
+
+def _configure_pip(instance: craft_providers.Executor) -> None:
+ instance.execute_run( # pyright: ignore[reportUnknownMemberType]
+ ["mkdir", "-p", "/root/.pip"]
+ )
+ pip_config = b"[global]\ncert=/usr/local/share/ca-certificates/local-ca.crt"
+ instance.push_file_io(
+ destination=pathlib.Path("/root/.pip/pip.conf"),
+ content=io.BytesIO(pip_config),
+ file_mode="0644",
+ )
+
+
+def _configure_snapd(instance: craft_providers.Executor, net_info: NetInfo) -> None:
+ """Configure snapd to use the proxy and see our certificate.
+
+ Note: This *must* be called *after* _install_certificate(), to ensure that
+ when the snapd restart happens the new cert is there.
+ """
+ instance.execute_run( # pyright: ignore[reportUnknownMemberType]
+ ["systemctl", "restart", "snapd"]
+ )
+ for config in ("proxy.http", "proxy.https"):
+ instance.execute_run( # pyright: ignore[reportUnknownMemberType]
+ ["snap", "set", "system", f"{config}={net_info.http_proxy}"]
+ )
+
+
+def _configure_apt(instance: craft_providers.Executor, net_info: NetInfo) -> None:
+ apt_config = f'Acquire::http::Proxy "{net_info.http_proxy}";\n'
+ apt_config += f'Acquire::https::Proxy "{net_info.http_proxy}";\n'
+
+ instance.push_file_io(
+ destination=pathlib.Path("/etc/apt/apt.conf.d/99proxy"),
+ content=io.BytesIO(apt_config.encode("utf-8")),
+ file_mode="0644",
+ )
+ instance.execute_run( # pyright: ignore[reportUnknownMemberType]
+ ["/bin/rm", "-Rf", "/var/lib/apt/lists"],
+ check=True,
+ )
+ env = cast(dict[str, str | None], net_info.env)
+ with emit.open_stream() as fd:
+ instance.execute_run( # pyright: ignore[reportUnknownMemberType]
+ ["apt", "update"], env=env, check=True, stdout=fd, stderr=fd
+ )
+
+
+def _get_gateway(instance: craft_providers.Executor) -> str:
+ from craft_providers.lxd import LXDInstance
+
+ if not isinstance(instance, LXDInstance):
+ raise TypeError("Don't know how to handle non-lxd instances")
+
+ instance_name = instance.instance_name
+ project = instance.project
+ output = subprocess.check_output(
+ ["lxc", "--project", project, "config", "show", instance_name, "--expanded"],
+ text=True,
+ )
+ config = util.safe_yaml_load(io.StringIO(output))
+ network = config["devices"]["eth0"]["network"]
+
+ route = subprocess.check_output(
+ ["ip", "route", "show", "dev", network],
+ text=True,
+ )
+ return route.strip().split()[-1]
+
+
+def _obtain_certificate() -> tuple[pathlib.Path, pathlib.Path]:
+ """Retrieve, possibly creating, the certificate and key for the fetch service.
+
+ :return: The full paths to the self-signed certificate and its private key.
+ """
+ cert_dir = _get_certificate_dir()
+
+ cert_dir.mkdir(parents=True, exist_ok=True)
+
+ cert = cert_dir / "local-ca.pem"
+ key = cert_dir / "local-ca.key.pem"
+
+ if cert.is_file() and key.is_file():
+ # Certificate and key already generated
+ # TODO check that the certificate hasn't expired
+ return cert, key
+
+ # At least one is missing, regenerate both
+ key_tmp = cert_dir / "key-tmp.pem"
+ cert_tmp = cert_dir / "cert-tmp.pem"
+
+ # Create the key
+ subprocess.run(
+ [
+ "openssl",
+ "genrsa",
+ "-aes256",
+ "-passout",
+ "pass:1",
+ "-out",
+ key_tmp,
+ "4096",
+ ],
+ check=True,
+ )
+
+ subprocess.run(
+ [
+ "openssl",
+ "rsa",
+ "-passin",
+ "pass:1",
+ "-in",
+ key_tmp,
+ "-out",
+ key_tmp,
+ ],
+ check=True,
+ )
+
+ # Create a certificate with the key
+ subprocess.run(
+ [
+ "openssl",
+ "req",
+ "-subj",
+ "/CN=root@localhost",
+ "-key",
+ key_tmp,
+ "-new",
+ "-x509",
+ "-days",
+ "7300",
+ "-sha256",
+ "-extensions",
+ "v3_ca",
+ "-out",
+ cert_tmp,
+ ],
+ check=True,
+ )
+
+ cert_tmp.rename(cert)
+ key_tmp.rename(key)
+
+ return cert, key
+
+
+def _get_certificate_dir() -> pathlib.Path:
+ """Get the location that should contain the fetch-service certificate and key."""
+ base_dir = _get_service_base_dir()
+
+ return base_dir / "craft/fetch-certificate"
+
+
+def _check_installed() -> bool:
+ """Check whether the fetch-service is installed."""
+ return pathlib.Path(_FETCH_BINARY).is_file()
+
+
+def _get_log_filepath() -> pathlib.Path:
+ base_dir = _get_service_base_dir()
+
+ return base_dir / "craft/fetch-log.txt"
diff --git a/craft_application/models/manifest.py b/craft_application/models/manifest.py
new file mode 100644
index 00000000..078f96c1
--- /dev/null
+++ b/craft_application/models/manifest.py
@@ -0,0 +1,160 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Models representing manifests for projects and fetch-service assets."""
+import hashlib
+import pathlib
+from datetime import datetime, timezone
+from typing import Any, Literal
+
+from pydantic import Field
+from typing_extensions import Self, override
+
+from craft_application import models
+from craft_application.models import CraftBaseModel
+
+
+class Hashes(CraftBaseModel):
+ """Digests identifying an artifact/asset."""
+
+ sha1: str
+ sha256: str
+
+ @classmethod
+ def from_path(cls, path: pathlib.Path) -> Self:
+ """Compute digests for a given path."""
+ read_bytes = path.read_bytes()
+
+ return cls(
+ sha1=hashlib.sha1( # noqa: S324 (insecure hash function)
+ read_bytes
+ ).hexdigest(),
+ sha256=hashlib.sha256(read_bytes).hexdigest(),
+ )
+
+
+class ComponentID(CraftBaseModel):
+ """Unique identifications for an artifact/asset."""
+
+ hashes: Hashes
+
+
+class BaseManifestModel(CraftBaseModel):
+ """Common properties shared between project and fetch-service manifests."""
+
+ component_name: str
+ component_version: str
+ component_description: str
+ component_id: ComponentID
+ architecture: str
+
+
+class ProjectManifest(BaseManifestModel):
+ """Model for the project-specific properties of the craft manifest."""
+
+ license: str | None = None
+ comment: str | None = None
+ metadata_generator: Literal["Craft Application"] = "Craft Application"
+ creation_timestamp: str
+
+ @override
+ def marshal(self) -> dict[str, str | list[str] | dict[str, Any]]:
+ """Overridden to include the metadata_generator constant field."""
+ return self.model_dump(
+ mode="json",
+ by_alias=True,
+ exclude_none=True,
+ exclude_defaults=False, # to include 'metadata_generator'
+ )
+
+ @classmethod
+ def from_packed_artifact(
+ cls,
+ project: models.Project,
+ build_info: models.BuildInfo,
+ artifact: pathlib.Path,
+ ) -> Self:
+ """Create the project manifest for a packed artifact."""
+ hashes = Hashes.from_path(artifact)
+
+ now = datetime.now(timezone.utc)
+
+ return cls.unmarshal(
+ {
+ "component-name": project.name,
+ "component-version": project.version,
+ "component-description": project.summary,
+ "component-id": {"hashes": hashes.marshal()},
+ "architecture": build_info.build_for,
+ "license": project.license,
+ "creation_timestamp": now.isoformat(),
+ }
+ )
+
+
+class SessionArtifactManifest(BaseManifestModel):
+ """Model for an artifact downloaded during the fetch-service session."""
+
+ component_type: str = Field(alias="type")
+ component_author: str
+ component_vendor: str
+ size: int
+ url: list[str]
+
+ @classmethod
+ def from_session_report(cls, report: dict[str, Any]) -> list[Self]:
+ """Create session manifests from a fetch-session report."""
+ artifacts: list[Self] = []
+ for artifact in report["artefacts"]:
+ metadata = artifact["metadata"]
+ data = {
+ "type": metadata["type"],
+ "component-name": metadata["name"],
+ "component-version": metadata["version"],
+ "component-description": metadata["description"],
+ # "architecture" is only present on the metadata if applicable.
+ "architecture": metadata.get("architecture", ""),
+ "component-id": {
+ "hashes": {"sha1": metadata["sha1"], "sha256": metadata["sha256"]}
+ },
+ "component-author": metadata["author"],
+ "component-vendor": metadata["vendor"],
+ "size": metadata["size"],
+ "url": [d["url"] for d in artifact["downloads"]],
+ }
+ artifacts.append(cls.unmarshal(data))
+
+ return artifacts
+
+
+class CraftManifest(ProjectManifest):
+ """Full manifest for a generated artifact.
+
+ Includes project metadata and information on assets downloaded through a
+ fetch-service session.
+ """
+
+ dependencies: list[SessionArtifactManifest]
+
+ @classmethod
+ def create_craft_manifest(
+ cls, project_manifest_path: pathlib.Path, session_report: dict[str, Any]
+ ) -> Self:
+ """Create the full Craft manifest from a project and session report."""
+ project = ProjectManifest.from_yaml_file(project_manifest_path)
+ session_deps = SessionArtifactManifest.from_session_report(session_report)
+
+ data = {**project.marshal(), "dependencies": session_deps}
+ return cls.model_validate(data)
diff --git a/craft_application/models/project.py b/craft_application/models/project.py
index 81f1acfe..b7b92286 100644
--- a/craft_application/models/project.py
+++ b/craft_application/models/project.py
@@ -19,15 +19,18 @@
"""
import abc
import dataclasses
+import warnings
from collections.abc import Mapping
-from typing import Annotated, Any
+from typing import Annotated, Any, cast
import craft_parts
+import craft_platforms
import craft_providers.bases
import pydantic
from craft_cli import emit
from craft_providers import bases
from craft_providers.errors import BaseConfigurationError
+from typing_extensions import Self
from craft_application import errors
from craft_application.models import base
@@ -86,6 +89,30 @@ class BuildInfo:
base: craft_providers.bases.BaseName
"""The base to build on."""
+ def __post_init__(self) -> None:
+ warnings.warn(
+ "BuildInfo is pending deprecation and will be replaced with craft_platforms.BuildInfo.",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+
+ @classmethod
+ def from_platforms(cls, info: craft_platforms.BuildInfo) -> Self:
+ """Convert a craft-platforms BuildInfo to a craft-application BuildInfo."""
+ build_for = (
+ "all"
+ if info.build_for == "all"
+ else craft_platforms.DebianArchitecture(info.build_for)
+ )
+ return cls(
+ platform=info.platform,
+ build_on=craft_platforms.DebianArchitecture(info.build_on),
+ build_for=build_for,
+ base=craft_providers.bases.BaseName(
+ name=info.build_base.distribution, version=info.build_base.series
+ ),
+ )
+
class Platform(base.CraftBaseModel):
"""Project platform definition."""
@@ -127,6 +154,18 @@ def _validate_platform_set(
return values
+ @classmethod
+ def from_platforms(cls, platforms: craft_platforms.Platforms) -> dict[str, Self]:
+ """Create a dictionary ofthese objects from craft_platforms PlatformDicts."""
+ result: dict[str, Self] = {}
+ for key, value in platforms.items():
+ name = str(key)
+ platform = (
+ {"build-on": [name], "build-for": [name]} if value is None else value
+ )
+ result[name] = cls.model_validate(platform)
+ return result
+
def _populate_platforms(platforms: dict[str, Any]) -> dict[str, Any]:
"""Populate empty platform entries.
@@ -160,6 +199,15 @@ class BuildPlanner(base.CraftBaseModel, metaclass=abc.ABCMeta):
base: str | None = None
build_base: str | None = None
+ @pydantic.model_validator(mode="after")
+ def _warn_deprecation(self) -> Self:
+ warnings.warn(
+ "The craft-application BuildPlanner is pending deprecation in favour of functions that create build plans in craft-platforms.",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+ return self
+
@pydantic.field_validator("platforms", mode="before")
@classmethod
def _populate_platforms(cls, platforms: dict[str, Any]) -> dict[str, Any]:
@@ -214,21 +262,22 @@ def effective_base(self) -> bases.BaseName:
def get_build_plan(self) -> list[BuildInfo]:
"""Obtain the list of architectures and bases from the Project."""
- build_infos: list[BuildInfo] = []
-
- for platform_label, platform in self.platforms.items():
- for build_for in platform.build_for or [platform_label]:
- for build_on in platform.build_on or [platform_label]:
- build_infos.append(
- BuildInfo(
- platform=platform_label,
- build_on=build_on,
- build_for=build_for,
- base=self.effective_base,
- )
- )
-
- return build_infos
+ effective_base = self.effective_base
+ base = craft_platforms.DistroBase(
+ distribution=effective_base.name, series=effective_base.version
+ )
+ platforms = cast(
+ craft_platforms.Platforms,
+ {key: value.marshal() for key, value in self.platforms.items()},
+ )
+
+ return [
+ BuildInfo.from_platforms(info)
+ for info in craft_platforms.get_platforms_build_plan(
+ base=base,
+ platforms=platforms,
+ )
+ ]
def _validate_package_repository(repository: dict[str, Any]) -> dict[str, Any]:
diff --git a/craft_application/services/__init__.py b/craft_application/services/__init__.py
index 76d6704c..accbd923 100644
--- a/craft_application/services/__init__.py
+++ b/craft_application/services/__init__.py
@@ -17,6 +17,7 @@
from craft_application.services.base import AppService, ProjectService
from craft_application.services.config import ConfigService
+from craft_application.services.fetch import FetchService
from craft_application.services.lifecycle import LifecycleService
from craft_application.services.package import PackageService
from craft_application.services.provider import ProviderService
@@ -26,6 +27,7 @@
__all__ = [
"AppService",
+ "FetchService",
"ProjectService",
"ConfigService",
"LifecycleService",
diff --git a/craft_application/services/fetch.py b/craft_application/services/fetch.py
new file mode 100644
index 00000000..6e4d492a
--- /dev/null
+++ b/craft_application/services/fetch.py
@@ -0,0 +1,175 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Service class to communicate with the fetch-service."""
+from __future__ import annotations
+
+import json
+import pathlib
+import subprocess
+import typing
+
+import craft_providers
+from craft_cli import emit
+from typing_extensions import override
+
+from craft_application import fetch, models, services
+from craft_application.models.manifest import CraftManifest, ProjectManifest
+
+if typing.TYPE_CHECKING:
+ from craft_application.application import AppMetadata
+
+
+_PROJECT_MANIFEST_MANAGED_PATH = pathlib.Path(
+ "/tmp/craft-project-manifest.yaml" # noqa: S108 (possibly insecure)
+)
+
+
+class FetchService(services.ProjectService):
+ """Service class that handles communication with the fetch-service.
+
+ This Service is able to spawn a fetch-service instance and create sessions
+ to be used in managed runs. The general usage flow is this:
+
+ - Initialise a fetch-service via setup() (done automatically by the service
+ factory);
+ - For each managed execution:
+ - Create a new session with create_session(), passing the new managed
+ instance;
+ - Teardown/close the session with teardown_session();
+ - Stop the fetch-service via shutdown().
+ """
+
+ _fetch_process: subprocess.Popen[str] | None
+ _session_data: fetch.SessionData | None
+ _instance: craft_providers.Executor | None
+
+ def __init__(
+ self,
+ app: AppMetadata,
+ services: services.ServiceFactory,
+ *,
+ project: models.Project,
+ build_plan: list[models.BuildInfo],
+ session_policy: str,
+ ) -> None:
+ """Create a new FetchService.
+
+ :param session_policy: Whether the created fetch-service sessions should
+ be "strict" or "permissive".
+ """
+ super().__init__(app, services, project=project)
+ self._fetch_process = None
+ self._session_data = None
+ self._build_plan = build_plan
+ self._session_policy = session_policy
+ self._instance = None
+
+ @override
+ def setup(self) -> None:
+ """Start the fetch-service process with proper arguments."""
+ super().setup()
+
+ if not self._services.ProviderClass.is_managed():
+ # Emit a warning, but only on the host-side.
+ emit.message(
+ "Warning: the fetch-service integration is experimental "
+ "and still in development."
+ )
+
+ self._fetch_process = fetch.start_service()
+
+ def create_session(self, instance: craft_providers.Executor) -> dict[str, str]:
+ """Create a new session.
+
+ :return: The environment variables that must be used by any process
+ that will use the new session.
+ """
+ if self._session_data is not None:
+ raise ValueError(
+ "create_session() called but there's already a live fetch-service session."
+ )
+
+ strict_session = self._session_policy == "strict"
+ self._session_data = fetch.create_session(strict=strict_session)
+ self._instance = instance
+ return fetch.configure_instance(instance, self._session_data)
+
+ def teardown_session(self) -> dict[str, typing.Any]:
+ """Teardown and cleanup a previously-created session."""
+ if self._session_data is None or self._instance is None:
+ raise ValueError(
+ "teardown_session() called with no live fetch-service session."
+ )
+ report = fetch.teardown_session(self._session_data)
+
+ instance = self._instance
+ instance_path = _PROJECT_MANIFEST_MANAGED_PATH
+ with instance.temporarily_pull_file(source=instance_path, missing_ok=True) as f:
+ if f is not None:
+ # Project manifest was generated; we can create the full manifest
+ self._create_craft_manifest(f, report)
+ else:
+ emit.debug("Project manifest file missing in managed instance.")
+
+ self._session_data = None
+ self._instance = None
+
+ return report
+
+ def shutdown(self, *, force: bool = False) -> None:
+ """Stop the fetch-service.
+
+ The default behavior is a no-op; the Application never shuts down the
+ fetch-service so that it stays up and ready to serve other craft
+ applications.
+
+ :param force: Whether the fetch-service should be, in fact, stopped.
+ """
+ if force and self._fetch_process:
+ fetch.stop_service(self._fetch_process)
+
+ def create_project_manifest(self, artifacts: list[pathlib.Path]) -> None:
+ """Create the project manifest for the artifact in ``artifacts``.
+
+ Only supports a single generated artifact, and only in managed runs.
+ """
+ if not self._services.ProviderClass.is_managed():
+ emit.debug("Unable to generate the project manifest on the host.")
+ return
+
+ emit.debug(f"Generating project manifest at {_PROJECT_MANIFEST_MANAGED_PATH}")
+ project_manifest = ProjectManifest.from_packed_artifact(
+ self._project, self._build_plan[0], artifacts[0]
+ )
+ project_manifest.to_yaml_file(_PROJECT_MANIFEST_MANAGED_PATH)
+
+ def _create_craft_manifest(
+ self, project_manifest: pathlib.Path, session_report: dict[str, typing.Any]
+ ) -> None:
+ name = self._project.name
+ version = self._project.version
+ platform = self._build_plan[0].platform
+
+ manifest_path = pathlib.Path(f"{name}_{version}_{platform}.json")
+ emit.debug(f"Generating craft manifest at {manifest_path}")
+
+ craft_manifest = CraftManifest.create_craft_manifest(
+ project_manifest, session_report
+ )
+ data = craft_manifest.marshal()
+
+ with manifest_path.open("w") as f:
+ json.dump(data, f, ensure_ascii=False, indent=2)
diff --git a/craft_application/services/lifecycle.py b/craft_application/services/lifecycle.py
index 6e959b0e..50024c1c 100644
--- a/craft_application/services/lifecycle.py
+++ b/craft_application/services/lifecycle.py
@@ -17,7 +17,6 @@
from __future__ import annotations
import contextlib
-import os
import types
from typing import TYPE_CHECKING, Any
@@ -165,7 +164,7 @@ def _get_build_for(self) -> str:
# something else like clean() is called).
# We also use the host arch if the build-for is 'all'
if self._build_plan and self._build_plan[0].build_for != "all":
- return self._build_plan[0].build_for
+ return str(self._build_plan[0].build_for)
return util.get_host_architecture()
def _init_lifecycle_manager(self) -> LifecycleManager:
@@ -200,7 +199,7 @@ def _init_lifecycle_manager(self) -> LifecycleManager:
cache_dir=self._cache_dir,
work_dir=self._work_dir,
ignore_local_sources=self._app.source_ignore_patterns,
- parallel_build_count=self._get_parallel_build_count(),
+ parallel_build_count=util.get_parallel_build_count(self._app.name),
project_vars_part_name=self._project.adopt_info,
project_vars=self._project_vars,
track_stage_packages=True,
@@ -324,92 +323,6 @@ def __repr__(self) -> str:
f"{work_dir=}, {cache_dir=}, {plan=}, **{self._manager_kwargs!r})"
)
- def _verify_parallel_build_count(
- self, env_name: str, parallel_build_count: int | str
- ) -> int:
- """Verify the parallel build count is valid.
-
- :param env_name: The name of the environment variable being checked.
- :param parallel_build_count: The value of the variable.
- :return: The parallel build count as an integer.
- """
- try:
- parallel_build_count = int(parallel_build_count)
- except ValueError as err:
- raise errors.InvalidParameterError(
- env_name, str(os.environ[env_name])
- ) from err
-
- # Ensure the value is valid positive integer
- if parallel_build_count < 1:
- raise errors.InvalidParameterError(env_name, str(parallel_build_count))
-
- return parallel_build_count
-
- def _get_parallel_build_count(self) -> int:
- """Get the number of parallel builds to run.
-
- The parallel build count is determined by the first available of the
- following environment variables in the order:
-
- - <app_name>_PARALLEL_BUILD_COUNT
- - CRAFT_PARALLEL_BUILD_COUNT
- - <app_name>_MAX_PARALLEL_BUILD_COUNT
- - CRAFT_MAX_PARALLEL_BUILD_COUNT
-
- where the MAX_PARALLEL_BUILD_COUNT variables are dynamically compared to
- the number of CPUs, and the smaller of the two is used.
-
- If no environment variable is set, the CPU count is used.
- If the CPU count is not available for some reason, 1 is used as a fallback.
- """
- parallel_build_count = None
-
- # fixed parallel build count environment variable
- for env_name in [
- (self._app.name + "_PARALLEL_BUILD_COUNT").upper(),
- "CRAFT_PARALLEL_BUILD_COUNT",
- ]:
- if os.environ.get(env_name):
- parallel_build_count = self._verify_parallel_build_count(
- env_name, os.environ[env_name]
- )
- emit.debug(
- f"Using parallel build count of {parallel_build_count} "
- f"from environment variable {env_name!r}"
- )
- break
-
- # CPU count related max parallel build count environment variable
- if parallel_build_count is None:
- cpu_count = os.cpu_count() or 1
- for env_name in [
- (self._app.name + "_MAX_PARALLEL_BUILD_COUNT").upper(),
- "CRAFT_MAX_PARALLEL_BUILD_COUNT",
- ]:
- if os.environ.get(env_name):
- parallel_build_count = min(
- cpu_count,
- self._verify_parallel_build_count(
- env_name, os.environ[env_name]
- ),
- )
- emit.debug(
- f"Using parallel build count of {parallel_build_count} "
- f"from environment variable {env_name!r}"
- )
- break
-
- # Default to CPU count if no max environment variable is set
- if parallel_build_count is None:
- parallel_build_count = cpu_count
- emit.debug(
- f"Using parallel build count of {parallel_build_count} "
- "from CPU count"
- )
-
- return parallel_build_count
-
def _get_local_keys_path(self) -> Path | None:
"""Return a directory with public keys for package-repositories.
diff --git a/craft_application/services/provider.py b/craft_application/services/provider.py
index c746500c..bdbc8417 100644
--- a/craft_application/services/provider.py
+++ b/craft_application/services/provider.py
@@ -116,6 +116,7 @@ def instance(
*,
work_dir: pathlib.Path,
allow_unstable: bool = True,
+ clean_existing: bool = False,
**kwargs: bool | str | None,
) -> Generator[craft_providers.Executor, None, None]:
"""Context manager for getting a provider instance.
@@ -123,6 +124,8 @@ def instance(
:param build_info: Build information for the instance.
:param work_dir: Local path to mount inside the provider instance.
:param allow_unstable: Whether to allow the use of unstable images.
+ :param clean_existing: Whether pre-existing instances should be wiped
+ and re-created.
:returns: a context manager of the provider instance.
"""
instance_name = self._get_instance_name(work_dir, build_info)
@@ -133,6 +136,9 @@ def instance(
provider.ensure_provider_is_available()
+ if clean_existing:
+ self._clean_instance(provider, work_dir, build_info)
+
emit.progress(f"Launching managed {base_name[0]} {base_name[1]} instance...")
with provider.launched_environment(
project_name=self._project.name,
@@ -267,9 +273,7 @@ def clean_instances(self) -> None:
emit.progress(f"Cleaning build {target}")
for info in build_plan:
- instance_name = self._get_instance_name(self._work_dir, info)
- emit.debug(f"Cleaning instance {instance_name}")
- provider.clean_project_environments(instance_name=instance_name)
+ self._clean_instance(provider, self._work_dir, info)
def _get_instance_name(
self, work_dir: pathlib.Path, build_info: models.BuildInfo
@@ -332,3 +336,14 @@ def _setup_instance_bashrc(self, instance: craft_providers.Executor) -> None:
content=io.BytesIO(bashrc),
file_mode="644",
)
+
+ def _clean_instance(
+ self,
+ provider: craft_providers.Provider,
+ work_dir: pathlib.Path,
+ info: models.BuildInfo,
+ ) -> None:
+ """Clean an instance, if it exists."""
+ instance_name = self._get_instance_name(work_dir, info)
+ emit.debug(f"Cleaning instance {instance_name}")
+ provider.clean_project_environments(instance_name=instance_name)
diff --git a/craft_application/services/service_factory.py b/craft_application/services/service_factory.py
index 9d01a9da..d7c3cf4f 100644
--- a/craft_application/services/service_factory.py
+++ b/craft_application/services/service_factory.py
@@ -43,6 +43,7 @@ class ServiceFactory:
RemoteBuildClass: type[services.RemoteBuildService] = services.RemoteBuildService
RequestClass: type[services.RequestService] = services.RequestService
ConfigClass: type[services.ConfigService] = services.ConfigService
+ FetchClass: type[services.FetchService] = services.FetchService
project: models.Project | None = None
@@ -55,6 +56,7 @@ class ServiceFactory:
remote_build: services.RemoteBuildService = None # type: ignore[assignment]
request: services.RequestService = None # type: ignore[assignment]
config: services.ConfigService = None # type: ignore[assignment]
+ fetch: services.FetchService = None # type: ignore[assignment]
def __post_init__(self) -> None:
self._service_kwargs: dict[str, dict[str, Any]] = {}
diff --git a/craft_application/util/__init__.py b/craft_application/util/__init__.py
index 0330165a..6bd33ead 100644
--- a/craft_application/util/__init__.py
+++ b/craft_application/util/__init__.py
@@ -32,6 +32,7 @@
is_running_from_snap,
)
from craft_application.util.string import humanize_list, strtobool
+from craft_application.util.system import get_parallel_build_count
from craft_application.util.yaml import dump_yaml, safe_yaml_load
__all__ = [
@@ -52,4 +53,5 @@
"dump_yaml",
"safe_yaml_load",
"retry",
+ "get_parallel_build_count",
]
diff --git a/craft_application/util/system.py b/craft_application/util/system.py
new file mode 100644
index 00000000..f59d89ed
--- /dev/null
+++ b/craft_application/util/system.py
@@ -0,0 +1,105 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+"""System-level util functions."""
+from __future__ import annotations
+
+import os
+
+from craft_cli import emit
+
+from craft_application.errors import InvalidParameterError
+
+
+def _verify_parallel_build_count(env_name: str, parallel_build_count: int | str) -> int:
+ """Verify the parallel build count is valid.
+
+ :param env_name: The name of the environment variable being checked.
+ :param parallel_build_count: The value of the variable.
+ :return: The parallel build count as an integer.
+ """
+ try:
+ parallel_build_count = int(parallel_build_count)
+ except ValueError as err:
+ raise InvalidParameterError(env_name, str(os.environ[env_name])) from err
+
+ # Ensure the value is valid positive integer
+ if parallel_build_count < 1:
+ raise InvalidParameterError(env_name, str(parallel_build_count))
+
+ return parallel_build_count
+
+
+def get_parallel_build_count(app_name: str) -> int:
+ """Get the number of parallel builds to run.
+
+ The parallel build count is determined by the first available of the
+ following environment variables in the order:
+
+ - <app_name>_PARALLEL_BUILD_COUNT
+ - CRAFT_PARALLEL_BUILD_COUNT
+ - <app_name>_MAX_PARALLEL_BUILD_COUNT
+ - CRAFT_MAX_PARALLEL_BUILD_COUNT
+
+ where the MAX_PARALLEL_BUILD_COUNT variables are dynamically compared to
+ the number of CPUs, and the smaller of the two is used.
+
+ If no environment variable is set, the CPU count is used.
+ If the CPU count is not available for some reason, 1 is used as a fallback.
+ """
+ parallel_build_count = None
+
+ # fixed parallel build count environment variable
+ for env_name in [
+ (app_name + "_PARALLEL_BUILD_COUNT").upper(),
+ "CRAFT_PARALLEL_BUILD_COUNT",
+ ]:
+ if os.environ.get(env_name):
+ parallel_build_count = _verify_parallel_build_count(
+ env_name, os.environ[env_name]
+ )
+ emit.debug(
+ f"Using parallel build count of {parallel_build_count} "
+ f"from environment variable {env_name!r}"
+ )
+ break
+
+ # CPU count related max parallel build count environment variable
+ if parallel_build_count is None:
+ cpu_count = os.cpu_count() or 1
+ for env_name in [
+ (app_name + "_MAX_PARALLEL_BUILD_COUNT").upper(),
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT",
+ ]:
+ if os.environ.get(env_name):
+ parallel_build_count = min(
+ cpu_count,
+ _verify_parallel_build_count(env_name, os.environ[env_name]),
+ )
+ emit.debug(
+ f"Using parallel build count of {parallel_build_count} "
+ f"from environment variable {env_name!r}"
+ )
+ break
+
+ # Default to CPU count if no max environment variable is set
+ if parallel_build_count is None:
+ parallel_build_count = cpu_count
+ emit.debug(
+ f"Using parallel build count of {parallel_build_count} "
+ "from CPU count"
+ )
+
+ return parallel_build_count
diff --git a/docs/howto/partitions.rst b/docs/howto/partitions.rst
index f3dd4d02..72d4a9d6 100644
--- a/docs/howto/partitions.rst
+++ b/docs/howto/partitions.rst
@@ -45,45 +45,57 @@ Required application changes
To add partition support to an application, two basic changes are needed:
-#. Enable the feature
+#. Enable the feature.
- Use the :class:`Features ` class to specify that the
- application will use partitions:
+ In your Application subclass, override the following method and invoke the
+ :class:`Features ` class:
.. code-block:: python
from craft_parts import Features
- Features.reset()
- Features(enable_partitions=True)
+ class ExampleApplication(Application):
- .. NOTE::
- The ``craft-application`` class :class:`AppFeatures
- ` has a similar name and serves a similar
- purpose to ``craft-parts``'s :class:`Features `,
- but partitions cannot be enabled via :class:`AppFeatures
- `!
+ ...
-#. Define the list of partitions
+ @override
+ def _enable_craft_parts_features(self) -> None:
+ Features(enable_partitions=True)
- We need to tell the :class:`LifecycleManager `
- class about our partitions, but applications do not usually directly
- instantiate the LifecycleManager.
+ You can only enable partitions with the :class:`Features
+ ` class from craft-parts. In craft-application
+ there's a similarly-named :class:`AppFeatures
+ ` class which serves a similar purpose,
+ but it can't enable partitions.
- Instead, override your :class:`Application
- `'s ``_setup_partitions`` method, and return
- a list of the partitions, which will eventually be passed to the
- :class:`LifecycleManager `:
+ .. Tip::
+ In unit tests, the :class:`Features ` global
+ singleton may raise exceptions when successive tests repeatedly try to
+ enable partitions.
+
+ To prevent these errors, reset the features at the start of each test:
+
+ .. code-block:: python
+
+ Features.reset()
+
+
+
+#. Define the list of partitions.
+
+ Override the ``_setup_partitions`` method of your :class:`Application
+ ` class and return the list of the
+ partitions.
.. code-block:: python
- class SnackcraftApplication(Application):
+ class ExampleApplication(Application):
- ...
+ ...
- @override
- def _setup_partitions(self, yaml_data: dict[str, Any]) -> list[str] | None:
- return ["default", "kernel", "component/bar-baz"]
+ @override
+ def _setup_partitions(self, yaml_data: dict[str, Any]) -> list[str] | None:
+ return ["default", "kernel", "component/bar-baz"]
Using the partitions
====================
diff --git a/docs/index.rst b/docs/index.rst
index 02c4cfc9..7caea660 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -12,26 +12,16 @@ Craft-Application
reference/index
explanation/index
-.. grid:: 1 1 2 2
-
- .. grid-item-card:: :ref:`Tutorial `
-
- **Get started** with a hands-on introduction to Craft-Application
-
- .. grid-item-card:: :ref:`How-to guides `
-
- **Step-by-step guides** covering key operations and common tasks
-
-.. grid:: 1 1 2 2
- :reverse:
-
- .. grid-item-card:: :ref:`Reference `
-
- **Technical information** about Craft-Application
-
- .. grid-item-card:: :ref:`Explanation `
-
- **Discussion and clarification** of key topics
+.. list-table::
+
+ * - | :ref:`Tutorial <tutorial>`
+ | **Get started** with a hands-on introduction to craft-application
+ - | :ref:`How-to guides <howto>`
+ | **Step-by-step guides** covering key operations and common tasks
+ * - | :ref:`Reference <reference>`
+ | **Technical information** about craft-application
+ - | :ref:`Explanation <explanation>`
+ | **Discussion and clarification** of key topics
Project and community
=====================
diff --git a/docs/reference/changelog.rst b/docs/reference/changelog.rst
index e5e57414..4983a672 100644
--- a/docs/reference/changelog.rst
+++ b/docs/reference/changelog.rst
@@ -2,6 +2,20 @@
Changelog
*********
+X.Y.Z (yyyy-mmm-dd)
+-------------------
+
+Application
+===========
+
+- Fix: set CRAFT_PARALLEL_BUILD_COUNT correctly in ``override-`` scripts.
+
+Commands
+========
+
+- The ``clean`` command now supports the ``--platform`` argument to filter
+ which build environments to clean.
+
4.2.6 (2024-Oct-04)
-------------------
@@ -58,6 +72,16 @@ Models
For a complete list of commits, check out the `4.2.1`_ release on GitHub.
+4.1.3 (2024-Sep-12)
+-------------------
+
+Models
+======
+
+- Fix a regression where numeric part properties could not be parsed.
+
+For a complete list of commits, check out the `4.1.3`_ release on GitHub.
+
4.2.0 (2024-Sep-12)
-------------------
@@ -299,6 +323,7 @@ For a complete list of commits, check out the `2.7.0`_ release on GitHub.
.. _4.1.0: https://github.com/canonical/craft-application/releases/tag/4.1.0
.. _4.1.1: https://github.com/canonical/craft-application/releases/tag/4.1.1
.. _4.1.2: https://github.com/canonical/craft-application/releases/tag/4.1.2
+.. _4.1.3: https://github.com/canonical/craft-application/releases/tag/4.1.3
.. _4.2.0: https://github.com/canonical/craft-application/releases/tag/4.2.0
.. _4.2.1: https://github.com/canonical/craft-application/releases/tag/4.2.1
.. _4.2.2: https://github.com/canonical/craft-application/releases/tag/4.2.2
diff --git a/pyproject.toml b/pyproject.toml
index 0d124366..5827fac8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,6 +7,7 @@ dependencies = [
"craft-cli>=2.6.0",
"craft-grammar>=2.0.0",
"craft-parts>=2.1.1",
+ "craft-platforms>=0.3.1",
"craft-providers>=2.0.4",
"snap-helpers>=0.4.2",
"platformdirs>=3.10",
@@ -53,6 +54,7 @@ dev = [
"pytest==8.3.2",
"pytest-check==2.4.1",
"pytest-cov==5.0.0",
+ "pytest-freezegun==0.4.2",
"pytest-mock==3.14.0",
"pytest-rerunfailures==14.0",
"pytest-subprocess~=1.5.2",
@@ -72,12 +74,12 @@ types = [
"types-urllib3",
]
docs = [
- "canonical-sphinx~=0.1.0",
+ "canonical-sphinx~=0.2.0",
"sphinx-autobuild==2024.9.3",
"sphinx-lint==0.9.1",
]
apt = [
- "python-apt>=2.4.0;sys_platform=='linux'"
+ "python-apt>=2.4.0;sys_platform=='linux'",
]
[build-system]
@@ -139,6 +141,10 @@ xfail_strict = true
markers = [
"enable_features: Tests that require specific features",
]
+filterwarnings = [
+ "ignore:The craft-application BuildPlanner:PendingDeprecationWarning",
+ "ignore:BuildInfo:PendingDeprecationWarning",
+]
[tool.coverage.run]
branch = true
@@ -281,6 +287,7 @@ lint.ignore = [
"ANN", # Ignore type annotations in tests
"S101", # Allow assertions in tests
"S103", # Allow `os.chmod` setting a permissive mask `0o555` on file or directory
+ "S105", "S106", "S107", # Allow hardcoded "passwords" in test files.
"S108", # Allow Probable insecure usage of temporary file or directory
"PLR0913", # Allow many arguments for test functions
"PT004", # Allow fixtures that don't return anything to not start with underscores
diff --git a/tests/conftest.py b/tests/conftest.py
index 93387265..9da4a645 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -29,6 +29,7 @@
from craft_application import application, launchpad, models, services, util
from craft_cli import EmitterMode, emit
from craft_providers import bases
+from typing_extensions import override
if TYPE_CHECKING: # pragma: no cover
from collections.abc import Iterator
@@ -304,3 +305,37 @@ def fake_services(
PackageClass=fake_package_service_class,
LifecycleClass=fake_lifecycle_service_class,
)
+
+
+class FakeApplication(application.Application):
+ """An application class explicitly for testing. Adds some convenient test hooks."""
+
+ platform: str = "unknown-platform"
+ build_on: str = "unknown-build-on"
+ build_for: str | None = "unknown-build-for"
+
+ def set_project(self, project):
+ self._Application__project = project
+
+ @override
+ def _extra_yaml_transform(
+ self,
+ yaml_data: dict[str, Any],
+ *,
+ build_on: str,
+ build_for: str | None,
+ ) -> dict[str, Any]:
+ self.build_on = build_on
+ self.build_for = build_for
+
+ return yaml_data
+
+
+@pytest.fixture
+def app(app_metadata, fake_services):
+ return FakeApplication(app_metadata, fake_services)
+
+
+@pytest.fixture
+def manifest_data_dir():
+ return pathlib.Path(__file__).parent / "data/manifest"
diff --git a/tests/data/manifest/craft-manifest-expected.json b/tests/data/manifest/craft-manifest-expected.json
new file mode 100644
index 00000000..329b489c
--- /dev/null
+++ b/tests/data/manifest/craft-manifest-expected.json
@@ -0,0 +1,74 @@
+{
+ "component-name": "full-project",
+ "component-version": "1.0.0.post64+git12345678",
+ "component-description": "A fully-defined craft-application project.",
+ "component-id": {
+ "hashes": {
+ "sha1": "27d3150b433071ec1e2bd5bf04bc6de92b8b12b5",
+ "sha256": "9d7f74856a64282de8cb743fafdba600f18eef2a6f6049746b7cb842e47a3123"
+ }
+ },
+ "architecture": "amd64",
+ "license": "LGPLv3",
+ "metadata-generator": "Craft Application",
+ "creation-timestamp": "2024-09-16T01:02:03.456789+00:00",
+ "dependencies": [
+ {
+ "component-name": "Translation",
+ "component-version": "",
+ "component-description": "",
+ "component-id": {
+ "hashes": {
+ "sha1": "af5834abfa1a537fd383d41f1be33cea47c7b6a7",
+ "sha256": "38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d"
+ }
+ },
+ "architecture": "",
+ "type": "application/x.apt.translation",
+ "component-author": "Ubuntu",
+ "component-vendor": "Ubuntu",
+ "size": 111544,
+ "url": [
+ "http://archive.ubuntu.com/ubuntu/dists/jammy/multiverse/i18n/by-hash/SHA256/38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d"
+ ]
+ },
+ {
+ "component-name": "Packages.xz",
+ "component-version": "jammy",
+ "component-description": "jammy main Packages file",
+ "component-id": {
+ "hashes": {
+ "sha1": "370c66437d49460dbc16be011209c4de9977212d",
+ "sha256": "37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11"
+ }
+ },
+ "architecture": "amd64",
+ "type": "application/x.apt.packages",
+ "component-author": "Ubuntu",
+ "component-vendor": "Ubuntu",
+ "size": 1394768,
+ "url": [
+ "http://archive.ubuntu.com/ubuntu/dists/jammy/main/binary-amd64/by-hash/SHA256/37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11"
+ ]
+ },
+ {
+ "component-name": "go",
+ "component-version": "10660",
+ "component-description": "The Go programming language",
+ "component-id": {
+ "hashes": {
+ "sha1": "376506001849698af3f9e07a236a47ee8cddded0",
+ "sha256": "cf7e02ebfdaa898107d2dbf84cf1231cee6c244dd5646580d09cfd6f6cf12577"
+ }
+ },
+ "architecture": "amd64",
+ "type": "application/x.canonical.snap-package",
+ "component-author": "",
+ "component-vendor": "Canonical",
+ "size": 64892928,
+ "url": [
+ "https://canonical-bos01.cdn.snapcraftcontent.com:443/download-origin/canonical-lgw01/Md1HBASHzP4i0bniScAjXGnOII9cEK6e_10660.snap?interactive=1&token=1720738800_68d3c27ac109407168ed776e46653c7883b8ef40"
+ ]
+ }
+ ]
+}
diff --git a/tests/data/manifest/project-expected.yaml b/tests/data/manifest/project-expected.yaml
new file mode 100644
index 00000000..6cb1cdc2
--- /dev/null
+++ b/tests/data/manifest/project-expected.yaml
@@ -0,0 +1,11 @@
+component-name: full-project
+component-version: 1.0.0.post64+git12345678
+component-description: A fully-defined craft-application project.
+component-id:
+ hashes:
+ sha1: 27d3150b433071ec1e2bd5bf04bc6de92b8b12b5
+ sha256: 9d7f74856a64282de8cb743fafdba600f18eef2a6f6049746b7cb842e47a3123
+architecture: amd64
+license: LGPLv3
+metadata-generator: Craft Application
+creation-timestamp: '2024-09-16T01:02:03.456789+00:00'
diff --git a/tests/data/manifest/session-manifest-expected.yaml b/tests/data/manifest/session-manifest-expected.yaml
new file mode 100644
index 00000000..faa44402
--- /dev/null
+++ b/tests/data/manifest/session-manifest-expected.yaml
@@ -0,0 +1,42 @@
+- component-name: Translation
+ component-version: ''
+ component-description: ''
+ component-id:
+ hashes:
+ sha1: af5834abfa1a537fd383d41f1be33cea47c7b6a7
+ sha256: 38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d
+ architecture: ''
+ type: application/x.apt.translation
+ component-author: Ubuntu
+ component-vendor: Ubuntu
+ size: 111544
+ url:
+ - http://archive.ubuntu.com/ubuntu/dists/jammy/multiverse/i18n/by-hash/SHA256/38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d
+- component-name: Packages.xz
+ component-version: jammy
+ component-description: jammy main Packages file
+ component-id:
+ hashes:
+ sha1: 370c66437d49460dbc16be011209c4de9977212d
+ sha256: 37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11
+ architecture: amd64
+ type: application/x.apt.packages
+ component-author: Ubuntu
+ component-vendor: Ubuntu
+ size: 1394768
+ url:
+ - http://archive.ubuntu.com/ubuntu/dists/jammy/main/binary-amd64/by-hash/SHA256/37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11
+- component-name: go
+ component-version: '10660'
+ component-description: The Go programming language
+ component-id:
+ hashes:
+ sha1: 376506001849698af3f9e07a236a47ee8cddded0
+ sha256: cf7e02ebfdaa898107d2dbf84cf1231cee6c244dd5646580d09cfd6f6cf12577
+ architecture: amd64
+ type: application/x.canonical.snap-package
+ component-author: ''
+ component-vendor: Canonical
+ size: 64892928
+ url:
+ - https://canonical-bos01.cdn.snapcraftcontent.com:443/download-origin/canonical-lgw01/Md1HBASHzP4i0bniScAjXGnOII9cEK6e_10660.snap?interactive=1&token=1720738800_68d3c27ac109407168ed776e46653c7883b8ef40
diff --git a/tests/data/manifest/session-report.json b/tests/data/manifest/session-report.json
new file mode 100644
index 00000000..a49f2cff
--- /dev/null
+++ b/tests/data/manifest/session-report.json
@@ -0,0 +1,67 @@
+{
+ "THIS IS A STRIPPED DOWN SESSION REPORT FOR TESTING PURPOSES": 1,
+ "comment": "Metadata format is unstable and may change without prior notice.",
+ "session-id": "f17e28e952c84d7c955a1eb5277de201",
+ "policy": "",
+ "artefacts": [
+ {
+ "metadata": {
+ "type": "application/x.apt.translation",
+ "sha1": "af5834abfa1a537fd383d41f1be33cea47c7b6a7",
+ "sha256": "38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d",
+ "size": 111544,
+ "name": "Translation",
+ "version": "",
+ "vendor": "Ubuntu",
+ "description": "",
+ "author": "Ubuntu",
+ "license": ""
+ },
+ "downloads": [
+ {
+ "url": "http://archive.ubuntu.com/ubuntu/dists/jammy/multiverse/i18n/by-hash/SHA256/38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d"
+ }
+ ]
+ },
+ {
+ "metadata": {
+ "type": "application/x.apt.packages",
+ "sha1": "370c66437d49460dbc16be011209c4de9977212d",
+ "sha256": "37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11",
+ "size": 1394768,
+ "name": "Packages.xz",
+ "version": "jammy",
+ "vendor": "Ubuntu",
+ "description": "jammy main Packages file",
+ "author": "Ubuntu",
+ "architecture": "amd64",
+ "license": ""
+ },
+ "downloads": [
+ {
+ "url": "http://archive.ubuntu.com/ubuntu/dists/jammy/main/binary-amd64/by-hash/SHA256/37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11"
+ }
+ ]
+ },
+ {
+ "metadata": {
+ "type": "application/x.canonical.snap-package",
+ "sha1": "376506001849698af3f9e07a236a47ee8cddded0",
+ "sha256": "cf7e02ebfdaa898107d2dbf84cf1231cee6c244dd5646580d09cfd6f6cf12577",
+ "size": 64892928,
+ "name": "go",
+ "version": "10660",
+ "vendor": "Canonical",
+ "description": "The Go programming language",
+ "author": "",
+ "architecture": "amd64",
+ "license": ""
+ },
+ "downloads": [
+ {
+ "url": "https://canonical-bos01.cdn.snapcraftcontent.com:443/download-origin/canonical-lgw01/Md1HBASHzP4i0bniScAjXGnOII9cEK6e_10660.snap?interactive=1&token=1720738800_68d3c27ac109407168ed776e46653c7883b8ef40"
+ }
+ ]
+ }
+ ]
+}
diff --git a/tests/integration/data/build-secrets/testcraft.yaml b/tests/integration/data/build-secrets/testcraft.yaml
index ba649b06..5650369b 100644
--- a/tests/integration/data/build-secrets/testcraft.yaml
+++ b/tests/integration/data/build-secrets/testcraft.yaml
@@ -7,7 +7,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
diff --git a/tests/integration/data/invalid_projects/build-error/testcraft.yaml b/tests/integration/data/invalid_projects/build-error/testcraft.yaml
index 33e45d52..0d3f9bd3 100644
--- a/tests/integration/data/invalid_projects/build-error/testcraft.yaml
+++ b/tests/integration/data/invalid_projects/build-error/testcraft.yaml
@@ -7,7 +7,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
diff --git a/tests/integration/data/valid_projects/adoption/testcraft.yaml b/tests/integration/data/valid_projects/adoption/testcraft.yaml
index 4cbf1d76..1f147afb 100644
--- a/tests/integration/data/valid_projects/adoption/testcraft.yaml
+++ b/tests/integration/data/valid_projects/adoption/testcraft.yaml
@@ -6,7 +6,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
diff --git a/tests/integration/data/valid_projects/basic/testcraft.yaml b/tests/integration/data/valid_projects/basic/testcraft.yaml
index 88da18b9..8ed5b9cd 100644
--- a/tests/integration/data/valid_projects/basic/testcraft.yaml
+++ b/tests/integration/data/valid_projects/basic/testcraft.yaml
@@ -7,7 +7,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
diff --git a/tests/integration/data/valid_projects/build-for-all/testcraft.yaml b/tests/integration/data/valid_projects/build-for-all/testcraft.yaml
index a774850b..222be5cb 100644
--- a/tests/integration/data/valid_projects/build-for-all/testcraft.yaml
+++ b/tests/integration/data/valid_projects/build-for-all/testcraft.yaml
@@ -4,7 +4,7 @@ version: 1.0
base: "ubuntu@22.04"
platforms:
platform1:
- build-on: [amd64, arm64, armhf, i386, powerpc, ppc64el, riscv64, s390x]
+ build-on: [amd64, arm64, armhf, i386, ppc64el, riscv64, s390x]
build-for: [all]
parts:
diff --git a/tests/integration/data/valid_projects/environment/testcraft.yaml b/tests/integration/data/valid_projects/environment/testcraft.yaml
index 0e7474e2..a0e995db 100644
--- a/tests/integration/data/valid_projects/environment/testcraft.yaml
+++ b/tests/integration/data/valid_projects/environment/testcraft.yaml
@@ -7,7 +7,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
@@ -23,5 +22,6 @@ parts:
echo "project_version: \"${CRAFT_PROJECT_VERSION}\"" >> $target_file
echo "arch_build_for: \"${CRAFT_ARCH_BUILD_FOR}\"" >> $target_file
echo "arch_triplet_build_for: \"${CRAFT_ARCH_TRIPLET_BUILD_FOR}\"" >> $target_file
- echo "arch_build_on: \"${CRAFT_ARCH_BUILD_ON}\"" >> $target_file
- echo "arch_triplet_build_on: \"${CRAFT_ARCH_TRIPLET_BUILD_ON}\"" >> $target_file
+ echo "arch_build_on: \"${CRAFT_ARCH_BUILD_ON}\"" >> $target_file
+ echo "arch_triplet_build_on: \"${CRAFT_ARCH_TRIPLET_BUILD_ON}\"" >> $target_file
+ echo "parallel_build_count: \"${CRAFT_PARALLEL_BUILD_COUNT}\"" >> $target_file
diff --git a/tests/integration/data/valid_projects/grammar/testcraft.yaml b/tests/integration/data/valid_projects/grammar/testcraft.yaml
index 2e4be47e..574d00f2 100644
--- a/tests/integration/data/valid_projects/grammar/testcraft.yaml
+++ b/tests/integration/data/valid_projects/grammar/testcraft.yaml
@@ -7,7 +7,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
diff --git a/tests/integration/services/test_fetch.py b/tests/integration/services/test_fetch.py
new file mode 100644
index 00000000..e4bd1040
--- /dev/null
+++ b/tests/integration/services/test_fetch.py
@@ -0,0 +1,331 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for FetchService."""
+import contextlib
+import io
+import json
+import pathlib
+import shutil
+import socket
+import textwrap
+from functools import cache
+from unittest import mock
+
+import craft_providers
+import pytest
+from craft_application import errors, fetch, services, util
+from craft_application.models import BuildInfo
+from craft_application.services.fetch import _PROJECT_MANIFEST_MANAGED_PATH
+from craft_providers import bases
+
+
+@cache
+def _get_fake_certificate_dir():
+ base_dir = fetch._get_service_base_dir()
+
+ return base_dir / "test-craft-app/fetch-certificate"
+
+
+@pytest.fixture(autouse=True, scope="session")
+def _set_test_certificate_dir():
+ """A session-scoped fixture so that we generate the certificate only once"""
+ cert_dir = _get_fake_certificate_dir()
+ if cert_dir.is_dir():
+ shutil.rmtree(cert_dir)
+
+ with mock.patch.object(fetch, "_get_certificate_dir", return_value=cert_dir):
+ fetch._obtain_certificate()
+
+
+@pytest.fixture(autouse=True)
+def _set_test_base_dirs(mocker):
+ original = fetch._get_service_base_dir()
+ test_dir = original / "test"
+ if test_dir.exists():
+ shutil.rmtree(test_dir)
+ test_dir.mkdir()
+ mocker.patch.object(fetch, "_get_service_base_dir", return_value=test_dir)
+
+ cert_dir = _get_fake_certificate_dir()
+ mocker.patch.object(fetch, "_get_certificate_dir", return_value=cert_dir)
+
+
+@pytest.fixture
+def mock_instance():
+ @contextlib.contextmanager
+ def temporarily_pull_file(*, source, missing_ok): # noqa: ARG001 (unused arguments)
+ yield None
+
+ instance = mock.Mock(spec=craft_providers.Executor)
+ instance.temporarily_pull_file = temporarily_pull_file
+
+ return instance
+
+
+@pytest.fixture
+def app_service(app_metadata, fake_services, fake_project, fake_build_plan):
+ fetch_service = services.FetchService(
+ app_metadata,
+ fake_services,
+ project=fake_project,
+ build_plan=fake_build_plan,
+ session_policy="permissive",
+ )
+ yield fetch_service
+ fetch_service.shutdown(force=True)
+
+
+def test_start_service(app_service):
+ assert not fetch.is_service_online()
+ app_service.setup()
+ assert fetch.is_service_online()
+
+
+def test_start_service_already_up(app_service, request):
+ # Create a fetch-service "manually"
+ fetch_process = fetch.start_service()
+ assert fetch.is_service_online()
+    # Ensure it's cleaned up when the test is done
+ if fetch_process is not None:
+ request.addfinalizer(lambda: fetch.stop_service(fetch_process))
+
+ app_service.setup()
+ assert fetch.is_service_online()
+
+
+@pytest.mark.parametrize(
+ "port",
+ [
+ pytest.param(
+ fetch._DEFAULT_CONFIG.control,
+ marks=pytest.mark.xfail(
+ reason="Needs https://github.com/canonical/fetch-service/issues/208 fixed",
+ strict=True,
+ ),
+ ),
+ fetch._DEFAULT_CONFIG.proxy,
+ ],
+)
+def test_start_service_port_taken(app_service, request, port):
+ # "Occupy" one of the necessary ports manually.
+ soc = socket.create_server(("localhost", port), reuse_port=True)
+ request.addfinalizer(soc.close)
+
+ assert not fetch.is_service_online()
+
+ proxy = fetch._DEFAULT_CONFIG.proxy
+ control = fetch._DEFAULT_CONFIG.control
+
+ expected = f"fetch-service ports {proxy} and {control} are already in use."
+ with pytest.raises(errors.FetchServiceError, match=expected):
+ app_service.setup()
+
+
+def test_shutdown_service(app_service):
+ assert not fetch.is_service_online()
+
+ app_service.setup()
+ assert fetch.is_service_online()
+
+ # By default, shutdown() without parameters doesn't actually stop the
+ # fetch-service.
+ app_service.shutdown()
+ assert fetch.is_service_online()
+
+ # shutdown(force=True) must stop the fetch-service.
+ app_service.shutdown(force=True)
+ assert not fetch.is_service_online()
+
+
+def test_create_teardown_session(
+ app_service, mocker, tmp_path, monkeypatch, mock_instance
+):
+ monkeypatch.chdir(tmp_path)
+ mocker.patch.object(fetch, "_get_gateway", return_value="127.0.0.1")
+ app_service.setup()
+
+ assert len(fetch.get_service_status()["active-sessions"]) == 0
+
+ app_service.create_session(instance=mock_instance)
+ assert len(fetch.get_service_status()["active-sessions"]) == 1
+
+ report = app_service.teardown_session()
+ assert len(fetch.get_service_status()["active-sessions"]) == 0
+
+ assert "artefacts" in report
+
+
+def test_service_logging(app_service, mocker, tmp_path, monkeypatch, mock_instance):
+ monkeypatch.chdir(tmp_path)
+ mocker.patch.object(fetch, "_get_gateway", return_value="127.0.0.1")
+
+ logfile = fetch._get_log_filepath()
+ assert not logfile.is_file()
+
+ app_service.setup()
+
+ # Create and teardown two sessions
+ app_service.create_session(mock_instance)
+ app_service.teardown_session()
+ app_service.create_session(mock_instance)
+ app_service.teardown_session()
+
+ # Check the logfile for the creation/deletion of the two sessions
+ expected = 2
+ assert logfile.is_file()
+ lines = logfile.read_text().splitlines()
+ create = discard = 0
+ for line in lines:
+ if "creating session" in line:
+ create += 1
+ if "discarding session" in line:
+ discard += 1
+ assert create == discard == expected
+
+
+# Bash script to setup the build instance before the actual testing.
+setup_environment = (
+ textwrap.dedent(
+ """
+ #! /bin/bash
+ set -euo pipefail
+
+ apt install -y python3.10-venv
+ python3 -m venv venv
+ venv/bin/pip install requests
+"""
+ )
+ .strip()
+ .encode("ascii")
+)
+
+wheel_url = (
+ "https://files.pythonhosted.org/packages/0f/ec/"
+ "a9b769274512ea65d8484c2beb8c3d2686d1323b450ce9ee6d09452ac430/"
+ "craft_application-3.0.0-py3-none-any.whl"
+)
+# Bash script to fetch the craft-application wheel.
+check_requests = (
+ textwrap.dedent(
+ f"""
+ #! /bin/bash
+ set -euo pipefail
+
+ venv/bin/python -c "import requests; requests.get('{wheel_url}').raise_for_status()"
+"""
+ )
+ .strip()
+ .encode("ascii")
+)
+
+
+@pytest.fixture
+def lxd_instance(snap_safe_tmp_path, provider_service):
+ provider_service.get_provider("lxd")
+
+ arch = util.get_host_architecture()
+ build_info = BuildInfo("foo", arch, arch, bases.BaseName("ubuntu", "22.04"))
+ instance = provider_service.instance(build_info, work_dir=snap_safe_tmp_path)
+
+ with instance as executor:
+ executor.push_file_io(
+ destination=pathlib.Path("/root/setup-environment.sh"),
+ content=io.BytesIO(setup_environment),
+ file_mode="0644",
+ )
+ executor.execute_run(
+ ["bash", "/root/setup-environment.sh"],
+ check=True,
+ capture_output=True,
+ )
+ yield executor
+
+ if executor is not None:
+ with contextlib.suppress(craft_providers.ProviderError):
+ executor.delete()
+
+
+def test_build_instance_integration(
+ app_service, lxd_instance, tmp_path, monkeypatch, fake_project, manifest_data_dir
+):
+ monkeypatch.chdir(tmp_path)
+
+ app_service.setup()
+
+ env = app_service.create_session(lxd_instance)
+
+ try:
+ # Install the hello Ubuntu package.
+ lxd_instance.execute_run(
+ ["apt", "install", "-y", "hello"], check=True, env=env, capture_output=True
+ )
+
+ # Download the craft-application wheel.
+ lxd_instance.push_file_io(
+ destination=pathlib.Path("/root/check-requests.sh"),
+ content=io.BytesIO(check_requests),
+ file_mode="0644",
+ )
+ lxd_instance.execute_run(
+ ["bash", "/root/check-requests.sh"],
+ check=True,
+ env=env,
+ capture_output=True,
+ )
+
+ # Write the "project" manifest inside the instance, as if a regular
+ # packing was taking place
+ lxd_instance.push_file(
+ source=manifest_data_dir / "project-expected.yaml",
+ destination=_PROJECT_MANIFEST_MANAGED_PATH,
+ )
+
+ finally:
+ report = app_service.teardown_session()
+
+ artefacts_and_types: list[tuple[str, str]] = []
+
+ for artefact in report["artefacts"]:
+ metadata_name = artefact["metadata"]["name"]
+ metadata_type = artefact["metadata"]["type"]
+
+ artefacts_and_types.append((metadata_name, metadata_type))
+
+ # Check that the installation of the "hello" deb went through the inspector.
+ assert ("hello", "application/vnd.debian.binary-package") in artefacts_and_types
+
+ # Check that the fetching of the "craft-application" wheel went through the inspector.
+ assert ("craft-application", "application/x.python.wheel") in artefacts_and_types
+
+ manifest_path = tmp_path / f"{fake_project.name}_{fake_project.version}_foo.json"
+ assert manifest_path.is_file()
+
+ with manifest_path.open("r") as f:
+ manifest_data = json.load(f)
+
+ # Check metadata of the "artifact"
+ assert manifest_data["component-name"] == fake_project.name
+ assert manifest_data["component-version"] == fake_project.version
+ assert manifest_data["architecture"] == "amd64"
+
+ dependencies = {}
+ for dep in manifest_data["dependencies"]:
+ dependencies[dep["component-name"]] = dep
+
+ # Check some of the dependencies
+ assert dependencies["hello"]["type"] == "application/vnd.debian.binary-package"
+ assert dependencies["craft-application"]["type"] == "application/x.python.wheel"
+ assert dependencies["craft-application"]["component-version"] == "3.0.0"
diff --git a/tests/integration/test_application.py b/tests/integration/test_application.py
index 47606046..536f660c 100644
--- a/tests/integration/test_application.py
+++ b/tests/integration/test_application.py
@@ -137,7 +137,7 @@ def test_project_managed(capsys, monkeypatch, tmp_path, project, create_app):
app = create_app()
app._work_dir = tmp_path
- app.run()
+ assert app.run() == 0
assert (tmp_path / "package_1.0.tar.zst").exists()
captured = capsys.readouterr()
@@ -304,6 +304,10 @@ def test_global_environment(
],
)
+ # Check that this odd value makes its way through to the yaml build script
+ build_count = "5"
+ mocker.patch.dict("os.environ", {"TESTCRAFT_PARALLEL_BUILD_COUNT": build_count})
+
# Run in destructive mode
monkeypatch.setattr(
"sys.argv", ["testcraft", "prime", "--destructive-mode", *arguments]
@@ -328,6 +332,7 @@ def test_global_environment(
assert variables["arch_triplet_build_on"].startswith(
util.convert_architecture_deb_to_platform(util.get_host_architecture())
)
+ assert variables["parallel_build_count"] == build_count
@pytest.fixture
diff --git a/tests/unit/commands/test_lifecycle.py b/tests/unit/commands/test_lifecycle.py
index 7a643e54..b4b0960b 100644
--- a/tests/unit/commands/test_lifecycle.py
+++ b/tests/unit/commands/test_lifecycle.py
@@ -433,6 +433,7 @@ def test_pack_fill_parser(
"platform": None,
"build_for": None,
"output": pathlib.Path(output_arg),
+ "fetch_service_policy": None,
**shell_dict,
**debug_dict,
**build_env_dict,
@@ -465,7 +466,9 @@ def test_pack_run(
emitter, mock_services, app_metadata, parts, tmp_path, packages, message
):
mock_services.package.pack.return_value = packages
- parsed_args = argparse.Namespace(parts=parts, output=tmp_path)
+ parsed_args = argparse.Namespace(
+ parts=parts, output=tmp_path, fetch_service_policy=None
+ )
command = PackCommand(
{
"app": app_metadata,
@@ -483,6 +486,34 @@ def test_pack_run(
emitter.assert_progress(message, permanent=True)
+@pytest.mark.parametrize(
+ ("fetch_service_policy", "expect_create_called"),
+ [("strict", True), ("permissive", True), (None, False)],
+)
+def test_pack_fetch_manifest(
+ mock_services, app_metadata, tmp_path, fetch_service_policy, expect_create_called
+):
+ packages = [pathlib.Path("package.zip")]
+ mock_services.package.pack.return_value = packages
+ parsed_args = argparse.Namespace(
+ output=tmp_path, fetch_service_policy=fetch_service_policy
+ )
+ command = PackCommand(
+ {
+ "app": app_metadata,
+ "services": mock_services,
+ }
+ )
+
+ command.run(parsed_args)
+
+ mock_services.package.pack.assert_called_once_with(
+ mock_services.lifecycle.prime_dir,
+ tmp_path,
+ )
+ assert mock_services.fetch.create_project_manifest.called == expect_create_called
+
+
def test_pack_run_wrong_step(app_metadata, fake_services):
parsed_args = argparse.Namespace(parts=None, output=pathlib.Path())
command = PackCommand(
@@ -594,7 +625,9 @@ def test_shell_after_pack(
mocker,
mock_subprocess_run,
):
- parsed_args = argparse.Namespace(shell_after=True, output=pathlib.Path())
+ parsed_args = argparse.Namespace(
+ shell_after=True, output=pathlib.Path(), fetch_service_policy=None
+ )
mock_lifecycle_run = mocker.patch.object(fake_services.lifecycle, "run")
mock_pack = mocker.patch.object(fake_services.package, "pack")
mocker.patch.object(
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
index 9ad49687..33749dd3 100644
--- a/tests/unit/conftest.py
+++ b/tests/unit/conftest.py
@@ -50,4 +50,5 @@ def mock_services(app_metadata, fake_project, fake_package_service_class):
factory.package = mock.Mock(spec=services.PackageService)
factory.provider = mock.Mock(spec=services.ProviderService)
factory.remote_build = mock.Mock(spec_set=services.RemoteBuildService)
+ factory.fetch = mock.Mock(spec=services.FetchService)
return factory
diff --git a/tests/unit/git/test_git.py b/tests/unit/git/test_git.py
index 34268908..e9de637a 100644
--- a/tests/unit/git/test_git.py
+++ b/tests/unit/git/test_git.py
@@ -549,7 +549,7 @@ def test_push_url_hide_token(url, expected_url, mocker, empty_working_directory)
repo.push_url(
remote_url=url,
remote_branch="test-branch",
- token="test-token", # noqa: S106
+ token="test-token",
)
# token should be hidden in the log output
diff --git a/tests/unit/models/test_manifest.py b/tests/unit/models/test_manifest.py
new file mode 100644
index 00000000..2bd3b246
--- /dev/null
+++ b/tests/unit/models/test_manifest.py
@@ -0,0 +1,82 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import json
+from datetime import datetime
+
+import pytest
+from craft_application import util
+from craft_application.models import BuildInfo
+from craft_application.models.manifest import (
+ CraftManifest,
+ ProjectManifest,
+ SessionArtifactManifest,
+)
+from craft_providers import bases
+from freezegun import freeze_time
+
+
+@pytest.fixture
+@freeze_time(datetime.fromisoformat("2024-09-16T01:02:03.456789"))
+def project_manifest(tmp_path, fake_project):
+ project = fake_project
+ build_info = BuildInfo(
+ platform="amd64",
+ build_on="amd64",
+ build_for="amd64",
+ base=bases.BaseName("ubuntu", "24.04"),
+ )
+
+ artifact = tmp_path / "my-artifact.file"
+ artifact.write_text("this is the generated artifact")
+
+ return ProjectManifest.from_packed_artifact(project, build_info, artifact)
+
+
+@pytest.fixture
+def session_report(manifest_data_dir):
+ report_path = manifest_data_dir / "session-report.json"
+ return json.loads(report_path.read_text())
+
+
+def test_from_packed_artifact(project_manifest, manifest_data_dir):
+ expected = (manifest_data_dir / "project-expected.yaml").read_text()
+ obtained = project_manifest.to_yaml_string()
+
+ assert obtained == expected
+
+
+def test_from_session_report(session_report, manifest_data_dir):
+ deps = SessionArtifactManifest.from_session_report(session_report)
+ obtained = util.dump_yaml([d.marshal() for d in deps])
+
+ expected = (manifest_data_dir / "session-manifest-expected.yaml").read_text()
+ assert obtained == expected
+
+
+def test_create_craft_manifest(
+ tmp_path, project_manifest, session_report, manifest_data_dir
+):
+ project_manifest_path = tmp_path / "project-manifest.yaml"
+ project_manifest.to_yaml_file(project_manifest_path)
+
+ craft_manifest = CraftManifest.create_craft_manifest(
+ project_manifest_path, session_report
+ )
+
+ obtained = json.dumps(craft_manifest.marshal(), indent=2) + "\n"
+ expected = (manifest_data_dir / "craft-manifest-expected.json").read_text()
+
+ assert obtained == expected
diff --git a/tests/unit/models/test_project.py b/tests/unit/models/test_project.py
index ca20a2aa..c49e8d5d 100644
--- a/tests/unit/models/test_project.py
+++ b/tests/unit/models/test_project.py
@@ -20,6 +20,7 @@
import textwrap
from textwrap import dedent
+import craft_platforms
import craft_providers.bases
import pydantic
import pytest
@@ -123,6 +124,43 @@ def full_project_dict():
return copy.deepcopy(FULL_PROJECT_DICT)
+@pytest.mark.parametrize(
+ ("incoming", "expected"),
+ [
+ (
+ craft_platforms.BuildInfo(
+ "my-platform",
+ craft_platforms.DebianArchitecture.RISCV64,
+ "all",
+ craft_platforms.DistroBase("ubuntu", "24.04"),
+ ),
+ BuildInfo(
+ "my-platform",
+ "riscv64",
+ "all",
+ craft_providers.bases.BaseName("ubuntu", "24.04"),
+ ),
+ ),
+ (
+ craft_platforms.BuildInfo(
+ "my-platform",
+ craft_platforms.DebianArchitecture.RISCV64,
+ craft_platforms.DebianArchitecture.AMD64,
+ craft_platforms.DistroBase("almalinux", "9"),
+ ),
+ BuildInfo(
+ "my-platform",
+ "riscv64",
+ "amd64",
+ craft_providers.bases.BaseName("almalinux", "9"),
+ ),
+ ),
+ ],
+)
+def test_build_info_from_platforms(incoming, expected):
+ assert BuildInfo.from_platforms(incoming) == expected
+
+
@pytest.mark.parametrize(
("incoming", "expected"),
[
@@ -155,6 +193,46 @@ def test_platform_vectorise_architectures(incoming, expected):
assert platform == expected
+@pytest.mark.parametrize(
+ ("incoming", "expected"),
+ [
+ (
+ {"build-on": ["amd64"], "build-for": ["all"]},
+ Platform(build_on=["amd64"], build_for=["all"]),
+ ),
+ ],
+)
+def test_platform_from_platform_dict(incoming, expected):
+ assert Platform.model_validate(incoming) == expected
+
+
+@pytest.mark.parametrize(
+ ("incoming", "expected"),
+ [
+ pytest.param(
+ {
+ craft_platforms.DebianArchitecture.AMD64: None,
+ craft_platforms.DebianArchitecture.ARM64: None,
+ craft_platforms.DebianArchitecture.RISCV64: None,
+ },
+ {
+ "amd64": Platform(build_on=["amd64"], build_for=["amd64"]),
+ "arm64": Platform(build_on=["arm64"], build_for=["arm64"]),
+ "riscv64": Platform(build_on=["riscv64"], build_for=["riscv64"]),
+ },
+ id="architectures",
+ ),
+ pytest.param(
+ {"any string": {"build-on": ["amd64"], "build-for": ["all"]}},
+ {"any string": Platform(build_on=["amd64"], build_for=["all"])},
+ id="stringy",
+ ),
+ ],
+)
+def test_platform_from_platforms(incoming, expected):
+ assert Platform.from_platforms(incoming) == expected
+
+
@pytest.mark.parametrize(
("project_fixture", "project_dict"),
[("basic_project", BASIC_PROJECT_DICT), ("full_project", FULL_PROJECT_DICT)],
diff --git a/tests/unit/services/test_fetch.py b/tests/unit/services/test_fetch.py
new file mode 100644
index 00000000..72bd7430
--- /dev/null
+++ b/tests/unit/services/test_fetch.py
@@ -0,0 +1,160 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Unit tests for the FetchService.
+
+Note that most of the fetch-service functionality is already tested either on:
+- unit/test_fetch.py, for unit tests of the endpoint calls, or;
+- integration/services/test_fetch.py, for full integration tests.
+
+As such, this module mostly unit-tests error paths coming from wrong usage of
+the FetchService class.
+"""
+import contextlib
+import json
+import re
+from datetime import datetime
+from unittest import mock
+from unittest.mock import MagicMock, call
+
+import craft_providers
+import pytest
+from craft_application import ProviderService, fetch, services
+from craft_application.models import BuildInfo
+from craft_application.services import fetch as service_module
+from craft_providers import bases
+from freezegun import freeze_time
+
+
+@pytest.fixture
+def fetch_service(app, fake_services, fake_project):
+ build_info = BuildInfo(
+ platform="amd64",
+ build_on="amd64",
+ build_for="amd64",
+ base=bases.BaseName("ubuntu", "24.04"),
+ )
+ return services.FetchService(
+ app,
+ fake_services,
+ project=fake_project,
+ build_plan=[build_info],
+ session_policy="strict",
+ )
+
+
+def test_create_session_already_exists(fetch_service):
+ fetch_service._session_data = fetch.SessionData(id="id", token="token")
+
+ expected = re.escape(
+ "create_session() called but there's already a live fetch-service session."
+ )
+ with pytest.raises(ValueError, match=expected):
+ fetch_service.create_session(instance=MagicMock())
+
+
+def test_teardown_session_no_session(fetch_service):
+ expected = re.escape(
+ "teardown_session() called with no live fetch-service session."
+ )
+
+ with pytest.raises(ValueError, match=expected):
+ fetch_service.teardown_session()
+
+
+@freeze_time(datetime.fromisoformat("2024-09-16T01:02:03.456789"))
+def test_create_project_manifest(
+ fetch_service, tmp_path, monkeypatch, manifest_data_dir
+):
+ manifest_path = tmp_path / "craft-project-manifest.yaml"
+ monkeypatch.setattr(service_module, "_PROJECT_MANIFEST_MANAGED_PATH", manifest_path)
+ monkeypatch.setenv("CRAFT_MANAGED_MODE", "1")
+
+ artifact = tmp_path / "my-artifact.file"
+ artifact.write_text("this is the generated artifact")
+
+ assert not manifest_path.exists()
+ fetch_service.create_project_manifest([artifact])
+
+ assert manifest_path.is_file()
+ expected = manifest_data_dir / "project-expected.yaml"
+
+ assert manifest_path.read_text() == expected.read_text()
+
+
+def test_create_project_manifest_not_managed(fetch_service, tmp_path, monkeypatch):
+ manifest_path = tmp_path / "craft-project-manifest.yaml"
+ monkeypatch.setattr(service_module, "_PROJECT_MANIFEST_MANAGED_PATH", manifest_path)
+ monkeypatch.setenv("CRAFT_MANAGED_MODE", "0")
+
+ artifact = tmp_path / "my-artifact.file"
+ artifact.write_text("this is the generated artifact")
+
+ assert not manifest_path.exists()
+ fetch_service.create_project_manifest([artifact])
+ assert not manifest_path.exists()
+
+
+def test_teardown_session_create_manifest(
+ fetch_service, tmp_path, mocker, manifest_data_dir, monkeypatch, fake_project
+):
+ monkeypatch.chdir(tmp_path)
+
+ # A lot of mock setup here but the goal is to have the fake fetch-service
+ # session return the expected report, and the fake CraftManifest return the
+ # expected data.
+
+ # fetch.teardown_session returns a fake session report
+ fake_report = json.loads((manifest_data_dir / "session-report.json").read_text())
+ mocker.patch.object(fetch, "teardown_session", return_value=fake_report)
+
+ # temporarily_pull_file returns a fake project manifest file
+ project_manifest_path = manifest_data_dir / "project-expected.yaml"
+
+ @contextlib.contextmanager
+ def temporarily_pull_file(*, source, missing_ok):
+ assert source == service_module._PROJECT_MANIFEST_MANAGED_PATH
+ assert missing_ok
+ yield project_manifest_path
+
+ mock_instance = mock.Mock(spec=craft_providers.Executor)
+ mock_instance.temporarily_pull_file = temporarily_pull_file
+
+ fetch_service._session_data = {}
+ fetch_service._instance = mock_instance
+
+ fetch_service.teardown_session()
+
+ expected_file = manifest_data_dir / "craft-manifest-expected.json"
+ obtained_file = tmp_path / f"{fake_project.name}_{fake_project.version}_amd64.json"
+
+ assert obtained_file.read_text() + "\n" == expected_file.read_text()
+
+
+@pytest.mark.parametrize("run_on_host", [True, False])
+def test_warning_experimental(mocker, fetch_service, run_on_host, emitter):
+ """The fetch-service warning should only be emitted when running on the host."""
+ mocker.patch.object(fetch, "start_service")
+ mocker.patch.object(ProviderService, "is_managed", return_value=not run_on_host)
+
+ fetch_service.setup()
+
+ warning = (
+ "Warning: the fetch-service integration is experimental "
+ "and still in development."
+ )
+ warning_emitted = call("message", warning) in emitter.interactions
+
+ assert warning_emitted == run_on_host
diff --git a/tests/unit/services/test_lifecycle.py b/tests/unit/services/test_lifecycle.py
index 99e3be54..5dcb6c15 100644
--- a/tests/unit/services/test_lifecycle.py
+++ b/tests/unit/services/test_lifecycle.py
@@ -23,10 +23,11 @@
import craft_parts
import craft_parts.callbacks
+import craft_platforms
import pytest
import pytest_check
from craft_application import errors, models, util
-from craft_application.errors import InvalidParameterError, PartsLifecycleError
+from craft_application.errors import PartsLifecycleError
from craft_application.models.project import BuildInfo
from craft_application.services import lifecycle
from craft_application.util import repositories
@@ -372,7 +373,7 @@ def test_get_primed_stage_packages(lifecycle_service):
BuildInfo(
"my-platform",
build_on="any",
- build_for="amd64",
+ build_for=craft_platforms.DebianArchitecture.AMD64,
base=bases.BaseName("ubuntu", "24.04"),
)
],
@@ -611,160 +612,6 @@ def test_lifecycle_package_repositories(
mock_callback.assert_called_once_with(repositories.install_overlay_repositories)
-# endregion
-
-# region parallel build count tests
-
-
-@pytest.mark.parametrize(
- ("env_dict", "cpu_count", "expected"),
- [
- (
- {},
- None,
- 1,
- ),
- (
- {},
- 100,
- 100,
- ),
- (
- {"TESTCRAFT_PARALLEL_BUILD_COUNT": "100"},
- 1,
- 100,
- ),
- (
- {"CRAFT_PARALLEL_BUILD_COUNT": "200"},
- 1,
- 200,
- ),
- (
- {
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
- },
- 50,
- 50,
- ),
- (
- {
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
- },
- 80,
- 80,
- ),
- (
- {
- "TESTCRAFT_PARALLEL_BUILD_COUNT": "100",
- "CRAFT_PARALLEL_BUILD_COUNT": "200",
- },
- 1,
- 100,
- ),
- (
- {
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "200",
- },
- 150,
- 100,
- ),
- (
- {
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "200",
- },
- None,
- 1,
- ),
- (
- {
- "TESTCRAFT_PARALLEL_BUILD_COUNT": "100",
- "CRAFT_PARALLEL_BUILD_COUNT": "200",
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "300",
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "400",
- },
- 150,
- 100,
- ),
- ],
-)
-def test_get_parallel_build_count(
- monkeypatch, mocker, fake_parts_lifecycle, env_dict, cpu_count, expected
-):
- mocker.patch("os.cpu_count", return_value=cpu_count)
- for env_dict_key, env_dict_value in env_dict.items():
- monkeypatch.setenv(env_dict_key, env_dict_value)
-
- assert fake_parts_lifecycle._get_parallel_build_count() == expected
-
-
-@pytest.mark.parametrize(
- ("env_dict", "cpu_count"),
- [
- (
- {
- "TESTCRAFT_PARALLEL_BUILD_COUNT": "abc",
- },
- 1,
- ),
- (
- {
- "CRAFT_PARALLEL_BUILD_COUNT": "-",
- },
- 1,
- ),
- (
- {
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "*",
- },
- 1,
- ),
- (
- {
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "$COUNT",
- },
- 1,
- ),
- (
- {
- "TESTCRAFT_PARALLEL_BUILD_COUNT": "0",
- },
- 1,
- ),
- (
- {
- "CRAFT_PARALLEL_BUILD_COUNT": "-1",
- },
- 1,
- ),
- (
- {
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "5.6",
- },
- 1,
- ),
- (
- {
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "inf",
- },
- 1,
- ),
- ],
-)
-def test_get_parallel_build_count_error(
- monkeypatch, mocker, fake_parts_lifecycle, env_dict, cpu_count
-):
- mocker.patch("os.cpu_count", return_value=cpu_count)
- for env_dict_key, env_dict_value in env_dict.items():
- monkeypatch.setenv(env_dict_key, env_dict_value)
-
- with pytest.raises(
- InvalidParameterError, match=r"^Value '.*' is invalid for parameter '.*'.$"
- ):
- fake_parts_lifecycle._get_parallel_build_count()
-
-
# endregion
# region project variables
diff --git a/tests/unit/services/test_provider.py b/tests/unit/services/test_provider.py
index c63b7c5d..d1842ee3 100644
--- a/tests/unit/services/test_provider.py
+++ b/tests/unit/services/test_provider.py
@@ -30,6 +30,18 @@
from craft_providers.actions.snap_installer import Snap
+@pytest.fixture
+def mock_provider(monkeypatch, provider_service):
+ mocked_provider = mock.MagicMock(spec=craft_providers.Provider)
+ monkeypatch.setattr(
+ provider_service,
+ "get_provider",
+ lambda name: mocked_provider, # noqa: ARG005 (unused argument)
+ )
+
+ return mocked_provider
+
+
@pytest.mark.parametrize(
("given_environment", "expected_environment"),
[
@@ -416,7 +428,6 @@ def test_get_base_packages(provider_service):
],
)
def test_instance(
- monkeypatch,
check,
emitter,
tmp_path,
@@ -425,13 +436,8 @@ def test_instance(
provider_service,
base_name,
allow_unstable,
+ mock_provider,
):
- mock_provider = mock.MagicMock(spec=craft_providers.Provider)
- monkeypatch.setattr(
- provider_service,
- "get_provider",
- lambda name: mock_provider, # noqa: ARG005 (unused argument)
- )
arch = util.get_host_architecture()
build_info = models.BuildInfo("foo", arch, arch, base_name)
@@ -461,6 +467,33 @@ def test_instance(
emitter.assert_progress("Launching managed .+ instance...", regex=True)
+@pytest.mark.parametrize("clean_existing", [True, False])
+def test_instance_clean_existing(
+ tmp_path,
+ provider_service,
+ mock_provider,
+ clean_existing,
+):
+ arch = util.get_host_architecture()
+ base_name = bases.BaseName("ubuntu", "24.04")
+ build_info = models.BuildInfo("foo", arch, arch, base_name)
+
+ with provider_service.instance(
+ build_info, work_dir=tmp_path, clean_existing=clean_existing
+ ) as _instance:
+ pass
+
+ clean_called = mock_provider.clean_project_environments.called
+ assert clean_called == clean_existing
+
+ if clean_existing:
+ work_dir_inode = tmp_path.stat().st_ino
+ expected_name = f"testcraft-full-project-on-{arch}-for-{arch}-{work_dir_inode}"
+ mock_provider.clean_project_environments.assert_called_once_with(
+ instance_name=expected_name
+ )
+
+
def test_load_bashrc(emitter):
"""Test that we are able to load the bashrc file from the craft-application package."""
bashrc = pkgutil.get_data("craft_application", "misc/instance_bashrc")
diff --git a/tests/unit/test_application.py b/tests/unit/test_application.py
index 49cd843a..17df6c69 100644
--- a/tests/unit/test_application.py
+++ b/tests/unit/test_application.py
@@ -26,6 +26,7 @@
import subprocess
import sys
import textwrap
+from io import StringIO
from textwrap import dedent
from typing import Any
from unittest import mock
@@ -39,6 +40,7 @@
import pytest
import pytest_check
from craft_application import (
+ ProviderService,
application,
commands,
errors,
@@ -56,6 +58,8 @@
from craft_providers import bases
from overrides import override
+from tests.conftest import FakeApplication
+
EMPTY_COMMAND_GROUP = craft_cli.CommandGroup("FakeCommands", [])
BASIC_PROJECT_YAML = """
name: myproject
@@ -367,35 +371,6 @@ def test_app_metadata_default_mandatory_adoptable_fields():
assert app.mandatory_adoptable_fields == ["version"]
-class FakeApplication(application.Application):
- """An application class explicitly for testing. Adds some convenient test hooks."""
-
- platform: str = "unknown-platform"
- build_on: str = "unknown-build-on"
- build_for: str | None = "unknown-build-for"
-
- def set_project(self, project):
- self._Application__project = project
-
- @override
- def _extra_yaml_transform(
- self,
- yaml_data: dict[str, Any],
- *,
- build_on: str,
- build_for: str | None,
- ) -> dict[str, Any]:
- self.build_on = build_on
- self.build_for = build_for
-
- return yaml_data
-
-
-@pytest.fixture
-def app(app_metadata, fake_services):
- return FakeApplication(app_metadata, fake_services)
-
-
class FakePlugin(craft_parts.plugins.Plugin):
def __init__(self, properties, part_info):
pass
@@ -512,6 +487,7 @@ def test_run_managed_success(mocker, app, fake_project, fake_build_plan):
mock.call(
fake_build_plan[0],
work_dir=mock.ANY,
+ clean_existing=False,
)
in mock_provider.instance.mock_calls
)
@@ -570,8 +546,12 @@ def test_run_managed_multiple(app, fake_project):
app.run_managed(None, None)
- assert mock.call(info2, work_dir=mock.ANY) in mock_provider.instance.mock_calls
- assert mock.call(info1, work_dir=mock.ANY) in mock_provider.instance.mock_calls
+ extra_args = {
+ "work_dir": mock.ANY,
+ "clean_existing": False,
+ }
+ assert mock.call(info2, **extra_args) in mock_provider.instance.mock_calls
+ assert mock.call(info1, **extra_args) in mock_provider.instance.mock_calls
def test_run_managed_specified_arch(app, fake_project):
@@ -586,8 +566,12 @@ def test_run_managed_specified_arch(app, fake_project):
app.run_managed(None, "arch2")
- assert mock.call(info2, work_dir=mock.ANY) in mock_provider.instance.mock_calls
- assert mock.call(info1, work_dir=mock.ANY) not in mock_provider.instance.mock_calls
+ extra_args = {
+ "work_dir": mock.ANY,
+ "clean_existing": False,
+ }
+ assert mock.call(info2, **extra_args) in mock_provider.instance.mock_calls
+ assert mock.call(info1, **extra_args) not in mock_provider.instance.mock_calls
def test_run_managed_specified_platform(app, fake_project):
@@ -602,8 +586,12 @@ def test_run_managed_specified_platform(app, fake_project):
app.run_managed("a2", None)
- assert mock.call(info2, work_dir=mock.ANY) in mock_provider.instance.mock_calls
- assert mock.call(info1, work_dir=mock.ANY) not in mock_provider.instance.mock_calls
+ extra_args = {
+ "work_dir": mock.ANY,
+ "clean_existing": False,
+ }
+ assert mock.call(info2, **extra_args) in mock_provider.instance.mock_calls
+ assert mock.call(info1, **extra_args) not in mock_provider.instance.mock_calls
def test_run_managed_empty_plan(app, fake_project):
@@ -2118,3 +2106,38 @@ def test_emitter_docs_url(monkeypatch, mocker, app):
app.run()
assert spied_init.mock_calls[0].kwargs["docs_base_url"] == expected_url
+
+
+def test_clean_platform(monkeypatch, tmp_path, app_metadata, fake_services, mocker):
+ """Test that calling "clean --platform=x" correctly filters the build plan."""
+ data = util.safe_yaml_load(StringIO(BASIC_PROJECT_YAML))
+ # Put a few different platforms on the project
+ arch = util.get_host_architecture()
+ build_on_for = {
+ "build-on": [arch],
+ "build-for": [arch],
+ }
+ data["platforms"] = {
+ "plat1": build_on_for,
+ "plat2": build_on_for,
+ "plat3": build_on_for,
+ }
+ project_file = tmp_path / "testcraft.yaml"
+ project_file.write_text(util.dump_yaml(data))
+ monkeypatch.setattr(sys, "argv", ["testcraft", "clean", "--platform=plat2"])
+
+ mocked_clean = mocker.patch.object(ProviderService, "_clean_instance")
+ app = FakeApplication(app_metadata, fake_services)
+ app.project_dir = tmp_path
+
+ fake_services.project = None
+
+ app.run()
+
+ expected_info = models.BuildInfo(
+ platform="plat2",
+ build_on=arch,
+ build_for=arch,
+ base=bases.BaseName("ubuntu", "24.04"),
+ )
+ mocked_clean.assert_called_once_with(mocker.ANY, mocker.ANY, expected_info)
diff --git a/tests/unit/test_application_fetch.py b/tests/unit/test_application_fetch.py
new file mode 100644
index 00000000..71dd57b0
--- /dev/null
+++ b/tests/unit/test_application_fetch.py
@@ -0,0 +1,123 @@
+# This file is part of craft_application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see .
+"""Unit tests for the interaction between the Application and the FetchService."""
+from typing import Any
+from unittest import mock
+
+import craft_providers
+import pytest
+from craft_application import services
+from typing_extensions import override
+
+
+class FakeFetchService(services.FetchService):
+ """Fake FetchService that tracks calls"""
+
+ def __init__(self, *args, fetch_calls: list[str], **kwargs):
+ super().__init__(*args, **kwargs)
+ self.calls = fetch_calls
+
+ @override
+ def setup(self) -> None:
+ self.calls.append("setup")
+
+ @override
+ def create_session(
+ self,
+ instance: craft_providers.Executor, # (unused-method-argument)
+ ) -> dict[str, str]:
+ self.calls.append("create_session")
+ return {}
+
+ @override
+ def teardown_session(self) -> dict[str, Any]:
+ self.calls.append("teardown_session")
+ return {}
+
+ @override
+ def shutdown(self, *, force: bool = False) -> None:
+ self.calls.append(f"shutdown({force})")
+
+
+@pytest.mark.parametrize("fake_build_plan", [2], indirect=True)
+@pytest.mark.parametrize(
+ ("pack_args", "expected_calls", "expected_clean_existing"),
+ [
+ # No --use-fetch-service: no calls to the FetchService
+ (
+ [],
+ [],
+ False,
+ ),
+ # --use-fetch-service: full expected calls to the FetchService
+ (
+ ["--use-fetch-service", "strict"],
+ [
+ # One call to setup
+ "setup",
+ # Two pairs of create/teardown sessions, for two builds
+ "create_session",
+ "teardown_session",
+ "create_session",
+ "teardown_session",
+ # One call to shut down (with `force`)
+ "shutdown(True)",
+ ],
+ True,
+ ),
+ ],
+)
+def test_run_managed_fetch_service(
+ app,
+ fake_project,
+ fake_build_plan,
+ monkeypatch,
+ pack_args,
+ expected_calls,
+ expected_clean_existing,
+):
+ """Test that the application calls the correct FetchService methods."""
+ mock_provider = mock.MagicMock(spec_set=services.ProviderService)
+ app.services.provider = mock_provider
+ app.set_project(fake_project)
+
+ expected_build_infos = 2
+ assert len(fake_build_plan) == expected_build_infos
+ app._build_plan = fake_build_plan
+
+ fetch_calls: list[str] = []
+ app.services.FetchClass = FakeFetchService
+ app.services.set_kwargs("fetch", fetch_calls=fetch_calls)
+
+ monkeypatch.setattr("sys.argv", ["testcraft", "pack", *pack_args])
+ app.run()
+
+ assert fetch_calls == expected_calls
+
+ # Check that the provider service was correctly instructed to clean, or not
+ # clean, the existing instance.
+
+ # Filter out the various calls to entering and exiting the instance()
+ # context manager.
+ instance_calls = [
+ call
+ for call in mock_provider.instance.mock_calls
+ if "work_dir" in call.kwargs and "clean_existing" in call.kwargs
+ ]
+
+ assert len(instance_calls) == len(fake_build_plan)
+ for call in instance_calls:
+ assert call.kwargs["clean_existing"] == expected_clean_existing
diff --git a/tests/unit/test_fetch.py b/tests/unit/test_fetch.py
new file mode 100644
index 00000000..608b0a66
--- /dev/null
+++ b/tests/unit/test_fetch.py
@@ -0,0 +1,311 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see .
+"""Tests for fetch-service-related functions."""
+import re
+import shlex
+import subprocess
+from pathlib import Path
+from unittest import mock
+from unittest.mock import call
+
+import pytest
+import responses
+from craft_application import errors, fetch
+from craft_providers.lxd import LXDInstance
+from responses import matchers
+
+CONTROL = fetch._DEFAULT_CONFIG.control
+PROXY = fetch._DEFAULT_CONFIG.proxy
+AUTH = fetch._DEFAULT_CONFIG.auth
+
+assert_requests = responses.activate(assert_all_requests_are_fired=True)
+
+
+@assert_requests
+def test_get_service_status_success():
+ responses.add(
+ responses.GET,
+ f"http://localhost:{CONTROL}/status",
+ json={"uptime": 10},
+ status=200,
+ )
+ status = fetch.get_service_status()
+ assert status == {"uptime": 10}
+
+
+@assert_requests
+def test_get_service_status_failure():
+ responses.add(
+ responses.GET,
+ f"http://localhost:{CONTROL}/status",
+ status=404,
+ )
+ expected = "Error with fetch-service GET: 404 Client Error"
+ with pytest.raises(errors.FetchServiceError, match=expected):
+ fetch.get_service_status()
+
+
+@pytest.mark.parametrize(
+ ("status", "json", "expected"),
+ [
+ (200, {"uptime": 10}, True),
+ (200, {"uptime": 10, "other-key": "value"}, True),
+ (200, {"other-key": "value"}, False),
+ (404, {"other-key": "value"}, False),
+ ],
+)
+@assert_requests
+def test_is_service_online(status, json, expected):
+ responses.add(
+ responses.GET,
+ f"http://localhost:{CONTROL}/status",
+ status=status,
+ json=json,
+ )
+ assert fetch.is_service_online() == expected
+
+
+def test_start_service(mocker, tmp_path):
+ mock_is_online = mocker.patch.object(fetch, "is_service_online", return_value=False)
+ mocker.patch.object(fetch, "_check_installed", return_value=True)
+ mock_base_dir = mocker.patch.object(
+ fetch, "_get_service_base_dir", return_value=tmp_path
+ )
+ mock_get_status = mocker.patch.object(
+ fetch, "get_service_status", return_value={"uptime": 10}
+ )
+ mock_archive_key = mocker.patch.object(
+ subprocess, "check_output", return_value="DEADBEEF"
+ )
+
+ fake_cert, fake_key = tmp_path / "cert.crt", tmp_path / "key.pem"
+ mock_obtain_certificate = mocker.patch.object(
+ fetch, "_obtain_certificate", return_value=(fake_cert, fake_key)
+ )
+
+ mock_popen = mocker.patch.object(subprocess, "Popen")
+ mock_process = mock_popen.return_value
+ mock_process.poll.return_value = None
+
+ process = fetch.start_service()
+ assert process is mock_process
+
+ assert mock_is_online.called
+ assert mock_base_dir.called
+ assert mock_get_status.called
+ mock_archive_key.assert_called_once_with(
+ [
+ "gpg",
+ "--export",
+ "--armor",
+ "--no-default-keyring",
+ "--keyring",
+ "/snap/fetch-service/current/usr/share/keyrings/ubuntu-archive-keyring.gpg",
+ "F6ECB3762474EDA9D21B7022871920D1991BC93C",
+ ],
+ text=True,
+ )
+
+ assert mock_obtain_certificate.called
+
+ popen_call = mock_popen.mock_calls[0]
+ assert popen_call == call(
+ [
+ "bash",
+ "-c",
+ shlex.join(
+ [
+ fetch._FETCH_BINARY,
+ f"--control-port={CONTROL}",
+ f"--proxy-port={PROXY}",
+ f"--config={tmp_path/'config'}",
+ f"--spool={tmp_path/'spool'}",
+ f"--cert={fake_cert}",
+ f"--key={fake_key}",
+ "--permissive-mode",
+ "--idle-shutdown=300",
+ ]
+ )
+ + f" > {fetch._get_log_filepath()}",
+ ],
+ env={"FETCH_SERVICE_AUTH": AUTH, "FETCH_APT_RELEASE_PUBLIC_KEY": "DEADBEEF"},
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ text=True,
+ start_new_session=True,
+ )
+
+
+def test_start_service_already_up(mocker):
+ """If the fetch-service is already up then a new process is *not* created."""
+ mock_is_online = mocker.patch.object(fetch, "is_service_online", return_value=True)
+ mock_popen = mocker.patch.object(subprocess, "Popen")
+
+ assert fetch.start_service() is None
+
+ assert mock_is_online.called
+ assert not mock_popen.called
+
+
+def test_start_service_not_installed(mocker):
+ mocker.patch.object(fetch, "is_service_online", return_value=False)
+ mocker.patch.object(fetch, "_check_installed", return_value=False)
+
+ expected = re.escape("The 'fetch-service' snap is not installed.")
+ with pytest.raises(errors.FetchServiceError, match=expected):
+ fetch.start_service()
+
+
+@assert_requests
+@pytest.mark.parametrize(
+ ("strict", "expected_policy"), [(True, "strict"), (False, "permissive")]
+)
+def test_create_session(strict, expected_policy):
+ responses.add(
+ responses.POST,
+ f"http://localhost:{CONTROL}/session",
+ json={"id": "my-session-id", "token": "my-session-token"},
+ status=200,
+ match=[matchers.json_params_matcher({"policy": expected_policy})],
+ )
+
+ session_data = fetch.create_session(strict=strict)
+
+ assert session_data.session_id == "my-session-id"
+ assert session_data.token == "my-session-token"
+
+
+@assert_requests
+def test_teardown_session():
+ session_data = fetch.SessionData(id="my-session-id", token="my-session-token")
+
+ # Call to delete token
+ responses.delete(
+ f"http://localhost:{CONTROL}/session/{session_data.session_id}/token",
+ match=[matchers.json_params_matcher({"token": session_data.token})],
+ json={},
+ status=200,
+ )
+ # Call to get session report
+ responses.get(
+ f"http://localhost:{CONTROL}/session/{session_data.session_id}",
+ json={},
+ status=200,
+ )
+ # Call to delete session
+ responses.delete(
+ f"http://localhost:{CONTROL}/session/{session_data.session_id}",
+ json={},
+ status=200,
+ )
+ # Call to delete session resources
+ responses.delete(
+ f"http://localhost:{CONTROL}/resources/{session_data.session_id}",
+ json={},
+ status=200,
+ )
+
+ fetch.teardown_session(session_data)
+
+
+def test_configure_build_instance(mocker):
+ mocker.patch.object(fetch, "_get_gateway", return_value="127.0.0.1")
+ mocker.patch.object(
+ fetch, "_obtain_certificate", return_value=("fake-cert.crt", "key.pem")
+ )
+
+ session_data = fetch.SessionData(id="my-session-id", token="my-session-token")
+ instance = mock.MagicMock(spec_set=LXDInstance)
+ assert isinstance(instance, LXDInstance)
+
+ expected_proxy = f"http://my-session-id:my-session-token@127.0.0.1:{PROXY}/"
+ expected_env = {
+ "http_proxy": expected_proxy,
+ "https_proxy": expected_proxy,
+ "REQUESTS_CA_BUNDLE": "/usr/local/share/ca-certificates/local-ca.crt",
+ "CARGO_HTTP_CAINFO": "/usr/local/share/ca-certificates/local-ca.crt",
+ "GOPROXY": "direct",
+ }
+
+ env = fetch.configure_instance(instance, session_data)
+ assert env == expected_env
+
+ # Execution calls on the instance
+ assert instance.execute_run.mock_calls == [
+ call(
+ ["/bin/sh", "-c", "/usr/sbin/update-ca-certificates > /dev/null"],
+ check=True,
+ ),
+ call(["mkdir", "-p", "/root/.pip"]),
+ call(["systemctl", "restart", "snapd"]),
+ call(
+ [
+ "snap",
+ "set",
+ "system",
+ f"proxy.http={expected_proxy}",
+ ]
+ ),
+ call(
+ [
+ "snap",
+ "set",
+ "system",
+ f"proxy.https={expected_proxy}",
+ ]
+ ),
+ call(["/bin/rm", "-Rf", "/var/lib/apt/lists"], check=True),
+ call(
+ ["apt", "update"],
+ env=expected_env,
+ check=True,
+ stdout=mocker.ANY,
+ stderr=mocker.ANY,
+ ),
+ ]
+
+ # Files pushed to the instance
+ assert instance.push_file.mock_calls == [
+ call(
+ source="fake-cert.crt",
+ destination=Path("/usr/local/share/ca-certificates/local-ca.crt"),
+ )
+ ]
+
+ assert instance.push_file_io.mock_calls == [
+ call(
+ destination=Path("/root/.pip/pip.conf"),
+ content=mocker.ANY,
+ file_mode="0644",
+ ),
+ call(
+ destination=Path("/etc/apt/apt.conf.d/99proxy"),
+ content=mocker.ANY,
+ file_mode="0644",
+ ),
+ ]
+
+
+def test_get_certificate_dir(mocker):
+ mocker.patch.object(
+ fetch,
+ "_get_service_base_dir",
+ return_value=Path("/home/user/snap/fetch-service/common"),
+ )
+ cert_dir = fetch._get_certificate_dir()
+
+ expected = Path("/home/user/snap/fetch-service/common/craft/fetch-certificate")
+ assert cert_dir == expected
diff --git a/tests/unit/test_secrets.py b/tests/unit/test_secrets.py
index 62f0d88a..22db8525 100644
--- a/tests/unit/test_secrets.py
+++ b/tests/unit/test_secrets.py
@@ -100,7 +100,7 @@ def test_secrets_cache(mocker, monkeypatch):
spied_run.assert_called_once_with("echo ${SECRET_1}")
-_SECRET = "$(HOST_SECRET:echo ${GIT_VERSION})" # noqa: S105 (this is not a password)
+_SECRET = "$(HOST_SECRET:echo ${GIT_VERSION})" # (this is not a password)
@pytest.mark.parametrize(
diff --git a/tests/unit/util/test_system.py b/tests/unit/util/test_system.py
new file mode 100644
index 00000000..343394d3
--- /dev/null
+++ b/tests/unit/util/test_system.py
@@ -0,0 +1,174 @@
+# This file is part of craft_application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see .
+"""Unit tests for system util module."""
+
+import pytest
+from craft_application import util
+from craft_application.errors import InvalidParameterError
+
+
+@pytest.mark.parametrize(
+ ("env_dict", "cpu_count", "expected"),
+ [
+ (
+ {},
+ None,
+ 1,
+ ),
+ (
+ {},
+ 100,
+ 100,
+ ),
+ (
+ {"TESTCRAFT_PARALLEL_BUILD_COUNT": "100"},
+ 1,
+ 100,
+ ),
+ (
+ {"CRAFT_PARALLEL_BUILD_COUNT": "200"},
+ 1,
+ 200,
+ ),
+ (
+ {
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
+ },
+ 50,
+ 50,
+ ),
+ (
+ {
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
+ },
+ 80,
+ 80,
+ ),
+ (
+ {
+ "TESTCRAFT_PARALLEL_BUILD_COUNT": "100",
+ "CRAFT_PARALLEL_BUILD_COUNT": "200",
+ },
+ 1,
+ 100,
+ ),
+ (
+ {
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "200",
+ },
+ 150,
+ 100,
+ ),
+ (
+ {
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "200",
+ },
+ None,
+ 1,
+ ),
+ (
+ {
+ "TESTCRAFT_PARALLEL_BUILD_COUNT": "100",
+ "CRAFT_PARALLEL_BUILD_COUNT": "200",
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "300",
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "400",
+ },
+ 150,
+ 100,
+ ),
+ ],
+)
+def test_get_parallel_build_count(monkeypatch, mocker, env_dict, cpu_count, expected):
+ mocker.patch("os.cpu_count", return_value=cpu_count)
+ for env_dict_key, env_dict_value in env_dict.items():
+ monkeypatch.setenv(env_dict_key, env_dict_value)
+
+ assert util.get_parallel_build_count("testcraft") == expected
+
+
+@pytest.mark.parametrize(
+ ("env_dict", "cpu_count"),
+ [
+ pytest.param(
+ {
+ "TESTCRAFT_PARALLEL_BUILD_COUNT": "abc",
+ },
+ 1,
+ id="abc",
+ ),
+ pytest.param(
+ {
+ "CRAFT_PARALLEL_BUILD_COUNT": "-",
+ },
+ 1,
+ id="-",
+ ),
+ pytest.param(
+ {
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "*",
+ },
+ 1,
+ id="*",
+ ),
+ pytest.param(
+ {
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "$COUNT",
+ },
+ 1,
+ id="COUNT",
+ ),
+ pytest.param(
+ {
+ "TESTCRAFT_PARALLEL_BUILD_COUNT": "0",
+ },
+ 1,
+ id="0",
+ ),
+ pytest.param(
+ {
+ "CRAFT_PARALLEL_BUILD_COUNT": "-1",
+ },
+ 1,
+ id="-1",
+ ),
+ pytest.param(
+ {
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "5.6",
+ },
+ 1,
+ id="5.6",
+ ),
+ pytest.param(
+ {
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "inf",
+ },
+ 1,
+ id="inf",
+ ),
+ ],
+)
+def test_get_parallel_build_count_error(monkeypatch, mocker, env_dict, cpu_count):
+
+ mocker.patch("os.cpu_count", return_value=cpu_count)
+ for env_dict_key, env_dict_value in env_dict.items():
+ monkeypatch.setenv(env_dict_key, env_dict_value)
+
+ with pytest.raises(
+ InvalidParameterError, match=r"^Value '.*' is invalid for parameter '.*'.$"
+ ):
+ util.get_parallel_build_count("testcraft")
diff --git a/tox.ini b/tox.ini
index 1299849e..4cffea77 100644
--- a/tox.ini
+++ b/tox.ini
@@ -119,7 +119,7 @@ commands = sphinx-build {posargs:-b html} -W {tox_root}/docs {tox_root}/docs/_bu
[testenv:autobuild-docs]
description = Build documentation with an autoupdating server
base = docs
-commands = sphinx-autobuild {posargs:-b html --open-browser --port 8080} -W --watch {tox_root}/craft_application {tox_root}/docs {tox_root}/docs/_build/html
+commands = sphinx-autobuild {posargs:-b html --open-browser --port 8080} -W --watch {tox_root}/craft_application {tox_root}/docs {tox_root}/docs/_build
[testenv:lint-docs]
description = Lint the documentation with sphinx-lint