diff --git a/src/hydra-eval-jobs/hydra-eval-jobs.cc b/src/hydra-eval-jobs/hydra-eval-jobs.cc index 7ed5e3a88..41460b6d4 100644 --- a/src/hydra-eval-jobs/hydra-eval-jobs.cc +++ b/src/hydra-eval-jobs/hydra-eval-jobs.cc @@ -471,14 +471,14 @@ static void rewriteAggregates(nlohmann::json & jobs, auto drvPath = store->parseStorePath((std::string) job["drvPath"]); auto drv = store->readDerivation(drvPath); - for (auto & childJobName : aggregateJob.dependencies) { - auto childDrvPath = store->parseStorePath((std::string) jobs[childJobName]["drvPath"]); - auto childDrv = store->readDerivation(childDrvPath); - job["constituents"].push_back(store->printStorePath(childDrvPath)); - drv.inputDrvs.map[childDrvPath].value = {childDrv.outputs.begin()->first}; - } - if (aggregateJob.brokenJobs.empty()) { + for (auto & childJobName : aggregateJob.dependencies) { + auto childDrvPath = store->parseStorePath((std::string) jobs[childJobName]["drvPath"]); + auto childDrv = store->readDerivation(childDrvPath); + job["constituents"].push_back(store->printStorePath(childDrvPath)); + drv.inputDrvs.map[childDrvPath].value = {childDrv.outputs.begin()->first}; + } + std::string drvName(drvPath.name()); assert(hasSuffix(drvName, drvExtension)); drvName.resize(drvName.size() - drvExtension.size()); @@ -513,6 +513,7 @@ static void rewriteAggregates(nlohmann::json & jobs, } if (!aggregateJob.brokenJobs.empty()) { + job.erase("constituents"); std::stringstream ss; for (const auto& [jobName, error] : aggregateJob.brokenJobs) { ss << jobName << ": " << error << "\n"; diff --git a/t/evaluator/evaluate-constituents-globbing.t b/t/evaluator/evaluate-constituents-globbing.t index c4a67f131..825c93900 100644 --- a/t/evaluator/evaluate-constituents-globbing.t +++ b/t/evaluator/evaluate-constituents-globbing.t @@ -164,4 +164,30 @@ subtest "cycle check with globbing" => sub { ok(defined $builds->{"packages.constituentB"}, "'packages.constituentB' is part of the jobset evaluation"); }; +subtest "partial error 
doesn't swallow other eval errors" => sub { + my $jobsetCtx = $ctx->makeJobset( + expression => 'constituents-partial-error.nix', + ); + my $jobset = $jobsetCtx->{"jobset"}; + + my ($res, $stdout, $stderr) = captureStdoutStderr(60, + ("hydra-eval-jobset", $jobsetCtx->{"project"}->name, $jobset->name) + ); + + ok(utf8::decode($stderr), "Stderr output is UTF8-clean"); + + $jobset->discard_changes; # refresh from DB + + like( + $jobset->errormsg, + qr/in job ‘release’:\npkgs.*: constituent glob pattern had no matches/, + "eval error of 'release' is reported" + ); + like( + $jobset->errormsg, + qr/in job ‘pkgs’:\nerror:\n.*error: you shall not pass/s, + "eval error of 'pkgs' is reported" + ); +}; + done_testing; diff --git a/t/jobs/constituents-partial-error.nix b/t/jobs/constituents-partial-error.nix new file mode 100644 index 000000000..fc67880ac --- /dev/null +++ b/t/jobs/constituents-partial-error.nix @@ -0,0 +1,26 @@ +with import ./config.nix; +{ + pkgs = throw "you shall not pass"; + pkgs2.foo = mkDerivation { + name = "foobar"; + builder = builtins.toFile "build.sh" '' + #!/bin/sh + mkdir $out + ''; + }; + + release = mkDerivation { + name = "foobar"; + builder = builtins.toFile "build.sh" '' + #!/bin/sh + mkdir $out + ''; + _hydraAggregate = true; + _hydraGlobConstituents = true; + constituents = [ + "pkgs.*" + "pkgs2.*" + ]; + }; +} +