diff --git a/metricsaggregator/README.md b/metricsaggregator/README.md index d6598f06..c9283644 100644 --- a/metricsaggregator/README.md +++ b/metricsaggregator/README.md @@ -69,16 +69,39 @@ Usage:
[options] [command] [command options] format of the workflows that were validated by the validator specified. The first line of the file should contain the CSV fields: trsID,versionName,isValid,dateExecuted + -id, --executionId + The execution ID to use for each validation execution. Assumes + that each validation in the file is performed on unique workflows + and workflow versions. --help Prints help for metricsaggregator * -p, --platform The platform that the workflow was validated on - Possible Values: [GALAXY, TERRA, DNA_STACK, DNA_NEXUS, CGC, NHLBI_BIODATA_CATALYST, ANVIL, CAVATICA, NEXTFLOW_TOWER, ELWAZI, AGC, OTHER] + Possible Values: [GALAXY, TERRA, DNA_STACK, DNA_NEXUS, CGC, NHLBI_BIODATA_CATALYST, ANVIL, CAVATICA, NEXTFLOW_TOWER, ELWAZI, AGC, OTHER, ALL] * -v, --validator The validator tool used to validate the workflows Possible Values: [MINIWDL, WOMTOOL, CWLTOOL, NF_VALIDATION, OTHER] * -vv, --validatorVersion The version of the validator tool used to validate the workflows + + submit-terra-metrics Submits workflow metrics provided by Terra via a + CSV file to Dockstore + Usage: submit-terra-metrics [options] + Options: + -c, --config + The config file path. + Default: ./metrics-aggregator.config + * -d, --data + The file path to the CSV file containing workflow metrics from + Terra. The first line of the file should contain the CSV fields: workflow_id,status,workflow_start,workflow_end,workflow_runtime_minutes,source_url + -de, --description + Optional description about the metrics to include when submitting + metrics to Dockstore + --help + Prints help for metricsaggregator + -r, --recordSkipped + Record skipped executions and the reason skipped to a CSV file + Default: false ``` ### aggregate-metrics @@ -98,7 +121,18 @@ with miniwdl on DNAstack: ``` java -jar target/metricsaggregator-*-SNAPSHOT.jar submit-validation-data --config my-custom-config \ ---data --validator MINIWDL --validatorVersion 1.0 --platform DNA_STACK +--data --validator MINIWDL --validatorVersion 1.0 --platform DNA_STACK --executionId a02075d9-092a-4fe7-9f83-4abf11de3dc9 +``` + +After running this command, you will want to run the `aggregate-metrics` command to aggregate the new validation data submitted. + +### submit-terra-metrics + +The following is an example command that submits metrics from a CSV file that Terra provided, recording the metrics that were skipped into an output CSV file. + +``` +java -jar target/metricsaggregator-*-SNAPSHOT.jar submit-terra-metrics --config my-custom-config \ +--data --recordSkipped ``` -After running this command, you will want to run the `aggregate-metrics` command to aggregate the new validation data submitted. \ No newline at end of file +After running this command, you will want to run the `aggregate-metrics` command to aggregate the new Terra metrics submitted. 
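For reference, the `submit-terra-metrics` input format documented above can be produced with Apache Commons CSV, the dependency this change adds below. A minimal sketch, with sample values borrowed from the `terra-metrics.csv` test fixture in this change (the class and output file names are illustrative):

```
import java.io.FileWriter;
import java.io.IOException;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class SampleTerraMetricsFile {
    public static void main(String[] args) throws IOException {
        // Header order matches the CSV fields listed in the help text above.
        CSVFormat format = CSVFormat.DEFAULT.builder()
                .setHeader("workflow_id", "status", "workflow_start", "workflow_end", "workflow_runtime_minutes", "source_url")
                .build();
        try (CSVPrinter printer = new CSVPrinter(new FileWriter("terra-metrics.csv"), format)) {
            printer.printRecord("d13db165-9289-4ebd-93c7-26db408f84c1", "Succeeded",
                    "2022-06-30 00:33:44.101000 UTC", "2022-06-30 01:25:29.663000 UTC", "51",
                    "https://raw.githubusercontent.com/dockstore-testing/testWorkflow/master/Dockstore.cwl");
        }
    }
}
```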
diff --git a/metricsaggregator/pom.xml b/metricsaggregator/pom.xml index 7a525b01..362f0e83 100644 --- a/metricsaggregator/pom.xml +++ b/metricsaggregator/pom.xml @@ -132,6 +132,14 @@ org.slf4j slf4j-api + + ch.qos.logback + logback-core + + + ch.qos.logback + logback-classic + software.amazon.awssdk s3 @@ -144,6 +152,10 @@ javax.money money-api + + org.apache.commons + commons-csv + org.junit.jupiter junit-jupiter-api @@ -278,6 +290,8 @@ org.glassfish.jersey.inject:jersey-hk2 javax.money:money-api org.javamoney.moneta:moneta-core + ch.qos.logback:logback-classic + ch.qos.logback:logback-core diff --git a/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/MetricsAggregatorS3Client.java b/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/MetricsAggregatorS3Client.java index a7b5182f..179b3337 100644 --- a/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/MetricsAggregatorS3Client.java +++ b/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/MetricsAggregatorS3Client.java @@ -39,8 +39,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Queue; -import java.util.UUID; +import java.util.concurrent.atomic.AtomicInteger; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.awssdk.services.s3.S3Client; @@ -52,6 +53,9 @@ public class MetricsAggregatorS3Client { private static final Logger LOG = LoggerFactory.getLogger(MetricsAggregatorS3Client.class); private static final Gson GSON = new Gson(); + private final AtomicInteger numberOfDirectoriesProcessed = new AtomicInteger(0); + private final AtomicInteger numberOfMetricsSubmitted = new AtomicInteger(0); + private final AtomicInteger numberOfMetricsSkipped = new AtomicInteger(0); private final String bucketName; @@ -71,60 +75,80 @@ public MetricsAggregatorS3Client(String bucketName, String s3EndpointOverride) t } public void aggregateMetrics(ExtendedGa4GhApi extendedGa4GhApi) { + LOG.info("Getting directories to process..."); List metricsDirectories = getDirectories(); if (metricsDirectories.isEmpty()) { - System.out.println("No directories found to aggregate metrics"); + LOG.info("No directories found to aggregate metrics"); return; } - System.out.println("Aggregating metrics..."); - for (S3DirectoryInfo directoryInfo : metricsDirectories) { - String toolId = directoryInfo.toolId(); - String versionName = directoryInfo.versionId(); - List platforms = directoryInfo.platforms(); - String platformsString = String.join(", ", platforms); - String versionS3KeyPrefix = directoryInfo.versionS3KeyPrefix(); + LOG.info("Aggregating metrics for {} directories", metricsDirectories.size()); + metricsDirectories.stream() + .parallel() + .forEach(directoryInfo -> aggregateMetricsForDirectory(directoryInfo, extendedGa4GhApi)); + LOG.info("Completed aggregating metrics. 
Processed {} directories, submitted {} platform metrics, and skipped {} platform metrics", numberOfDirectoriesProcessed, numberOfMetricsSubmitted, numberOfMetricsSkipped); + } - // Collect metrics for each platform, so we can calculate metrics across all platforms - List allMetrics = new ArrayList<>(); - for (String platform : platforms) { - ExecutionsRequestBody allSubmissions; - try { - allSubmissions = getExecutions(toolId, versionName, platform); - } catch (Exception e) { - LOG.error("Error aggregating metrics: Could not get all executions from directory {}", versionS3KeyPrefix, e); - continue; // Continue aggregating metrics for other directories - } + private void aggregateMetricsForDirectory(S3DirectoryInfo directoryInfo, ExtendedGa4GhApi extendedGa4GhApi) { + LOG.info("Processing directory {}", directoryInfo); + String toolId = directoryInfo.toolId(); + String versionName = directoryInfo.versionId(); + List platforms = directoryInfo.platforms(); + String platformsString = String.join(", ", platforms); + String versionS3KeyPrefix = directoryInfo.versionS3KeyPrefix(); - try { - getAggregatedMetrics(allSubmissions).ifPresent(metrics -> { - extendedGa4GhApi.aggregatedMetricsPut(metrics, platform, toolId, versionName); - System.out.printf("Aggregated metrics for tool ID %s, version %s, platform %s from directory %s%n", toolId, versionName, platform, versionS3KeyPrefix); - allMetrics.add(metrics); - }); - } catch (Exception e) { - LOG.error("Error aggregating metrics: Could not put all executions from directory {}", versionS3KeyPrefix, e); - // Continue aggregating metrics for other platforms - } + // Collect metrics for each platform, so we can calculate metrics across all platforms + List allMetrics = new ArrayList<>(); + for (String platform : platforms) { + ExecutionsRequestBody allSubmissions; + try { + allSubmissions = getExecutions(toolId, versionName, platform); + } catch (Exception e) { + LOG.error("Error aggregating metrics: Could not get all executions from directory {}", versionS3KeyPrefix, e); + numberOfMetricsSkipped.incrementAndGet(); + continue; // Continue aggregating metrics for other directories } - if (!allMetrics.isEmpty()) { - // Calculate metrics across all platforms by aggregating the aggregated metrics from each platform - try { - getAggregatedMetrics(allMetrics).ifPresent(metrics -> { - extendedGa4GhApi.aggregatedMetricsPut(metrics, Partner.ALL.name(), toolId, versionName); - System.out.printf("Aggregated metrics across all platforms (%s) for tool ID %s, version %s from directory %s%n", - platformsString, toolId, versionName, versionS3KeyPrefix); - allMetrics.add(metrics); - }); - } catch (Exception e) { - LOG.error("Error aggregating metrics across all platforms ({}) for tool ID {}, version {} from directory {}", platformsString, toolId, versionName, versionS3KeyPrefix, e); - // Continue aggregating metrics for other directories + try { + Optional aggregatedPlatformMetric = getAggregatedMetrics(allSubmissions); + if (aggregatedPlatformMetric.isPresent()) { + extendedGa4GhApi.aggregatedMetricsPut(aggregatedPlatformMetric.get(), platform, toolId, versionName); + LOG.info("Aggregated metrics for tool ID {}, version {}, platform {} from directory {}", toolId, versionName, platform, + versionS3KeyPrefix); + allMetrics.add(aggregatedPlatformMetric.get()); + numberOfMetricsSubmitted.incrementAndGet(); + } else { + LOG.error("Error aggregating metrics for tool ID {}, version {}, platform {} from directory {}", toolId, versionName, platform, versionS3KeyPrefix); } + } 
catch (Exception e) { + LOG.error("Error aggregating metrics: Could not put all executions from directory {}", versionS3KeyPrefix, e); + numberOfMetricsSkipped.incrementAndGet(); + // Continue aggregating metrics for other platforms } } - System.out.println("Completed aggregating metrics"); + + if (!allMetrics.isEmpty()) { + // Calculate metrics across all platforms by aggregating the aggregated metrics from each platform + try { + getAggregatedMetrics(allMetrics).ifPresent(metrics -> { + extendedGa4GhApi.aggregatedMetricsPut(metrics, Partner.ALL.name(), toolId, versionName); + LOG.info("Aggregated metrics across all platforms ({}) for tool ID {}, version {} from directory {}", + platformsString, toolId, versionName, versionS3KeyPrefix); + allMetrics.add(metrics); + numberOfMetricsSubmitted.incrementAndGet(); + }); + } catch (Exception e) { + LOG.error("Error aggregating metrics across all platforms ({}) for tool ID {}, version {} from directory {}", platformsString, toolId, versionName, versionS3KeyPrefix, e); + numberOfMetricsSkipped.incrementAndGet(); + // Continue aggregating metrics for other directories + } + } else { + LOG.error("Error aggregating metrics for directory {}: no platform metrics aggregated", versionS3KeyPrefix); + numberOfMetricsSkipped.incrementAndGet(); + } + numberOfDirectoriesProcessed.incrementAndGet(); + LOG.info("Processed {} directories", numberOfDirectoriesProcessed); } /** @@ -152,7 +176,7 @@ private ExecutionsRequestBody getExecutions(String toolId, String versionName, S try { executionsFromOneSubmission = GSON.fromJson(fileContent, ExecutionsRequestBody.class); } catch (JsonSyntaxException e) { - LOG.error("Could not read execution(s) from S3 key {}, ignoring file", metricsData.s3Key()); + LOG.error("Could not read execution(s) from S3 key {}, ignoring file", metricsData.s3Key(), e); continue; } @@ -196,20 +220,6 @@ private ExecutionsRequestBody getExecutions(String toolId, String versionName, S .aggregatedExecutions(executionIdToAggregatedExecutionMap.values().stream().toList()); } - /** - * If the execution ID is null, generate a random one for the purposes of aggregation. - * Executions that were submitted to S3 prior to the existence of execution IDs don't have an execution ID, - * thus for the purposes of aggregation, generate one. - * @param executionId - * @return - */ - private String generateExecutionIdIfNull(String executionId) { - if (executionId == null) { - return UUID.randomUUID().toString(); - } - return executionId; - } - /** * Returns a unique list of directories containing metrics files. * For example, suppose the local-dockstore-metrics-data bucket looks like the following. 
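The refactor above swaps the sequential per-directory loop for a parallel stream, which is why the new counters are `AtomicInteger`s rather than plain `int`s. A minimal sketch of the pattern in isolation, with placeholder work standing in for the real S3 and API calls:

```
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

public class ParallelTallySketch {
    public static void main(String[] args) {
        List<String> directories = List.of("dir1", "dir2", "dir3");
        AtomicInteger processed = new AtomicInteger(0);
        AtomicInteger skipped = new AtomicInteger(0);

        // The forEach body may run on several threads at once, so plain int
        // counters would race; AtomicInteger increments are thread-safe.
        directories.stream().parallel().forEach(directory -> {
            try {
                // Placeholder for getExecutions(...) and aggregatedMetricsPut(...)
                processed.incrementAndGet();
            } catch (RuntimeException e) {
                skipped.incrementAndGet(); // keep going for the other directories
            }
        });
        System.out.printf("processed=%d skipped=%d%n", processed.get(), skipped.get());
    }
}
```

A failure in one directory only bumps `skipped` and never aborts the stream, matching the `continue`-style error handling of the loop it replaces.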
diff --git a/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/CommandLineArgs.java b/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/CommandLineArgs.java index 68910bc4..8762ba4a 100644 --- a/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/CommandLineArgs.java +++ b/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/CommandLineArgs.java @@ -85,4 +85,51 @@ public String getExecutionId() { return executionId; } } + + @Parameters(commandNames = { "submit-terra-metrics" }, commandDescription = "Submits workflow metrics provided by Terra via a CSV file to Dockstore") + public static class SubmitTerraMetrics extends CommandLineArgs { + @Parameter(names = {"-c", "--config"}, description = "The config file path.") + private File config = new File("./" + MetricsAggregatorClient.CONFIG_FILE_NAME); + + + @Parameter(names = {"-d", "--data"}, description = "The file path to the CSV file containing workflow metrics from Terra. The first line of the file should contain the CSV fields: workflow_id,status,workflow_start,workflow_end,workflow_runtime_minutes,source_url", required = true) + private String dataFilePath; + + @Parameter(names = {"-r", "--recordSkipped"}, description = "Record skipped executions and the reason skipped to a CSV file") + private boolean recordSkippedExecutions; + + @Parameter(names = {"-de", "--description"}, description = "Optional description about the metrics to include when submitting metrics to Dockstore") + private String description; + + public File getConfig() { + return config; + } + + public String getDataFilePath() { + return dataFilePath; + } + + public String getDescription() { + return description; + } + + public boolean isRecordSkippedExecutions() { + return recordSkippedExecutions; + } + + /** + * Headers for the input data file + */ + public enum TerraMetricsCsvHeaders { + workflow_id, status, workflow_start, workflow_end, workflow_runtime_minutes, source_url + } + + /** + * Headers for the output file containing workflow executions that were skipped. 
+ * The headers are the same as the input file headers, with the addition of a "reason" header indicating why an execution was skipped + */ + public enum SkippedTerraMetricsCsvHeaders { + workflow_id, status, workflow_start, workflow_end, workflow_runtime_minutes, source_url, reason_skipped + } + } } diff --git a/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/MetricsAggregatorClient.java b/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/MetricsAggregatorClient.java index 3d7864e3..556ceb60 100644 --- a/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/MetricsAggregatorClient.java +++ b/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/MetricsAggregatorClient.java @@ -17,8 +17,10 @@ package io.dockstore.metricsaggregator.client.cli; -import static io.dockstore.utils.CLIConstants.FAILURE_EXIT_CODE; import static io.dockstore.utils.ConfigFileUtils.getConfiguration; +import static io.dockstore.utils.DockstoreApiClientUtils.setupApiClient; +import static io.dockstore.utils.ExceptionHandler.GENERIC_ERROR; +import static io.dockstore.utils.ExceptionHandler.exceptionMessage; import com.beust.jcommander.JCommander; import com.beust.jcommander.MissingCommandException; @@ -27,9 +29,9 @@ import io.dockstore.metricsaggregator.MetricsAggregatorConfig; import io.dockstore.metricsaggregator.MetricsAggregatorS3Client; import io.dockstore.metricsaggregator.client.cli.CommandLineArgs.AggregateMetricsCommand; +import io.dockstore.metricsaggregator.client.cli.CommandLineArgs.SubmitTerraMetrics; import io.dockstore.metricsaggregator.client.cli.CommandLineArgs.SubmitValidationData; import io.dockstore.openapi.client.ApiClient; -import io.dockstore.openapi.client.Configuration; import io.dockstore.openapi.client.api.ExtendedGa4GhApi; import io.dockstore.openapi.client.model.ExecutionsRequestBody; import io.dockstore.openapi.client.model.ValidationExecution; @@ -38,8 +40,9 @@ import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Files; +import java.time.Duration; +import java.time.Instant; import java.util.List; -import java.util.Optional; import org.apache.commons.configuration2.INIConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,34 +59,36 @@ public class MetricsAggregatorClient { private static final Logger LOG = LoggerFactory.getLogger(MetricsAggregatorClient.class); - public MetricsAggregatorClient() { } public static void main(String[] args) { + final Instant startTime = Instant.now(); MetricsAggregatorClient metricsAggregatorClient = new MetricsAggregatorClient(); final CommandLineArgs commandLineArgs = new CommandLineArgs(); final JCommander jCommander = new JCommander(commandLineArgs); final AggregateMetricsCommand aggregateMetricsCommand = new AggregateMetricsCommand(); final SubmitValidationData submitValidationData = new SubmitValidationData(); + final SubmitTerraMetrics submitTerraMetrics = new SubmitTerraMetrics(); + jCommander.addCommand(aggregateMetricsCommand); jCommander.addCommand(submitValidationData); + jCommander.addCommand(submitTerraMetrics); try { jCommander.parse(args); } catch (MissingCommandException e) { jCommander.usage(); if (e.getUnknownCommand().isEmpty()) { - LOG.error("No command entered", e); + LOG.error("No command entered"); } else { - LOG.error("Unknown command", e); + LOG.error("Unknown command"); } - System.exit(FAILURE_EXIT_CODE); + exceptionMessage(e, "The command is missing", GENERIC_ERROR); } catch 
(ParameterException e) { jCommander.usage(); - LOG.error("Error parsing arguments", e); - System.exit(FAILURE_EXIT_CODE); + exceptionMessage(e, "Error parsing arguments", GENERIC_ERROR); } if (jCommander.getParsedCommand() == null || commandLineArgs.isHelp()) { @@ -92,46 +97,52 @@ public static void main(String[] args) { if (aggregateMetricsCommand.isHelp()) { jCommander.usage(); } else { - final Optional config = getConfiguration(aggregateMetricsCommand.getConfig()); - if (config.isEmpty()) { - System.exit(FAILURE_EXIT_CODE); - } + INIConfiguration config = getConfiguration(aggregateMetricsCommand.getConfig()); try { - final MetricsAggregatorConfig metricsAggregatorConfig = new MetricsAggregatorConfig(config.get()); + final MetricsAggregatorConfig metricsAggregatorConfig = new MetricsAggregatorConfig(config); metricsAggregatorClient.aggregateMetrics(metricsAggregatorConfig); } catch (Exception e) { - LOG.error("Could not aggregate metrics", e); - System.exit(FAILURE_EXIT_CODE); + exceptionMessage(e, "Could not aggregate metrics", GENERIC_ERROR); } } } else if ("submit-validation-data".equals(jCommander.getParsedCommand())) { if (submitValidationData.isHelp()) { jCommander.usage(); } else { - final Optional config = getConfiguration(submitValidationData.getConfig()); - if (config.isEmpty()) { - System.exit(FAILURE_EXIT_CODE); - } + INIConfiguration config = getConfiguration(submitValidationData.getConfig()); try { - final MetricsAggregatorConfig metricsAggregatorConfig = new MetricsAggregatorConfig(config.get()); + final MetricsAggregatorConfig metricsAggregatorConfig = new MetricsAggregatorConfig(config); metricsAggregatorClient.submitValidationData(metricsAggregatorConfig, submitValidationData.getValidator(), - submitValidationData.getValidatorVersion(), submitValidationData.getDataFilePath(), submitValidationData.getPlatform(), + submitValidationData.getValidatorVersion(), submitValidationData.getDataFilePath(), + submitValidationData.getPlatform(), submitValidationData.getExecutionId()); } catch (Exception e) { - LOG.error("Could not submit validation metrics to Dockstore", e); - System.exit(FAILURE_EXIT_CODE); + exceptionMessage(e, "Could not submit validation metrics to Dockstore", GENERIC_ERROR); + } + } + } else if ("submit-terra-metrics".equals(jCommander.getParsedCommand())) { + if (submitTerraMetrics.isHelp()) { + jCommander.usage(); + } else { + INIConfiguration config = getConfiguration(submitTerraMetrics.getConfig()); + + try { + final MetricsAggregatorConfig metricsAggregatorConfig = new MetricsAggregatorConfig(config); + final TerraMetricsSubmitter submitTerraMetricsCommand = new TerraMetricsSubmitter(metricsAggregatorConfig, + submitTerraMetrics); + submitTerraMetricsCommand.submitTerraMetrics(); + } catch (Exception e) { + exceptionMessage(e, "Could not submit Terra metrics to Dockstore", GENERIC_ERROR); } } } - } - private ApiClient setupApiClient(String serverUrl, String token) { - ApiClient apiClient = Configuration.getDefaultApiClient(); - apiClient.setBasePath(serverUrl); - apiClient.addDefaultHeader("Authorization", "Bearer " + token); - return apiClient; + if (jCommander.getParsedCommand() != null) { + final Instant endTime = Instant.now(); + LOG.info("{} took {}", jCommander.getParsedCommand(), Duration.between(startTime, endTime)); + } } private void aggregateMetrics(MetricsAggregatorConfig config) throws URISyntaxException { @@ -180,10 +191,8 @@ private void submitValidationData(MetricsAggregatorConfig config, ValidatorToolE continue; } String dateExecuted = 
lineComponents[DATE_EXECUTED_INDEX]; - ValidationExecution validationExecution = new ValidationExecution() - .validatorTool(validator) - .validatorToolVersion(validatorVersion) - .isValid(isValid); + ValidationExecution validationExecution = new ValidationExecution().validatorTool(validator) + .validatorToolVersion(validatorVersion).isValid(isValid); validationExecution.setDateExecuted(dateExecuted); validationExecution.setExecutionId(executionId); ExecutionsRequestBody executionsRequestBody = new ExecutionsRequestBody().validationExecutions(List.of(validationExecution)); @@ -191,7 +200,8 @@ private void submitValidationData(MetricsAggregatorConfig config, ValidatorToolE try { extendedGa4GhApi.executionMetricsPost(executionsRequestBody, platform.toString(), trsId, versionName, "Validation executions submitted using dockstore-support metricsaggregator"); - System.out.printf("Submitted validation metrics for tool ID %s, version %s, %s validated by %s %s on platform %s%n", trsId, versionName, isValid ? "successfully" : "unsuccessfully", validator, validatorVersion, platform); + System.out.printf("Submitted validation metrics for tool ID %s, version %s, %s validated by %s %s on platform %s%n", trsId, + versionName, isValid ? "successfully" : "unsuccessfully", validator, validatorVersion, platform); } catch (Exception e) { // Could end up here if the workflow no longer exists. Log then continue processing LOG.error("Could not submit validation executions to Dockstore for workflow {}", csvLine, e); diff --git a/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/TerraMetricsSubmitter.java b/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/TerraMetricsSubmitter.java new file mode 100644 index 00000000..70dc2df4 --- /dev/null +++ b/metricsaggregator/src/main/java/io/dockstore/metricsaggregator/client/cli/TerraMetricsSubmitter.java @@ -0,0 +1,458 @@ +package io.dockstore.metricsaggregator.client.cli; + +import static io.dockstore.utils.DockstoreApiClientUtils.setupApiClient; +import static io.dockstore.utils.ExceptionHandler.IO_ERROR; +import static io.dockstore.utils.ExceptionHandler.exceptionMessage; +import static java.util.stream.Collectors.groupingBy; + +import io.dockstore.common.Partner; +import io.dockstore.metricsaggregator.MetricsAggregatorConfig; +import io.dockstore.metricsaggregator.client.cli.CommandLineArgs.SubmitTerraMetrics; +import io.dockstore.metricsaggregator.client.cli.CommandLineArgs.SubmitTerraMetrics.SkippedTerraMetricsCsvHeaders; +import io.dockstore.metricsaggregator.client.cli.CommandLineArgs.SubmitTerraMetrics.TerraMetricsCsvHeaders; +import io.dockstore.openapi.client.ApiClient; +import io.dockstore.openapi.client.ApiException; +import io.dockstore.openapi.client.api.ExtendedGa4GhApi; +import io.dockstore.openapi.client.api.WorkflowsApi; +import io.dockstore.openapi.client.model.ExecutionsRequestBody; +import io.dockstore.openapi.client.model.RunExecution; +import io.dockstore.openapi.client.model.RunExecution.ExecutionStatusEnum; +import io.dockstore.openapi.client.model.Workflow.DescriptorTypeEnum; +import java.io.BufferedReader; +import java.io.FileReader; +import java.io.FileWriter; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; +import 
java.time.temporal.ChronoField; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVPrinter; +import org.apache.commons.csv.CSVRecord; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TerraMetricsSubmitter { + private static final Logger LOG = LoggerFactory.getLogger(MetricsAggregatorClient.class); + private static final int MAX_NUMBER_OF_MICRO_SECONDS = 9; + // Example workflow_start: 2022-07-15 15:37:06.450000 UTC + private static final DateTimeFormatter WORKFLOW_START_FORMAT = new DateTimeFormatterBuilder() + .appendPattern("yyyy-MM-dd HH:mm:ss") + .appendFraction(ChronoField.MICRO_OF_SECOND, 0, MAX_NUMBER_OF_MICRO_SECONDS, true) // There are varying widths of micro seconds + .optionalStart() // Start optional time zone pattern. Time zone is not always included + .appendPattern(" ") + .appendZoneId() + .optionalEnd() // End optional time zone pattern + .toFormatter(); + private final MetricsAggregatorConfig config; + private final SubmitTerraMetrics submitTerraMetricsCommand; + private final AtomicInteger numberOfExecutionsProcessed = new AtomicInteger(0); + private final AtomicInteger numberOfExecutionsSubmitted = new AtomicInteger(0); + private final AtomicInteger numberOfExecutionsSkipped = new AtomicInteger(0); + + // Keep track of sourceUrls that are found to be ambiguous + private final ConcurrentMap skippedSourceUrlsToReason = new ConcurrentHashMap<>(); + // Map of source url to TRS info that was previously calculated + private final ConcurrentMap sourceUrlToSourceUrlTrsInfo = new ConcurrentHashMap<>(); + // Map of workflow path prefixes, like github.com/organization/repo, to published workflows with the same workflow path prefix + private final ConcurrentMap> workflowPathPrefixToWorkflows = new ConcurrentHashMap<>(); + + public TerraMetricsSubmitter(MetricsAggregatorConfig config, SubmitTerraMetrics submitTerraMetricsCommand) { + this.config = config; + this.submitTerraMetricsCommand = submitTerraMetricsCommand; + } + + public void submitTerraMetrics() { + ApiClient apiClient = setupApiClient(config.getDockstoreServerUrl(), config.getDockstoreToken()); + ExtendedGa4GhApi extendedGa4GhApi = new ExtendedGa4GhApi(apiClient); + WorkflowsApi workflowsApi = new WorkflowsApi(apiClient); + + // Read CSV file + Iterable workflowMetricRecords; + final String inputDateFilePath = this.submitTerraMetricsCommand.getDataFilePath(); + try (BufferedReader metricsBufferedReader = new BufferedReader(new FileReader(inputDateFilePath))) { + final CSVFormat csvFormat = CSVFormat.DEFAULT.builder() + .setHeader(TerraMetricsCsvHeaders.class) + .setSkipHeaderRecord(true) + .setTrim(true) + .build(); + workflowMetricRecords = csvFormat.parse(metricsBufferedReader); + + // This output file is used to record skipped executions + final String outputFileName = inputDateFilePath + "_skipped_executions_" + Instant.now().truncatedTo(ChronoUnit.SECONDS).toString().replace("-", "").replace(":", "") + ".csv"; + + List workflowMetricsToProcess = new ArrayList<>(); + try (CSVPrinter skippedExecutionsCsvPrinter = submitTerraMetricsCommand.isRecordSkippedExecutions() ? 
new CSVPrinter( + new FileWriter(outputFileName, StandardCharsets.UTF_8), + CSVFormat.DEFAULT.builder().setHeader(SkippedTerraMetricsCsvHeaders.class).build()) : null) { + + // Process the executions by reading 100,000 rows, then processing the rows in parallel + final int batchSize = 100000; + for (CSVRecord workflowMetricRecord: workflowMetricRecords) { + if (workflowMetricsToProcess.size() < batchSize && workflowMetricRecords.iterator().hasNext()) { + workflowMetricsToProcess.add(workflowMetricRecord); + } else { + workflowMetricsToProcess.add(workflowMetricRecord); + LOG.info("Processing rows {} to {}", workflowMetricsToProcess.get(0).getRecordNumber(), workflowMetricsToProcess.get(workflowMetricsToProcess.size() - 1).getRecordNumber()); + // Collect a map of CSV records with the same source URL + Map> sourceUrlToCsvRecords = workflowMetricsToProcess.stream() + .collect(groupingBy(csvRecord -> csvRecord.get(TerraMetricsCsvHeaders.source_url))); + + sourceUrlToCsvRecords.entrySet().stream() + .parallel() + .forEach(entry -> { + final String sourceUrl = entry.getKey(); + final List csvRecordsWithSameSourceUrl = entry.getValue(); + submitWorkflowExecutions(sourceUrl, csvRecordsWithSameSourceUrl, workflowsApi, extendedGa4GhApi, skippedExecutionsCsvPrinter); + }); + + workflowMetricsToProcess.clear(); + logStats(); + } + } + } catch (IOException e) { + exceptionMessage(e, "Unable to create new CSV output file", IO_ERROR); + } + + logStats(); + + if (submitTerraMetricsCommand.isRecordSkippedExecutions()) { + LOG.info("View skipped executions in file {}", outputFileName); + } + } catch (IOException e) { + exceptionMessage(e, "Unable to read input CSV file", IO_ERROR); + } + } + + private void logStats() { + LOG.info("Done processing {} executions from Terra. Submitted {} executions. 
Skipped {} executions.", numberOfExecutionsProcessed, numberOfExecutionsSubmitted, numberOfExecutionsSkipped); + } + + private void submitWorkflowExecutions(String sourceUrl, List workflowMetricRecords, WorkflowsApi workflowsApi, ExtendedGa4GhApi extendedGa4GhApi, CSVPrinter skippedExecutionsCsvPrinter) { + LOG.info("Processing source_url {} for {} executions", sourceUrl, workflowMetricRecords.size()); + numberOfExecutionsProcessed.addAndGet(workflowMetricRecords.size()); + + if (StringUtils.isBlank(sourceUrl)) { + logSkippedExecutions("", workflowMetricRecords, "Can't determine TRS ID because source_url is missing", skippedExecutionsCsvPrinter, false); + } + + // Check to see if this source_url was skipped before + if (skippedSourceUrlsToReason.containsKey(sourceUrl)) { + logSkippedExecutions(sourceUrl, workflowMetricRecords, skippedSourceUrlsToReason.get(sourceUrl), skippedExecutionsCsvPrinter, true); + return; + } + + if (!sourceUrlToSourceUrlTrsInfo.containsKey(sourceUrl)) { + Optional sourceUrlTrsInfo = calculateTrsInfoFromSourceUrl(workflowMetricRecords, sourceUrl, workflowsApi, skippedExecutionsCsvPrinter); + if (sourceUrlTrsInfo.isEmpty()) { + return; + } else { + sourceUrlToSourceUrlTrsInfo.put(sourceUrl, sourceUrlTrsInfo.get()); + } + } + + final SourceUrlTrsInfo sourceUrlTrsInfo = sourceUrlToSourceUrlTrsInfo.get(sourceUrl); + final List workflowExecutionsToSubmit = workflowMetricRecords.stream() + .map(workflowExecution -> getTerraWorkflowExecutionFromCsvRecord(workflowExecution, sourceUrlTrsInfo.sourceUrl(), skippedExecutionsCsvPrinter)) + .filter(Optional::isPresent) + .map(Optional::get) + .toList(); + final ExecutionsRequestBody executionsRequestBody = new ExecutionsRequestBody().runExecutions(workflowExecutionsToSubmit); + try { + String description = "Submitted using the metricsaggregator's submit-terra-metrics command"; + if (StringUtils.isNotBlank(submitTerraMetricsCommand.getDescription())) { + description += ". " + submitTerraMetricsCommand.getDescription(); + } + extendedGa4GhApi.executionMetricsPost(executionsRequestBody, Partner.TERRA.toString(), sourceUrlTrsInfo.trsId(), sourceUrlTrsInfo.version(), description); + numberOfExecutionsSubmitted.addAndGet(workflowMetricRecords.size()); + } catch (ApiException e) { + logSkippedExecutions(sourceUrlTrsInfo.sourceUrl(), workflowMetricRecords, String.format("Could not submit execution metrics to Dockstore for workflow %s: %s", sourceUrlTrsInfo, e.getMessage()), skippedExecutionsCsvPrinter, false); + } + } + + /** + * Performs logging and writing of the skipped execution to an output file. + * If skipFutureExecutionsWithSourceUrl is true, also adds the source_url of the skipped execution to the sourceUrlsToSkipToReason map + * so that future executions with the same source_url are skipped. 
+ * @param sourceUrl source_url of the csvRecordToSkip + * @param csvRecordToSkip CSVRecord to skip + * @param reason Reason that this execution is being skipped + * @param skippedExecutionsCsvPrinter CSVPrinter that writes to the output file + * @param skipFutureExecutionsWithSourceUrl boolean indicating if all executions with the same source_url should be skipped + * @param logToConsole boolean indicating if the reason skipped should be logged to the console + */ + private void logSkippedExecution(String sourceUrl, CSVRecord csvRecordToSkip, String reason, CSVPrinter skippedExecutionsCsvPrinter, boolean skipFutureExecutionsWithSourceUrl, boolean logToConsole) { + if (logToConsole) { + LOG.warn("Skipping execution on row {} with source_url {}: {}", csvRecordToSkip.getRecordNumber(), sourceUrl, reason); + } + + // Record to map for future reference. Only want to do this if the skip reason applies for ALL executions with the source_url. + // Should not add to this map if the skip reason is specific to one execution + if (skipFutureExecutionsWithSourceUrl) { + skippedSourceUrlsToReason.put(sourceUrl, reason); + } + if (submitTerraMetricsCommand.isRecordSkippedExecutions()) { + // Record to output CSV file for later examination + // Headers: workflow_id, status, workflow_start, workflow_end, workflow_runtime_minutes, source_url + List csvColumnValues = Arrays.stream(TerraMetricsCsvHeaders.values()) + .map(csvRecordToSkip::get) // Get the column value for the record + .collect(Collectors.toList()); + csvColumnValues.add(reason); + try { + skippedExecutionsCsvPrinter.printRecord(csvColumnValues); + } catch (IOException e) { + LOG.error("Could not write skipped execution to output file"); + } + } + numberOfExecutionsSkipped.incrementAndGet(); + } + + /** + * Performs logging and writing of the skipped execution to the console and output file. + * Assumes that the execution being skipped is for a reason that is only applicable to that execution. + * @param sourceUrl source_url of the csvRecordToSkip + * @param csvRecordToSkip CSVRecord to skip + * @param reason Reason that this execution is being skipped + * @param skippedExecutionsCsvPrinter CSVPrinter that writes to the output file + */ + private void logSkippedExecution(String sourceUrl, CSVRecord csvRecordToSkip, String reason, CSVPrinter skippedExecutionsCsvPrinter) { + logSkippedExecution(sourceUrl, csvRecordToSkip, reason, skippedExecutionsCsvPrinter, false, true); + } + + /** + * Performs logging and writing of the skipped executions with the same sourceUrl to an output file. Assumes that all executions are skipped for the same reason. + * If skipFutureExecutionsWithSourceUrl is true, also adds the source_url of the skipped execution to the sourceUrlsToSkipToReason map + * so that future executions with the same source_url are skipped. 
+ * @param sourceUrl sourceUrl of all csvRecordsToSkip + * @param csvRecordsToSkip the CSVRecords to skip + * @param reason the reason the CSVRecords are being skipped + * @param skippedExecutionsCsvPrinter CSVPrinter that writes the skipped reason and records to an output file + * @param skipFutureExecutionsWithSourceUrl boolean indicating if all executions with the same source_url should be skipped + */ + private void logSkippedExecutions(String sourceUrl, List csvRecordsToSkip, String reason, CSVPrinter skippedExecutionsCsvPrinter, boolean skipFutureExecutionsWithSourceUrl) { + LOG.warn("Skipping {} executions with source_url {}: {}", csvRecordsToSkip.size(), sourceUrl, reason); + csvRecordsToSkip.forEach(csvRecordToSkip -> logSkippedExecution(sourceUrl, csvRecordToSkip, reason, skippedExecutionsCsvPrinter, skipFutureExecutionsWithSourceUrl, false)); + } + + /** + * Gets a RunExecution representing a single Terra workflow execution from one row of the CSV file. + * If the CSV record is invalid, the function will record the reason why the execution was skipped using skippedExecutionsCsvPrinter. + * Note: If an execution is skipped in this function, it means that the reason is specific to the execution, not the source_url! + * @param csvRecord + * @param sourceUrl + * @param skippedExecutionsCsvPrinter + * @return + */ + public Optional getTerraWorkflowExecutionFromCsvRecord(CSVRecord csvRecord, String sourceUrl, CSVPrinter skippedExecutionsCsvPrinter) { + final String executionId = csvRecord.get(TerraMetricsCsvHeaders.workflow_id); + final String workflowStart = csvRecord.get(TerraMetricsCsvHeaders.workflow_start); + final String status = csvRecord.get(TerraMetricsCsvHeaders.status); + final String workflowRunTimeMinutes = csvRecord.get(TerraMetricsCsvHeaders.workflow_runtime_minutes); + + // Check that all required fields are present + if (StringUtils.isBlank(executionId)) { + logSkippedExecution(sourceUrl, csvRecord, "The required field workflow_id is missing", skippedExecutionsCsvPrinter); + return Optional.empty(); + } + + if (StringUtils.isBlank(workflowStart)) { + logSkippedExecution(sourceUrl, csvRecord, "The required field workflow_start is missing", skippedExecutionsCsvPrinter); + return Optional.empty(); + } + + if (StringUtils.isBlank(status)) { + logSkippedExecution(sourceUrl, csvRecord, "The required field status is missing", skippedExecutionsCsvPrinter); + return Optional.empty(); + } + + // Format fields into Dockstore schema + final Optional executionStatus = getExecutionStatusEnumFromTerraStatus(status); + if (executionStatus.isEmpty()) { + logSkippedExecution(sourceUrl, csvRecord, "Could not get a valid ExecutionStatusEnum from status '" + status + "'", skippedExecutionsCsvPrinter); + return Optional.empty(); + } + + final Optional dateExecuted = formatStringInIso8601Date(workflowStart); + if (dateExecuted.isEmpty()) { + logSkippedExecution(sourceUrl, csvRecord, "Could not get a valid dateExecuted from workflow_start '" + workflowStart + "'", skippedExecutionsCsvPrinter); + return Optional.empty(); + } + + RunExecution workflowExecution = new RunExecution(); + workflowExecution.setExecutionId(executionId); + workflowExecution.setExecutionStatus(executionStatus.get()); + workflowExecution.setDateExecuted(dateExecuted.get()); + getExecutionTime(workflowRunTimeMinutes).ifPresent(workflowExecution::setExecutionTime); + return Optional.of(workflowExecution); + } + + static Optional formatStringInIso8601Date(String workflowStart) { + try { + final LocalDateTime localDateTime 
= LocalDateTime.parse(workflowStart, WORKFLOW_START_FORMAT); + return Optional.of(DateTimeFormatter.ISO_INSTANT.format(localDateTime.atOffset(ZoneOffset.UTC))); + } catch (DateTimeParseException e) { + LOG.error("Could not format workflow_start '{}' in ISO 8601 date format", workflowStart, e); + return Optional.empty(); + } + } + + static Optional getExecutionStatusEnumFromTerraStatus(String terraStatus) { + ExecutionStatusEnum executionStatusEnum = switch (terraStatus) { + case "Succeeded" -> ExecutionStatusEnum.SUCCESSFUL; + case "Failed" -> ExecutionStatusEnum.FAILED; + case "Aborted" -> ExecutionStatusEnum.ABORTED; + default -> null; + }; + return Optional.ofNullable(executionStatusEnum); + } + + static Optional getExecutionTime(String workflowRunTimeMinutes) { + if (StringUtils.isBlank(workflowRunTimeMinutes)) { + return Optional.empty(); + } + return Optional.of(String.format("PT%sM", workflowRunTimeMinutes)); + } + + /** + * Calculates the TRS ID from the source_url by: + *
* 1. Looking at all published workflows that have the same workflow path prefix, i.e. they belong to the same GitHub repository.
+ * 2. For each published workflow, getting the primary descriptor path for the version specified in source_url and checking if it matches the primary descriptor path in the source_url.
+ * 3. Ensuring that there is only one workflow in the repository with the same descriptor path. If there are multiple, it is an ambiguous case and we skip the execution.
+ * + * Also writes skipped executions to an output file. + * + * @param workflowMetricRecords workflow CSV records, all with the same sourceUrl, to calculate the TRS info for + * @param sourceUrl the sourceUrl of all the workflow CSV records + * @param workflowsApi workflowsApi used to help calculate the TRS info + * @param skippedExecutionsCsvPrinter If the workflow CSV records are skipped, the CSV Printer that writes the reason why it was skipped and the records to an output file + * @return + */ + private Optional calculateTrsInfoFromSourceUrl(List workflowMetricRecords, String sourceUrl, WorkflowsApi workflowsApi, CSVPrinter skippedExecutionsCsvPrinter) { + // Need to figure out the TRS ID and version name using the source_url. + // Example source_url: https://raw.githubusercontent.com/theiagen/public_health_viral_genomics/v2.0.0/workflows/wf_theiacov_fasta.wdl + // Organization = "theiagen/public_health_viral_genomics", version = "v2.0.0", the rest is the primary descriptor path + // Note that the TRS ID may also have a workflow name, which we need to figure out + final List sourceUrlComponents = getSourceUrlComponents(sourceUrl); + + + final int minNumberOfComponents = 3; + if (sourceUrlComponents.size() < minNumberOfComponents) { + logSkippedExecutions(sourceUrl, workflowMetricRecords, "Not enough components in the source_url to figure out the TRS ID and version", skippedExecutionsCsvPrinter, true); + return Optional.empty(); + } + + // There should be at least three elements in order for there to be an organization name, foo>/, and version + // in /// + final String organization = sourceUrlComponents.get(0) + "/" + sourceUrlComponents.get(1); + final String version = sourceUrlComponents.get(2); + final String primaryDescriptorPathFromUrl = "/" + String.join("/", sourceUrlComponents.subList(3, sourceUrlComponents.size())); + + final String workflowPathPrefix = "github.com/" + organization; + if (!workflowPathPrefixToWorkflows.containsKey(workflowPathPrefix)) { + try { + List publishedWorkflowsWithSamePathPrefix = workflowsApi.getAllPublishedWorkflowByPath( + workflowPathPrefix).stream() + .map(workflow -> new MinimalWorkflowInfo(workflow.getId(), workflow.getFullWorkflowPath(), workflow.getDescriptorType(), new ConcurrentHashMap<>())).toList(); + workflowPathPrefixToWorkflows.put(workflowPathPrefix, publishedWorkflowsWithSamePathPrefix); + } catch (ApiException e) { + logSkippedExecutions(sourceUrl, workflowMetricRecords, + "Could not get all published workflows for workflow path " + workflowPathPrefix + " to determine TRS ID", + skippedExecutionsCsvPrinter, true); + return Optional.empty(); + } + } + + List workflowsFromSameRepo = workflowPathPrefixToWorkflows.get(workflowPathPrefix); + + List foundFullWorkflowPaths = new ArrayList<>(); + // Loop through each workflow to find one that matches the primary descriptor + workflowsFromSameRepo.forEach(workflow -> { + // Get the primary descriptor path for the version and update the map, either with the primary descriptor path or an empty string to indicate that it was not found + if (!workflow.versionToPrimaryDescriptorPathMap().containsKey(version)) { + final String primaryDescriptorAbsolutePath = makePathAbsolute(getPrimaryDescriptorAbsolutePath(workflowsApi, workflow, version).orElse("")); + workflow.versionToPrimaryDescriptorPathMap().put(version, primaryDescriptorAbsolutePath); + } + + // Check to see if there's a version that has the same primary descriptor path + final String primaryDescriptorPathForVersion = 
workflow.versionToPrimaryDescriptorPathMap().get(version); + if (primaryDescriptorPathFromUrl.equals(primaryDescriptorPathForVersion)) { + foundFullWorkflowPaths.add(workflow.fullWorkflowPath()); + } + }); + + if (foundFullWorkflowPaths.isEmpty()) { + logSkippedExecutions(sourceUrl, workflowMetricRecords, "Could not find workflow with primary descriptor " + primaryDescriptorPathFromUrl, skippedExecutionsCsvPrinter, true); + return Optional.empty(); + } else if (foundFullWorkflowPaths.size() > 1) { + // There is already a workflow in the same repository with the same descriptor path that we're looking for. + // Skip this source_url because it is an ambiguous case and we can't identify which workflow the source url is referring to. + logSkippedExecutions(sourceUrl, workflowMetricRecords, String.format("There's %s workflows in the repository with the same primary descriptor path '%s': %s", + foundFullWorkflowPaths.size(), primaryDescriptorPathFromUrl, foundFullWorkflowPaths), skippedExecutionsCsvPrinter, true); + return Optional.empty(); + } else { + final SourceUrlTrsInfo sourceUrlTrsInfo = new SourceUrlTrsInfo(sourceUrl, "#workflow/" + foundFullWorkflowPaths.get(0), version); + return Optional.of(sourceUrlTrsInfo); + } + } + + /** + * Returns a list of slash-delimited components from the source_url. + * Example: given source_url https://raw.githubusercontent.com/theiagen/public_health_viral_genomics/v2.0.0/workflows/wf_theiacov_fasta.wdl, + * returns a list of "theiagen", "public_health_viral_genomics", "v2.0.0", "wf_theiacov_fasta.wdl" + * @param sourceUrl The source_url that starts with https://raw.githubusercontent.com/. + * @return + */ + static List getSourceUrlComponents(String sourceUrl) { + final String rawGitHubUrlPrefix = "https://raw.githubusercontent.com/"; + if (!sourceUrl.startsWith(rawGitHubUrlPrefix)) { + return List.of(); + } + final String sourceUrlWithoutGitHubPrefix = sourceUrl.replace("https://raw.githubusercontent.com/", ""); + return Arrays.stream(sourceUrlWithoutGitHubPrefix.split("/")) + .filter(urlComponent -> !urlComponent.isEmpty()) // Filter out empty strings that are a result of consecutive slashes '//' + .toList(); + } + + private Optional getPrimaryDescriptorAbsolutePath(WorkflowsApi workflowsApi, MinimalWorkflowInfo workflow, String version) { + Optional primaryDescriptorPath = Optional.empty(); + try { + primaryDescriptorPath = Optional.of(workflowsApi.primaryDescriptor1(workflow.id(), version, workflow.descriptorType().toString()).getAbsolutePath()); + } catch (ApiException e) { + LOG.debug("Could not get primary descriptor path", e); + } + return primaryDescriptorPath; + } + + static String makePathAbsolute(String path) { + return path.startsWith("/") ? 
path : "/" + path; + } + + /** + * Record that stores the calculated TRS ID and version, derived from the source_url + * @param sourceUrl + * @param trsId + * @param version + */ + public record SourceUrlTrsInfo(String sourceUrl, String trsId, String version) { + } + + public record MinimalWorkflowInfo(long id, String fullWorkflowPath, DescriptorTypeEnum descriptorType, ConcurrentMap versionToPrimaryDescriptorPathMap) { + } +} diff --git a/metricsaggregator/src/main/resources/logback.xml b/metricsaggregator/src/main/resources/logback.xml index ef430a56..3f2d6d36 100644 --- a/metricsaggregator/src/main/resources/logback.xml +++ b/metricsaggregator/src/main/resources/logback.xml @@ -23,7 +23,7 @@ - + diff --git a/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/client/cli/MetricsAggregatorClientIT.java b/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/client/cli/MetricsAggregatorClientIT.java index d2451280..c4779134 100644 --- a/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/client/cli/MetricsAggregatorClientIT.java +++ b/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/client/cli/MetricsAggregatorClientIT.java @@ -26,11 +26,14 @@ import static io.dockstore.metricsaggregator.common.TestUtilities.createTasksExecutions; import static io.dockstore.metricsaggregator.common.TestUtilities.createValidationExecution; import static io.dockstore.metricsaggregator.common.TestUtilities.generateExecutionId; +import static io.dockstore.openapi.client.model.RunExecution.ExecutionStatusEnum.ABORTED; +import static io.dockstore.openapi.client.model.RunExecution.ExecutionStatusEnum.FAILED; import static io.dockstore.openapi.client.model.RunExecution.ExecutionStatusEnum.FAILED_RUNTIME_INVALID; import static io.dockstore.openapi.client.model.RunExecution.ExecutionStatusEnum.FAILED_SEMANTIC_INVALID; import static io.dockstore.openapi.client.model.RunExecution.ExecutionStatusEnum.SUCCESSFUL; import static io.dockstore.openapi.client.model.ValidationExecution.ValidatorToolEnum.MINIWDL; import static io.dockstore.openapi.client.model.ValidationExecution.ValidatorToolEnum.WOMTOOL; +import static io.dockstore.utils.ExceptionHandler.IO_ERROR; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -67,7 +70,6 @@ import io.dockstore.openapi.client.model.ValidatorVersionInfo; import io.dockstore.openapi.client.model.Workflow; import io.dockstore.openapi.client.model.WorkflowVersion; -import io.dockstore.utils.CLIConstants; import io.dockstore.webservice.DockstoreWebserviceApplication; import io.dockstore.webservice.DockstoreWebserviceConfiguration; import io.dropwizard.testing.DropwizardTestSupport; @@ -405,7 +407,7 @@ private static void testOverallAggregatedMetrics(WorkflowVersion version, String @Test void testAggregateMetricsErrors() throws Exception { int exitCode = catchSystemExit(() -> MetricsAggregatorClient.main(new String[] {"aggregate-metrics", "--config", "thisdoesntexist"})); - assertEquals(CLIConstants.FAILURE_EXIT_CODE, exitCode); + assertEquals(IO_ERROR, exitCode); } /** @@ -535,4 +537,35 @@ void testSubmitValidationData() throws IOException { assertFalse(validationExecution.isIsValid()); assertEquals(validator, validationExecution.getValidatorTool()); } + + @Test + void testSubmitTerraMetrics() throws IOException { + final ApiClient apiClient = CommonTestUtilities.getOpenAPIWebClient(true, ADMIN_USERNAME, 
testingPostgres); + final WorkflowsApi workflowsApi = new WorkflowsApi(apiClient); + + Workflow workflow = workflowsApi.getPublishedWorkflow(32L, "metrics"); + WorkflowVersion version = workflow.getWorkflowVersions().stream().filter(v -> "master".equals(v.getName())).findFirst().orElse(null); + assertNotNull(version); + + String id = "#workflow/" + workflow.getFullWorkflowPath(); + String versionId = version.getName(); + // This file contains 3 valid rows of executions: 1 failed, 1 successful, and 1 aborted. + // It also contains 2 invalid rows of executions: 1 where workflow_start is missing, and one where the source_url is invalid + String terraMetricsFilePath = ResourceHelpers.resourceFilePath("terra-metrics.csv"); + + // Submit Terra metrics using a CSV file that metrics of workflows executed on Terra + MetricsAggregatorClient.main(new String[] {"submit-terra-metrics", "--config", CONFIG_FILE_PATH, "--data", terraMetricsFilePath}); + List metricsDataList = metricsDataS3Client.getMetricsData(id, versionId); + assertEquals(1, metricsDataList.size()); // There should be 1 file in S3. + MetricsData metricsData = metricsDataList.get(0); + // Verify the metrics that were sent to S3 + String metricsDataContent = metricsDataS3Client.getMetricsDataFileContent(metricsData.toolId(), metricsData.toolVersionName(), metricsData.platform(), metricsData.fileName()); + ExecutionsRequestBody executionsRequestBody = GSON.fromJson(metricsDataContent, ExecutionsRequestBody.class); + assertTrue(executionsRequestBody.getValidationExecutions().isEmpty(), "Should not have validation executions"); + List terraWorkflowExecutions = executionsRequestBody.getRunExecutions(); + assertEquals(3, terraWorkflowExecutions.size(), "Should have 3 workflow executions submitted"); + assertTrue(terraWorkflowExecutions.stream().anyMatch(execution -> execution.getExecutionStatus() == SUCCESSFUL)); + assertTrue(terraWorkflowExecutions.stream().anyMatch(execution -> execution.getExecutionStatus() == ABORTED)); + assertTrue(terraWorkflowExecutions.stream().anyMatch(execution -> execution.getExecutionStatus() == FAILED)); + } } diff --git a/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/client/cli/TerraMetricsSubmitterTest.java b/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/client/cli/TerraMetricsSubmitterTest.java new file mode 100644 index 00000000..b27023b6 --- /dev/null +++ b/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/client/cli/TerraMetricsSubmitterTest.java @@ -0,0 +1,57 @@ +package io.dockstore.metricsaggregator.client.cli; + +import static io.dockstore.metricsaggregator.client.cli.TerraMetricsSubmitter.formatStringInIso8601Date; +import static io.dockstore.metricsaggregator.client.cli.TerraMetricsSubmitter.getExecutionTime; +import static io.dockstore.metricsaggregator.client.cli.TerraMetricsSubmitter.getSourceUrlComponents; +import static io.dockstore.metricsaggregator.client.cli.TerraMetricsSubmitter.makePathAbsolute; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.dockstore.openapi.client.model.RunExecution.ExecutionStatusEnum; +import java.util.List; +import org.junit.jupiter.api.Test; + +class TerraMetricsSubmitterTest { + + @Test + void testFormatStringInIso8601Date() { + assertEquals("2022-07-15T15:37:06.123456Z", formatStringInIso8601Date("2022-07-15 15:37:06.123456 UTC").get()); + assertEquals("2022-07-15T15:37:06.450Z", formatStringInIso8601Date("2022-07-15 15:37:06.450000 
UTC").get()); + assertEquals("2022-06-30T00:33:44.101Z", formatStringInIso8601Date("2022-06-30 00:33:44.101000 UTC").get()); + assertEquals("2022-09-06T13:46:53Z", formatStringInIso8601Date("2022-09-06 13:46:53").get()); + assertEquals("2022-05-25T13:13:08.510Z", formatStringInIso8601Date("2022-05-25 13:13:08.51 UTC").get()); + assertEquals("2022-12-01T01:52:05.700Z", formatStringInIso8601Date("2022-12-01 01:52:05.7 UTC").get()); + } + + @Test + void testGetExecutionStatusEnumFromTerraStatus() { + assertEquals(ExecutionStatusEnum.SUCCESSFUL, TerraMetricsSubmitter.getExecutionStatusEnumFromTerraStatus("Succeeded").get()); + assertEquals(ExecutionStatusEnum.FAILED, TerraMetricsSubmitter.getExecutionStatusEnumFromTerraStatus("Failed").get()); + assertEquals(ExecutionStatusEnum.ABORTED, TerraMetricsSubmitter.getExecutionStatusEnumFromTerraStatus("Aborted").get()); + // These two statuses are present in the workflow executions Terra provided, but they don't represent a completed execution + assertTrue(TerraMetricsSubmitter.getExecutionStatusEnumFromTerraStatus("Aborting").isEmpty()); + assertTrue(TerraMetricsSubmitter.getExecutionStatusEnumFromTerraStatus("Running").isEmpty()); + } + + @Test + void testGetExecutionTime() { + assertEquals("PT100M", getExecutionTime("100").get()); + assertTrue(getExecutionTime("").isEmpty()); + assertTrue(getExecutionTime(" ").isEmpty()); + assertTrue(getExecutionTime(null).isEmpty()); + } + + @Test + void testGetSourceUrlComponents() { + assertEquals(List.of("theiagen", "public_health_viral_genomics", "v2.0.0", "workflows", "wf_theiacov_fasta.wdl"), getSourceUrlComponents("https://raw.githubusercontent.com/theiagen/public_health_viral_genomics/v2.0.0/workflows/wf_theiacov_fasta.wdl")); + // This source_url has consecutive slashes + assertEquals(List.of("theiagen", "public_health_viral_genomics", "v2.0.0", "workflows", "wf_theiacov_fasta.wdl"), getSourceUrlComponents("https://raw.githubusercontent.com/theiagen/public_health_viral_genomics/v2.0.0//workflows/wf_theiacov_fasta.wdl")); + assertEquals(List.of(), getSourceUrlComponents("https://nottherawgithuburlprefix/theiagen/public_health_viral_genomics/v2.0.0//workflows/wf_theiacov_fasta.wdl")); + } + + @Test + void testMakePathAbsolute() { + assertEquals("/foo.wdl", makePathAbsolute("foo.wdl")); + assertEquals("/foo.wdl", makePathAbsolute("/foo.wdl")); + } +} diff --git a/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/common/TestUtilities.java b/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/common/TestUtilities.java index f818e5fc..88e39d81 100644 --- a/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/common/TestUtilities.java +++ b/metricsaggregator/src/test/java/io/dockstore/metricsaggregator/common/TestUtilities.java @@ -29,7 +29,6 @@ import java.io.File; import java.time.Instant; import java.util.List; -import java.util.Optional; import java.util.UUID; import org.apache.commons.configuration2.INIConfiguration; @@ -78,12 +77,7 @@ public static ValidationExecution createValidationExecution(ValidatorToolEnum va } public static MetricsAggregatorConfig getMetricsConfig() { - Optional iniConfig = ConfigFileUtils.getConfiguration(new File(CONFIG_FILE_PATH)); - if (iniConfig.isEmpty()) { - throw new RuntimeException("Unable to get config file"); - } - - MetricsAggregatorConfig config = new MetricsAggregatorConfig(iniConfig.get()); - return config; + INIConfiguration iniConfig = ConfigFileUtils.getConfiguration(new File(CONFIG_FILE_PATH)); + return new 
diff --git a/pom.xml b/pom.xml
index e60e58b2..79e132cd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -162,6 +162,11 @@
                 <artifactId>jersey-hk2</artifactId>
                 <version>3.0.11</version>
             </dependency>
+            <dependency>
+                <groupId>org.apache.commons</groupId>
+                <artifactId>commons-csv</artifactId>
+                <version>1.10.0</version>
+            </dependency>
@@ -239,10 +244,6 @@
                 <filtering>true</filtering>
-                <excludes>
-                    <exclude>**/logback.xml</exclude>
-                </excludes>
diff --git a/tooltester/pom.xml b/tooltester/pom.xml
index 4030343a..90efd0c0 100644
--- a/tooltester/pom.xml
+++ b/tooltester/pom.xml
@@ -114,6 +114,11 @@
+        <dependency>
+            <groupId>io.dockstore</groupId>
+            <artifactId>utils</artifactId>
+            <version>${revision}${changelist}</version>
+        </dependency>
         <dependency>
             <groupId>com.fasterxml.jackson.jaxrs</groupId>
             <artifactId>jackson-jaxrs-base</artifactId>
diff --git a/tooltester/src/main/java/io/dockstore/tooltester/client/cli/Client.java b/tooltester/src/main/java/io/dockstore/tooltester/client/cli/Client.java
index 26879e3b..f4c8ba27 100644
--- a/tooltester/src/main/java/io/dockstore/tooltester/client/cli/Client.java
+++ b/tooltester/src/main/java/io/dockstore/tooltester/client/cli/Client.java
@@ -20,12 +20,12 @@
 import static io.dockstore.common.S3ClientHelper.getVersionName;
 import static io.dockstore.tooltester.client.cli.JCommanderUtility.out;
 import static io.dockstore.tooltester.client.cli.JCommanderUtility.printJCommanderHelp;
-import static io.dockstore.tooltester.helper.ExceptionHandler.COMMAND_ERROR;
-import static io.dockstore.tooltester.helper.ExceptionHandler.DEBUG;
-import static io.dockstore.tooltester.helper.ExceptionHandler.exceptionMessage;
 import static io.dockstore.tooltester.runWorkflow.WorkflowRunner.GSON;
 import static io.dockstore.tooltester.runWorkflow.WorkflowRunner.printLine;
 import static io.dockstore.tooltester.runWorkflow.WorkflowRunner.uploadRunInfo;
+import static io.dockstore.utils.ExceptionHandler.COMMAND_ERROR;
+import static io.dockstore.utils.ExceptionHandler.DEBUG;
+import static io.dockstore.utils.ExceptionHandler.exceptionMessage;
 
 import com.beust.jcommander.JCommander;
 import com.beust.jcommander.MissingCommandException;
diff --git a/tooltester/src/main/java/io/dockstore/tooltester/runWorkflow/WorkflowRunner.java b/tooltester/src/main/java/io/dockstore/tooltester/runWorkflow/WorkflowRunner.java
index 50a237cf..e1af816e 100644
--- a/tooltester/src/main/java/io/dockstore/tooltester/runWorkflow/WorkflowRunner.java
+++ b/tooltester/src/main/java/io/dockstore/tooltester/runWorkflow/WorkflowRunner.java
@@ -19,11 +19,11 @@
 import static io.dockstore.common.S3ClientHelper.createFileName;
 import static io.dockstore.common.metrics.MetricsDataS3Client.generateKey;
 import static io.dockstore.tooltester.client.cli.JCommanderUtility.out;
-import static io.dockstore.tooltester.helper.ExceptionHandler.API_ERROR;
-import static io.dockstore.tooltester.helper.ExceptionHandler.COMMAND_ERROR;
-import static io.dockstore.tooltester.helper.ExceptionHandler.GENERIC_ERROR;
-import static io.dockstore.tooltester.helper.ExceptionHandler.errorMessage;
-import static io.dockstore.tooltester.helper.ExceptionHandler.exceptionMessage;
+import static io.dockstore.utils.ExceptionHandler.API_ERROR;
+import static io.dockstore.utils.ExceptionHandler.COMMAND_ERROR;
+import static io.dockstore.utils.ExceptionHandler.GENERIC_ERROR;
+import static io.dockstore.utils.ExceptionHandler.errorMessage;
+import static io.dockstore.utils.ExceptionHandler.exceptionMessage;
 import static java.util.UUID.randomUUID;
 import static org.apache.commons.lang3.math.NumberUtils.max;
 import static org.apache.commons.lang3.math.NumberUtils.min;
diff --git a/tooltester/src/main/java/io/dockstore/tooltester/runWorkflow/WorkflowRunnerConfig.java b/tooltester/src/main/java/io/dockstore/tooltester/runWorkflow/WorkflowRunnerConfig.java
index 8c2a48c1..74a5c789 100644
--- a/tooltester/src/main/java/io/dockstore/tooltester/runWorkflow/WorkflowRunnerConfig.java
+++ b/tooltester/src/main/java/io/dockstore/tooltester/runWorkflow/WorkflowRunnerConfig.java
@@ -1,8 +1,8 @@
 package io.dockstore.tooltester.runWorkflow;
 
-import static io.dockstore.tooltester.helper.ExceptionHandler.COMMAND_ERROR;
-import static io.dockstore.tooltester.helper.ExceptionHandler.IO_ERROR;
-import static io.dockstore.tooltester.helper.ExceptionHandler.exceptionMessage;
+import static io.dockstore.utils.ExceptionHandler.COMMAND_ERROR;
+import static io.dockstore.utils.ExceptionHandler.IO_ERROR;
+import static io.dockstore.utils.ExceptionHandler.exceptionMessage;
 import static java.util.UUID.randomUUID;
 
 import java.io.BufferedWriter;
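These tooltester files now import the ExceptionHandler that moves into the shared utils module at the end of this diff. A hypothetical call site, assuming exceptionMessage logs the message and exits with the given code (matching the System.exit calls it replaces elsewhere in this diff):

```java
import static io.dockstore.utils.ExceptionHandler.IO_ERROR;
import static io.dockstore.utils.ExceptionHandler.exceptionMessage;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Hypothetical caller of the shared exception-handling helpers
public final class ExceptionHandlerUsageSketch {
    public static String readFileOrExit(Path path) {
        try {
            return Files.readString(path);
        } catch (IOException e) {
            // Logs the message and terminates with exit code IO_ERROR (assumed behavior)
            exceptionMessage(e, "Unable to read file " + path, IO_ERROR);
            return null; // not reached if exceptionMessage exits
        }
    }
}
```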
diff --git a/tooltester/src/test/java/io/dockstore/tooltester/client/cli/ClientTest.java b/tooltester/src/test/java/io/dockstore/tooltester/client/cli/ClientTest.java
index 2e221c68..dc032a46 100644
--- a/tooltester/src/test/java/io/dockstore/tooltester/client/cli/ClientTest.java
+++ b/tooltester/src/test/java/io/dockstore/tooltester/client/cli/ClientTest.java
@@ -1,7 +1,7 @@
 package io.dockstore.tooltester.client.cli;
 
 import static io.dockstore.tooltester.client.cli.Client.main;
-import static io.dockstore.tooltester.helper.ExceptionHandler.COMMAND_ERROR;
+import static io.dockstore.utils.ExceptionHandler.COMMAND_ERROR;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static uk.org.webcompere.systemstubs.SystemStubs.catchSystemExit;
diff --git a/topicgenerator/pom.xml b/topicgenerator/pom.xml
index 64ee2aa7..95ab3576 100644
--- a/topicgenerator/pom.xml
+++ b/topicgenerator/pom.xml
@@ -129,7 +129,6 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-csv</artifactId>
-            <version>1.10.0</version>
         </dependency>
diff --git a/topicgenerator/src/main/java/io/dockstore/topicgenerator/client/cli/TopicGeneratorClient.java b/topicgenerator/src/main/java/io/dockstore/topicgenerator/client/cli/TopicGeneratorClient.java
index 5e9fb3b1..309a5af1 100644
--- a/topicgenerator/src/main/java/io/dockstore/topicgenerator/client/cli/TopicGeneratorClient.java
+++ b/topicgenerator/src/main/java/io/dockstore/topicgenerator/client/cli/TopicGeneratorClient.java
@@ -1,7 +1,9 @@
 package io.dockstore.topicgenerator.client.cli;
 
-import static io.dockstore.utils.CLIConstants.FAILURE_EXIT_CODE;
 import static io.dockstore.utils.ConfigFileUtils.getConfiguration;
+import static io.dockstore.utils.ExceptionHandler.GENERIC_ERROR;
+import static io.dockstore.utils.ExceptionHandler.IO_ERROR;
+import static io.dockstore.utils.ExceptionHandler.exceptionMessage;
 
 import com.beust.jcommander.JCommander;
 import com.beust.jcommander.MissingCommandException;
@@ -34,7 +36,6 @@
 import java.time.temporal.ChronoUnit;
 import java.util.HashMap;
 import java.util.List;
-import java.util.Optional;
 import org.apache.commons.configuration2.INIConfiguration;
 import org.apache.commons.csv.CSVFormat;
 import org.apache.commons.csv.CSVPrinter;
@@ -75,15 +76,14 @@ public static void main(String[] args) {
         } catch (MissingCommandException e) {
             jCommander.usage();
             if (e.getUnknownCommand().isEmpty()) {
-                LOG.error("No command entered", e);
+                LOG.error("No command entered");
             } else {
-                LOG.error("Unknown command", e);
+                LOG.error("Unknown command");
             }
-            System.exit(FAILURE_EXIT_CODE);
+            exceptionMessage(e, "The command is missing", GENERIC_ERROR);
         } catch (ParameterException e) {
             jCommander.usage();
-            LOG.error("Error parsing arguments", e);
-            System.exit(FAILURE_EXIT_CODE);
+            exceptionMessage(e, "Error parsing arguments", GENERIC_ERROR);
         }
 
         if (jCommander.getParsedCommand() == null || commandLineArgs.isHelp()) {
@@ -92,12 +92,8 @@ public static void main(String[] args) {
             if (generateTopicsCommand.isHelp()) {
                 jCommander.usage();
             } else {
-                final Optional<INIConfiguration> config = getConfiguration(generateTopicsCommand.getConfig());
-                if (config.isEmpty()) {
-                    LOG.error("Unable to get topic-generator config file");
-                    System.exit(FAILURE_EXIT_CODE);
-                }
-                final TopicGeneratorConfig topicGeneratorConfig = new TopicGeneratorConfig(config.get());
+                final INIConfiguration config = getConfiguration(generateTopicsCommand.getConfig());
+                final TopicGeneratorConfig topicGeneratorConfig = new TopicGeneratorConfig(config);
 
                 // Read CSV file
                 Iterable<CSVRecord> entriesCsvRecords = null;
@@ -110,8 +106,7 @@
                             .build();
                     entriesCsvRecords = csvFormat.parse(entriesCsv);
                 } catch (IOException e) {
-                    LOG.error("Unable to read input CSV file", e);
-                    System.exit(FAILURE_EXIT_CODE);
+                    exceptionMessage(e, "Unable to read input CSV file", IO_ERROR);
                 }
 
                 final TopicGeneratorClient topicGeneratorClient = new TopicGeneratorClient();
@@ -165,8 +160,7 @@ private void generateTopics(TopicGeneratorConfig topicGeneratorConfig, Iterable<
                 }
             }
         } catch (IOException e) {
-            LOG.error("Unable to create new CSV output file", e);
-            System.exit(FAILURE_EXIT_CODE);
+            exceptionMessage(e, "Unable to create new CSV output file", IO_ERROR);
         }
     }
diff --git a/utils/pom.xml b/utils/pom.xml
index 9157a180..e6439bb5 100644
--- a/utils/pom.xml
+++ b/utils/pom.xml
@@ -89,6 +89,11 @@
             <groupId>commons-beanutils</groupId>
             <artifactId>commons-beanutils</artifactId>
         </dependency>
+        <dependency>
+            <groupId>io.dockstore</groupId>
+            <artifactId>openapi-java-client</artifactId>
+            <version>${dockstore-core.version}</version>
+        </dependency>
diff --git a/utils/src/main/java/io/dockstore/utils/CLIConstants.java b/utils/src/main/java/io/dockstore/utils/CLIConstants.java
deleted file mode 100644
index c671e79f..00000000
--- a/utils/src/main/java/io/dockstore/utils/CLIConstants.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package io.dockstore.utils;
-
-public final class CLIConstants {
-    public static final int SUCCESS_EXIT_CODE = 0;
-    public static final int FAILURE_EXIT_CODE = 1;
-
-    private CLIConstants() {
-    }
-}
diff --git a/utils/src/main/java/io/dockstore/utils/ConfigFileUtils.java b/utils/src/main/java/io/dockstore/utils/ConfigFileUtils.java
index 306f6a2c..b6390513 100644
--- a/utils/src/main/java/io/dockstore/utils/ConfigFileUtils.java
+++ b/utils/src/main/java/io/dockstore/utils/ConfigFileUtils.java
@@ -1,7 +1,9 @@
 package io.dockstore.utils;
 
+import static io.dockstore.utils.ExceptionHandler.IO_ERROR;
+import static io.dockstore.utils.ExceptionHandler.exceptionMessage;
+
 import java.io.File;
-import java.util.Optional;
 import org.apache.commons.configuration2.INIConfiguration;
 import org.apache.commons.configuration2.SubnodeConfiguration;
 import org.apache.commons.configuration2.builder.fluent.Configurations;
@@ -20,16 +22,16 @@ public final class ConfigFileUtils {
     private ConfigFileUtils() {
     }
 
-    public static Optional<INIConfiguration> getConfiguration(File iniFile) {
+    public static INIConfiguration getConfiguration(File iniFile) {
         Configurations configs = new Configurations();
+        INIConfiguration config = null;
         try {
-            INIConfiguration config = configs.ini(iniFile);
-            return Optional.of(config);
+            config = configs.ini(iniFile);
         } catch (ConfigurationException e) {
-            LOG.error(CONFIG_FILE_ERROR, e);
-            return Optional.empty();
+            exceptionMessage(e, CONFIG_FILE_ERROR, IO_ERROR);
         }
+        return config;
     }
 
     public static SubnodeConfiguration getDockstoreSection(INIConfiguration iniConfig) {
diff --git a/utils/src/main/java/io/dockstore/utils/DockstoreApiClientUtils.java b/utils/src/main/java/io/dockstore/utils/DockstoreApiClientUtils.java
new file mode 100644
index 00000000..b65d1e99
--- /dev/null
+++ b/utils/src/main/java/io/dockstore/utils/DockstoreApiClientUtils.java
@@ -0,0 +1,16 @@
+package io.dockstore.utils;
+
+import io.dockstore.openapi.client.ApiClient;
+import io.dockstore.openapi.client.Configuration;
+
+public final class DockstoreApiClientUtils {
+    private DockstoreApiClientUtils() {
+    }
+
+    public static ApiClient setupApiClient(String serverUrl, String token) {
+        ApiClient apiClient = Configuration.getDefaultApiClient();
+        apiClient.setBasePath(serverUrl);
+        apiClient.addDefaultHeader("Authorization", "Bearer " + token);
+        return apiClient;
+    }
+}
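A hypothetical example wiring the reworked ConfigFileUtils.getConfiguration together with the new setupApiClient helper; the server-url and token key names below are illustrative assumptions, not taken from this diff:

```java
import io.dockstore.openapi.client.ApiClient;
import io.dockstore.utils.ConfigFileUtils;
import io.dockstore.utils.DockstoreApiClientUtils;
import java.io.File;
import org.apache.commons.configuration2.INIConfiguration;
import org.apache.commons.configuration2.SubnodeConfiguration;

// Hypothetical wiring of the shared utilities
public final class ApiClientWiringSketch {
    public static ApiClient fromConfig(String configPath) {
        // getConfiguration now exits via exceptionMessage on failure instead of returning Optional
        INIConfiguration iniConfig = ConfigFileUtils.getConfiguration(new File(configPath));
        SubnodeConfiguration dockstoreSection = ConfigFileUtils.getDockstoreSection(iniConfig);
        return DockstoreApiClientUtils.setupApiClient(
                dockstoreSection.getString("server-url"), dockstoreSection.getString("token"));
    }
}
```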
diff --git a/tooltester/src/main/java/io/dockstore/tooltester/helper/ExceptionHandler.java b/utils/src/main/java/io/dockstore/utils/ExceptionHandler.java
similarity index 97%
rename from tooltester/src/main/java/io/dockstore/tooltester/helper/ExceptionHandler.java
rename to utils/src/main/java/io/dockstore/utils/ExceptionHandler.java
index b243ab07..a9088ba5 100644
--- a/tooltester/src/main/java/io/dockstore/tooltester/helper/ExceptionHandler.java
+++ b/utils/src/main/java/io/dockstore/utils/ExceptionHandler.java
@@ -1,4 +1,4 @@
-package io.dockstore.tooltester.helper;
+package io.dockstore.utils;
 
 import java.util.concurrent.atomic.AtomicBoolean;
 import org.slf4j.Logger;