diff --git a/db/db.sql b/db/db.sql index 713dc1713..1118c1884 100644 --- a/db/db.sql +++ b/db/db.sql @@ -78,6 +78,8 @@ CREATE TABLE `workflow_steps` ( `maxBatchInputs` integer, `maxBatchSizeInBytes` integer, `operation` text not null, + `completed_work_item_count` integer not null default 0, + `progress_weight` float not null default 1.0, `createdAt` datetime not null, `updatedAt` datetime not null, FOREIGN KEY(jobID) REFERENCES jobs(jobID), diff --git a/db/migrations/20240122162340_add_progress_fields_to_workflow_steps.js b/db/migrations/20240122162340_add_progress_fields_to_workflow_steps.js new file mode 100644 index 000000000..bdc1b6db5 --- /dev/null +++ b/db/migrations/20240122162340_add_progress_fields_to_workflow_steps.js @@ -0,0 +1,22 @@ +exports.up = function up(knex) { + const result = knex.schema + .alterTable('workflow_steps', (t) => { + t.integer('completed_work_item_count').defaultTo(0).notNullable(); + t.float('progress_weight').defaultTo(1).notNullable(); + }) + .then(() => { + return knex.schema.raw(` + UPDATE "workflow_steps" SET completed_work_item_count="workItemCount" WHERE is_complete = true + ` + )}); + + return result; +}; + +exports.down = function down(knex) { + return knex.schema + .alterTable('workflow_steps', (t) => { + t.dropColumn('progress_weight'); + t.dropColumn('completed_work_item_count'); + }); +}; diff --git a/services/harmony/app/backends/workflow-orchestration/work-item-updates.ts b/services/harmony/app/backends/workflow-orchestration/work-item-updates.ts index a8b311e0d..ea05e943d 100644 --- a/services/harmony/app/backends/workflow-orchestration/work-item-updates.ts +++ b/services/harmony/app/backends/workflow-orchestration/work-item-updates.ts @@ -2,7 +2,7 @@ import env from '../../util/env'; import { logAsyncExecutionTime } from '../../util/log-execution'; import { v4 as uuid } from 'uuid'; import WorkItemUpdate from '../../models/work-item-update'; -import WorkflowStep, { decrementFutureWorkItemCount, getWorkflowStepByJobIdStepIndex, getWorkflowStepsByJobId, updateIsComplete } from '../../models/workflow-steps'; +import WorkflowStep, { getWorkflowStepByJobIdStepIndex, updateIsComplete } from '../../models/workflow-steps'; import { Logger } from 'winston'; import _, { ceil, range, sum } from 'lodash'; import { JobStatus, Job } from '../../models/job'; @@ -83,7 +83,7 @@ async function addJobLinksForFinishedWorkItem( * @returns the final job status for the request */ async function getFinalStatusAndMessageForJob(tx: Transaction, job: Job): -Promise<{ finalStatus: JobStatus, finalMessage: string }> { +Promise<{ finalStatus: JobStatus, finalMessage: string; }> { let finalStatus = JobStatus.SUCCESSFUL; const errorCount = await getErrorCountForJob(tx, job.jobID); const dataLinkCount = await getJobDataLinkCount(tx, job.jobID); @@ -96,7 +96,7 @@ Promise<{ finalStatus: JobStatus, finalMessage: string }> { } let finalMessage = ''; if ((errorCount > 1) && (finalStatus == JobStatus.FAILED)) { - finalMessage = `The job failed with ${errorCount} errors. See the errors field for more details`; + finalMessage = `The job failed with ${errorCount} errors. 
See the errors field for more details`; } else if ((errorCount == 1) && (finalStatus == JobStatus.FAILED)) { const jobError = (await getErrorsForJob(tx, job.jobID, 1))[0]; finalMessage = jobError.message; } @@ -168,7 +168,7 @@ async function handleFailedWorkItems( logger: Logger, errorMessage: string, ): Promise<boolean> { let continueProcessing = true; - // If the response is an error then set the job status to 'failed' + // If the response is an error then maybe set the job status to 'failed' if (status === WorkItemStatus.FAILED) { continueProcessing = job.ignoreErrors; if (!job.hasTerminalStatus()) { @@ -204,8 +204,6 @@ async function handleFailedWorkItems( if (!continueProcessing) { await completeJob(tx, job, JobStatus.FAILED, logger, jobMessage); } else { - // Need to make sure we expect one fewer granule to complete - await decrementFutureWorkItemCount(tx, job.jobID, workflowStep.stepIndex); if (job.status == JobStatus.RUNNING) { job.status = JobStatus.RUNNING_WITH_ERRORS; await job.save(tx); @@ -217,26 +215,20 @@ } /** - * Updated the workflow steps `workItemCount` field for the given job to match the new + * Update the query-cmr workflow step's `workItemCount` field for the given job to match the new + * granule count. * * @param transaction - the transaction to use for the update * @param job - A Job that has a new input granule count */ -async function updateWorkItemCounts( +async function updateCmrWorkItemCount( transaction: Transaction, job: Job): Promise<void> { - const workflowSteps = await getWorkflowStepsByJobId(transaction, job.jobID); - for (const step of workflowSteps) { - if (QUERY_CMR_SERVICE_REGEX.test(step.serviceID)) { - step.workItemCount = Math.ceil(job.numInputGranules / env.cmrMaxPageSize); - } else if (!step.hasAggregatedOutput) { - step.workItemCount = job.numInputGranules; - } else { - step.workItemCount = 1; - } - await step.save(transaction); - } + // NOTE We assume here that any chain using query-cmr will have it as the first step + const step = await getWorkflowStepByJobIdStepIndex(transaction, job.jobID, 1); + step.workItemCount = Math.ceil(job.numInputGranules / env.cmrMaxPageSize); + await step.save(transaction); } /** @@ -447,7 +439,7 @@ async function createNextWorkItems( // When a work-item update comes in we have three possible cases to handle: // 1. next step aggregates AND uses batching - in this case we process each result and put it // into a batch. As batches fill up we generate a new work-item for the next step and create - // a new batch if neeeded. We have a check to see if we have completed all the work-items for + // a new batch if needed. We have a check to see if we have completed all the work-items for // the current step, in which case we close the last batch even if it is not full and // generate a final work-item for the next step // 2. 
next step aggregates, but does not use batching, so we only create a new work-item @@ -477,7 +469,9 @@ async function createNextWorkItems( } else if (allWorkItemsForStepComplete) { await createAggregatingWorkItem(tx, workItem, nextWorkflowStep, logger); didCreateWorkItem = true; + nextWorkflowStep.workItemCount += 1; } + } else { // Create a new work item for each result using the next step didCreateWorkItem = true; @@ -513,12 +507,16 @@ async function createNextWorkItems( return newItem; }); + nextWorkflowStep.workItemCount += newItems.length; await incrementReadyCount(tx, workItem.jobID, nextWorkflowStep.serviceID, newItems.length); for (const batch of _.chunk(newItems, batchSize)) { await WorkItem.insertBatch(tx, batch); - logger.info('Queued new batch of work items.'); + logger.info(`Created new set of ${newItems.length} work items.`); } } + if (didCreateWorkItem) { + await nextWorkflowStep.save(tx); + } return didCreateWorkItem; } @@ -550,7 +548,7 @@ export async function preprocessWorkItem( let catalogItems; try { if (status === WorkItemStatus.SUCCESSFUL && !nextWorkflowStep) { - // if we are the last step in the chain we should read the catalog items since they are + // if we are the last step in the chain we should read the catalog items since they are // needed for generating the output links we will save catalogItems = await readCatalogsItems(results); durationMs = new Date().getTime() - startTime; @@ -680,6 +678,11 @@ export async function processWorkItem( totalItemsSize = sum(outputItemSizes) / 1024 / 1024; } + if (COMPLETED_WORK_ITEM_STATUSES.includes(status)) { + thisStep.completed_work_item_count += 1; + await thisStep.save(tx); + } + await (await logAsyncExecutionTime( updateWorkItemStatus, 'HWIUWJI.updateWorkItemStatus', @@ -713,11 +716,14 @@ export async function processWorkItem( logger.debug('timing.HWIUWJI.job.save.end', { durationMs }); await (await logAsyncExecutionTime( - updateWorkItemCounts, - 'HWIUWJI.updateWorkItemCounts', + updateCmrWorkItemCount, + 'HWIUWJI.updateCmrWorkItemCount', logger))(tx, job); } + await job.updateProgress(tx); + await job.save(tx); + if (checkCompletion) { allWorkItemsForStepComplete = await updateIsComplete(tx, jobID, job.numInputGranules, thisStep); } @@ -773,7 +779,8 @@ export async function processWorkItem( logger))(tx, job, JobStatus.FAILED, logger, message); } } - if (!nextWorkflowStep || allWorkItemsForStepComplete) { + + if (!nextWorkflowStep) { // Finished with the chain for this granule if (status != WorkItemStatus.FAILED) { await (await logAsyncExecutionTime( @@ -781,35 +788,39 @@ export async function processWorkItem( 'HWIUWJI.addJobLinksForFinishedWorkItem', logger))(tx, job.jobID, catalogItems); } - job.completeBatch(thisStep.workItemCount); - if (allWorkItemsForStepComplete && !didCreateWorkItem && (!nextWorkflowStep || nextWorkflowStep.workItemCount === 0)) { + } + + if (allWorkItemsForStepComplete) { + if (!didCreateWorkItem && (!nextWorkflowStep || nextWorkflowStep.workItemCount < 1)) { // If all granules are finished mark the job as finished const { finalStatus, finalMessage } = await getFinalStatusAndMessageForJob(tx, job); await (await logAsyncExecutionTime( completeJob, 'HWIUWJI.completeJob', logger))(tx, job, finalStatus, logger, finalMessage); - } else { - // Either previewing or next step is a batched step and this item failed - if (job.status === JobStatus.PREVIEWING) { - // Special case to pause the job as soon as any single granule completes when in the previewing state - 
jobSaveStartTime = new Date().getTime(); - await job.pauseAndSave(tx); - durationMs = new Date().getTime() - jobSaveStartTime; - logger.debug('timing.HWIUWJI.job.pauseAndSave.end', { durationMs }); - } else { - jobSaveStartTime = new Date().getTime(); - await job.save(tx); - durationMs = new Date().getTime() - jobSaveStartTime; - logger.debug('timing.HWIUWJI.job.save.end', { durationMs }); - } } } else { // Currently only reach this condition for batched aggregation requests - jobSaveStartTime = new Date().getTime(); - await job.save(tx); - durationMs = new Date().getTime() - jobSaveStartTime; - logger.debug('timing.HWIUWJI.job.save.end', { durationMs }); + + if (!nextWorkflowStep && job.status === JobStatus.PREVIEWING) { + // Special case to pause the job as soon as any single granule completes when in the previewing state + jobSaveStartTime = new Date().getTime(); + await job.pauseAndSave(tx); + durationMs = new Date().getTime() - jobSaveStartTime; + logger.debug('timing.HWIUWJI.job.pauseAndSave.end', { durationMs }); + } else { + jobSaveStartTime = new Date().getTime(); + await job.save(tx); + durationMs = new Date().getTime() - jobSaveStartTime; + logger.debug('timing.HWIUWJI.job.save.end', { durationMs }); + } + } + + jobSaveStartTime = new Date().getTime(); + await job.save(tx); + durationMs = new Date().getTime() - jobSaveStartTime; + logger.debug('timing.HWIUWJI.job.save.end', { durationMs }); + } } catch (e) { logger.error(`Work item update failed for work item ${workItemID} and status ${status}`); @@ -851,7 +862,7 @@ export async function processWorkItems( const lastIndex = items.length - 1; for (let index = 0; index < items.length; index++) { - const { preprocessResult, update } = items[index]; + const { preprocessResult, update } = items[index]; if (index < lastIndex) { await processWorkItem(tx, preprocessResult, job, update, logger, false, thisStep); } else { @@ -898,7 +909,7 @@ export async function handleWorkItemUpdateWithJobId( logger))(tx, jobID, false, true); await processWorkItem(tx, preprocessResult, job, update, logger, true, undefined); - + await job.save(tx); }); const durationMs = new Date().getTime() - transactionStart; logger.debug('timing.HWIUWJI.transaction.end', { durationMs }); diff --git a/services/harmony/app/models/job.ts b/services/harmony/app/models/job.ts index d52b50185..3a2fb8285 100644 --- a/services/harmony/app/models/job.ts +++ b/services/harmony/app/models/job.ts @@ -9,6 +9,7 @@ import { truncateString } from '@harmony/util/string'; import DBRecord from './record'; import { Transaction } from '../util/db'; import JobLink, { getLinksForJob, JobLinkOrRecord } from './job-link'; +import WorkflowStep, { getWorkflowStepsByJobId } from './workflow-steps'; // how long data generated by this job will be available export const EXPIRATION_DAYS = 30; @@ -801,21 +802,38 @@ export class Job extends DBRecord implements JobRecord { } /** - * Updates the job progress based on a single batch completing - * You must call `#save` to persist the change + * Update the progress of a job using the progress of the WorkflowSteps for the job. + * You must call `#save` to persist the change. * - * @param totalItemCount - the number of items in total that need to be processed for the job - * to complete. 
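+ * As an illustration (hypothetical numbers, using the formula implemented below): with a
+ * query-cmr step at 100% and progress_weight 0.1, and a service step at 50% with
+ * progress_weight 1.0, the job progress would be
+ * floor((0.1 * 100 + 1.0 * 50) / (0.1 + 1.0)) = floor(54.5) = 54.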
+ * @param tx - a transaction to use when querying the database + * @returns An empty Promise + */ + async updateProgress(tx: Transaction): Promise { + const steps = await getWorkflowStepsByJobId(tx, this.jobID, ['workItemCount', 'completed_work_item_count', 'progress_weight']); + let prevStep: WorkflowStep = null; + for (const step of steps) { + step.updateProgress(prevStep); + prevStep = step; + } + let sumOfWeights = steps.reduce((sum: number, step: WorkflowStep) => sum + step.progress_weight, 0); + sumOfWeights = sumOfWeights > 0 ? sumOfWeights : 1; + let progSum = steps.reduce((sum: number, step: WorkflowStep) => sum + step.progress_weight * step.progress, 0); + progSum = Math.max(0, progSum); + // Only allow progress to be set to 100 when the job completes and don't let progress go + // backwards + const progress = Math.min(Math.floor(progSum / sumOfWeights), 99); + if (this.progress < progress) { + this.progress = progress; + } + } + + /** + * Updates the number of completed batches. This is no longer used to compute job progress, + * but it is left in place in the event we want to track batches later. + * You must call `#save` to persist the change. */ - completeBatch(totalItemCount: number = this.numInputGranules): void { + completeBatch(): void { this.batchesCompleted += 1; - // Only allow progress to be set to 100 when the job completes - let progress = Math.min(100 * (this.batchesCompleted / totalItemCount), 99); - // don't allow negative progress - progress = Math.max(0, progress); - // progress must be an integer - progress = Math.floor(progress); - this.progress = progress; } /** diff --git a/services/harmony/app/models/services/base-service.ts b/services/harmony/app/models/services/base-service.ts index d04e7643c..0ca92c724 100644 --- a/services/harmony/app/models/services/base-service.ts +++ b/services/harmony/app/models/services/base-service.ts @@ -17,6 +17,7 @@ import HarmonyRequest from '../harmony-request'; import UserWork from '../user-work'; import { joinTexts } from '@harmony/util/string'; import { makeWorkScheduleRequest } from '../../backends/workflow-orchestration/work-item-polling'; +import { QUERY_CMR_SERVICE_REGEX } from '../../backends/workflow-orchestration/util'; export interface ServiceCapabilities { concatenation?: boolean; @@ -46,14 +47,14 @@ export interface ServiceStep { format?: string[]; umm_c?: { native_format?: string[]; - } + }; }; } export interface ServiceCollection { id: string; granule_limit?: number; - variables?: string[] + variables?: string[]; } export interface ServiceConfig { @@ -228,7 +229,7 @@ export default abstract class BaseService { * * @returns the final staging location of the service */ - finalStagingLocation() : string { + finalStagingLocation(): string { const { requestId, destinationUrl } = this.operation; if (destinationUrl) { let destPath = destinationUrl.substring(5); @@ -256,7 +257,7 @@ export default abstract class BaseService { const { isAsync, jobID } = job; const requestMetric = getRequestMetric(req, this.operation, this.config.name); - logger.info(`Request metric for request ${jobID}`, { requestMetric: true, ...requestMetric } ); + logger.info(`Request metric for request ${jobID}`, { requestMetric: true, ...requestMetric }); this.operation.callback = `${env.callbackUrlRoot}/service/${jobID}`; return new Promise((resolve, reject) => { this._run(logger) @@ -384,7 +385,7 @@ export default abstract class BaseService { */ protected _createFirstStepWorkItems(workflowStep: WorkflowStep): WorkItem[] { const workItems = []; - 
if ( this.operation.scrollIDs.length > 0 ) { + if (this.operation.scrollIDs.length > 0) { for (const scrollID of this.operation.scrollIDs) { workItems.push(new WorkItem({ jobID: this.operation.requestId, @@ -408,18 +409,16 @@ * Return the number of work items that should be created for a given step * * @param step - workflow service step - * @param operation - the operation * @returns the number of work items for the given step */ - protected _workItemCountForStep(step: ServiceStep, operation: DataOperation): number { + protected _workItemCountForStep(step: ServiceStep): number { const regex = /query\-cmr/; // query-cmr number of work items is a function of the page size and total granules if (step.image.match(regex)) { return Math.ceil(this.numInputGranules / env.cmrMaxPageSize); - } else if (stepHasAggregatedOutput(step, operation)) { - return 1; } - return this.numInputGranules; + // the rest will get filled in as we go + return 0; } /** @@ -456,11 +455,15 @@ if (i === numSteps) { this.operation.stagingLocation = this.finalStagingLocation(); } + let progressWeight = 1.0; + if (QUERY_CMR_SERVICE_REGEX.test(step.image)) { + progressWeight = 0.1; + } workflowSteps.push(new WorkflowStep({ jobID: this.operation.requestId, serviceID: serviceImageToId(step.image), stepIndex: i, - workItemCount: this._workItemCountForStep(step, this.operation), + workItemCount: this._workItemCountForStep(step), operation: this.operation.serialize( this.config.data_operation_version, step.operations || [], @@ -470,6 +473,7 @@ is_sequential: !!step.is_sequential, maxBatchInputs: step.max_batch_inputs, maxBatchSizeInBytes: step.max_batch_size_in_bytes, + progress_weight: progressWeight, })); } })); diff --git a/services/harmony/app/models/user-work.ts b/services/harmony/app/models/user-work.ts index d4d982d60..02e2ce1ee 100644 --- a/services/harmony/app/models/user-work.ts +++ b/services/harmony/app/models/user-work.ts @@ -364,4 +364,4 @@ export async function populateUserWorkFromWorkItems(tx: Transaction): Promise<void> { diff --git a/services/harmony/app/models/workflow-steps.ts b/services/harmony/app/models/workflow-steps.ts --- a/services/harmony/app/models/workflow-steps.ts +++ b/services/harmony/app/models/workflow-steps.ts return op.sources.map((source) => source.collection); } + + /** + * Update the progress value based on the number of completed work-items for this step. + * NOTE: this should be called on the workflow steps in order since the progress + * computation depends on the progress of the previous step. 
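+ * For example (hypothetical numbers): if the previous step is at progress 100 and this
+ * step has completed 1 of its 2 work items, progress = floor(100 * 1.0 * 1 / 2) = 50;
+ * if the previous step were only at 50, this step would report floor(100 * 0.5 * 1 / 2) = 25,
+ * since a step cannot be further along than the step feeding it.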
+ * + * @param prevStep - the previous step in the workflow (null if this is the first step) + * @returns an integer number representing the percent progress + */ + updateProgress(prevStep: WorkflowStep): number { + let workItemCount = Math.max(1, this.workItemCount); + const completedItemCount = Math.max(0, this.completed_work_item_count); + workItemCount = Math.max(workItemCount, completedItemCount); + let prevProgress = 1.0; + if (prevStep) { + prevProgress = Math.max(0, prevStep.progress) / 100.0; + } + this.progress = Math.floor(100.0 * prevProgress * completedItemCount / workItemCount); + return this.progress; + } + } const tableFields = serializedFields.map((field) => `${WorkflowStep.table}.${field}`); @@ -131,15 +170,16 @@ export async function getWorkflowStepById( * Returns all workflow steps for a job * @param tx - the transaction to use for querying * @param jobID - the job ID - * + * @param fields - optional table fields to include in the result - default is all * @returns A promise with the workflow steps array */ export async function getWorkflowStepsByJobId( tx: Transaction, jobID: string, + fields = tableFields, ): Promise<WorkflowStep[]> { const workItemData = await tx(WorkflowStep.table) - .select() + .select(...fields) .where({ jobID }) .orderBy('id'); @@ -257,19 +297,6 @@ export async function decrementFutureWorkItemCount(tx: Transaction, jobID, stepIndex): Promise<void> { .decrement('workItemCount'); } -/** - * Increment the number of expected work items for the step. Used during batching. - * - * @param tx - the database transaction - * @param jobID - the job ID - * @param stepIndex - the current step index - */ -export async function incrementWorkItemCount(tx: Transaction, jobID, stepIndex): Promise<void> { - await tx(WorkflowStep.table) - .where({ jobID, stepIndex }) - .increment('workItemCount'); -} - /** * Decrement the number of expected work items for the step. Used during batching when prior step * items fail and we end up with the final batch being empty. @@ -301,7 +328,9 @@ export async function updateIsComplete(tx: Transaction, jobID: string, numInputG if (step.is_sequential) { const completedCount = await workItemCountForStep(tx, jobID, stepIndex, COMPLETED_WORK_ITEM_STATUSES); - const expectedCount = Math.ceil(numInputGranules / env.cmrMaxPageSize); + // TODO this is only true for query-cmr. If we add another sequential service we need to + // fix this. 
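+ // For example, with numInputGranules = 7975 (the cmr-hits value in the fixtures below)
+ // and an assumed cmrMaxPageSize of 2000, expectedCount = ceil(7975 / 2000) = 4.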
+ const expectedCount = Math.ceil(numInputGranules / env.cmrMaxPageSize); isComplete = completedCount == expectedCount; } else { diff --git a/services/harmony/app/util/aggregation-batch.ts b/services/harmony/app/util/aggregation-batch.ts index e232d9f60..ab173faf8 100644 --- a/services/harmony/app/util/aggregation-batch.ts +++ b/services/harmony/app/util/aggregation-batch.ts @@ -12,7 +12,7 @@ import { objectStoreForProtocol } from './object-store'; import axios from 'axios'; import { getCatalogItemUrls, getCatalogLinks, readCatalogItems } from './stac'; import WorkItemUpdate from '../models/work-item-update'; -import WorkflowStep, { decrementWorkItemCount, incrementWorkItemCount } from '../models/workflow-steps'; +import WorkflowStep from '../models/workflow-steps'; import { WorkItemStatus } from '../models/work-item-interface'; import WorkItem from '../models/work-item'; import { incrementReadyCount } from '../models/user-work'; @@ -171,7 +171,7 @@ async function createStacCatalogForBatch( * Create a STAC catalog for a batch then create an aggregating work item to process it * * @param tx - the database transaction - * @param workflowStep- the step in the workflow that needs batching + * @param workflowStep - the step in the workflow that needs batching * @param batch - the Batch to process * @param logger - the Logger for the request * @@ -199,6 +199,8 @@ async function createCatalogAndWorkItemForBatch( stacCatalogLocation: catalogUrl, workflowStepIndex: workflowStep.stepIndex, }); + workflowStep.workItemCount += 1; + await workflowStep.save(tx); await incrementReadyCount(tx, jobID, serviceID); await newWorkItem.save(tx); @@ -208,8 +210,7 @@ async function createCatalogAndWorkItemForBatch( return true; } else { - logger.warn('Attempted to construct a work item for a batch, but there were no valid items in the batch. 
Decrementing the expected number of work items.'); - await decrementWorkItemCount(tx, jobID, stepIndex); + logger.warn('Attempted to construct a work item for a batch, but there were no valid items in the batch.'); } return false; } @@ -238,7 +239,7 @@ export async function handleBatching( workItemStatus: WorkItemStatus, allWorkItemsForStepComplete: boolean) : Promise { - const { jobID, serviceID, stepIndex } = workflowStep; + const { jobID, serviceID } = workflowStep; let { maxBatchInputs, maxBatchSizeInBytes } = workflowStep; let didCreateWorkItem = false; maxBatchInputs = maxBatchInputs || env.maxBatchInputs; @@ -299,7 +300,7 @@ export async function handleBatching( let nextSortIndex: number; // get the most recent batch so we can try to assign new items to it let currentBatch = await withHighestBatchIDForJobService(tx, jobID, serviceID); - // keep track of how big the batch is in terms of number of items and total size of the + // keep track of how big the batch is in terms of number of items and total size of the // items in bytes let currentBatchSize = 0; let currentBatchCount = 0; @@ -374,7 +375,6 @@ export async function handleBatching( currentBatch = newBatch; currentBatchCount = 0; currentBatchSize = 0; - await incrementWorkItemCount(tx, jobID, stepIndex); } } else { if (currentBatchSize + batchItem.itemSize > maxBatchSizeInBytes) { @@ -401,7 +401,6 @@ export async function handleBatching( currentBatch = newBatch; currentBatchCount = 1; currentBatchSize = batchItem.itemSize; - await incrementWorkItemCount(tx, jobID, stepIndex); index += 1; } } else { diff --git a/services/harmony/fixtures/cmr.uat.earthdata.nasa.gov-443/170716889458462746 b/services/harmony/fixtures/cmr.uat.earthdata.nasa.gov-443/170716889458462746 new file mode 100644 index 000000000..9d63702fe --- /dev/null +++ b/services/harmony/fixtures/cmr.uat.earthdata.nasa.gov-443/170716889458462746 @@ -0,0 +1,31 @@ +POST /search/granules.json +accept: application/json +content-type: multipart/form-data; boundary=----------------------------012345678901234567890123 +accept-encoding: gzip,deflate +body: ------------------------------012345678901234567890123\r\nContent-Disposition: form-data; name=\"page_size\"\r\n\r\n2\r\n------------------------------012345678901234567890123\r\nContent-Disposition: form-data; name=\"bounding_box\"\r\n\r\n-180,0,180,90\r\n------------------------------012345678901234567890123\r\nContent-Disposition: form-data; name=\"collection_concept_id\"\r\n\r\nC1234208438-POCLOUD\r\n------------------------------012345678901234567890123--\r\n + +HTTP/1.1 200 OK +content-type: application/json;charset=utf-8 +transfer-encoding: chunked +connection: close +date: Mon, 05 Feb 2024 21:34:54 GMT +x-frame-options: SAMEORIGIN +access-control-allow-origin: * +x-xss-protection: 1; mode=block +cmr-request-id: 9047863e-7a46-4584-b060-177cfdc48f07 +content-sha1: 5a8113797cc847eab69fba41a304a871896ad257 +strict-transport-security: max-age=31536000 +cmr-search-after: ["pocloud",1199237724619,1234495214] +cmr-hits: 7975 +access-control-expose-headers: CMR-Hits, CMR-Request-Id, X-Request-Id, CMR-Scroll-Id, CMR-Search-After, CMR-Timed-Out, CMR-Shapefile-Original-Point-Count, CMR-Shapefile-Simplified-Point-Count +x-content-type-options: nosniff +cmr-took: 70 +x-request-id: sfSGw2PgiAWIGSm8aZ_kTjkSFzb7zmAqbaEJv-NvZLcGnB_Kg6K5aA== +content-md5: 46c8cab8d85c150747ce055b56e7b961 +server: ServerTokens ProductOnly +x-cache: Miss from cloudfront +via: 1.1 c1e0a7ac577ef4659f75bd73a73531f6.cloudfront.net (CloudFront) +x-amz-cf-pop: 
DEN52-P3 +x-amz-cf-id: sfSGw2PgiAWIGSm8aZ_kTjkSFzb7zmAqbaEJv-NvZLcGnB_Kg6K5aA== + +{"feed":{"updated":"2024-02-05T21:34:54.549Z","id":"https://cmr.uat.earthdata.nasa.gov:443/search/granules.json","title":"ECHO granule metadata","entry":[{"boxes":["-66.15 0.202 66.149 180","-66.15 -180 66.149 -0.065"],"time_start":"2007-12-31T00:52:14.361Z","updated":"2020-09-23T13:04:09.706Z","dataset_id":"Jason-1 SGDR version E NetCDF","lines":["-66.1495 -101.9729 -64.8397 -82.9482 -61.0739 -66.504 -54.9985 -53.2772 -46.3974 -42.6732 -37.3258 -35.5672 -25.6325 -29.159 17.1072 -12.7754 31.1728 -6.3157 42.6321 1.1403 51.2589 9.7149 57.7895 20.123 62.4081 32.6955 65.1937 47.3897 66.1494 63.7891"],"data_center":"POCLOUD","title":"JA1_GPS_2PeP220_111_20071231_005214_20071231_014826","coordinate_system":"CARTESIAN","day_night_flag":"UNSPECIFIED","time_end":"2007-12-31T01:48:26.552Z","id":"G1234495188-POCLOUD","original_format":"UMM_JSON","granule_size":"8.288930892944336","browse_flag":true,"collection_concept_id":"C1234208438-POCLOUD","online_access_flag":true,"links":[{"rel":"http://esipfed.org/ns/fedsearch/1.1/s3#","title":"This link provides direct download access via S3 to the granule.","hreflang":"en-US","href":"s3://podaac-uat-cumulus-protected/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.nc"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"Download JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.nc.md5","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.nc.md5"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/data#","title":"Download JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.nc","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.nc"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"Download JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.cmr.json","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.cmr.json"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"api endpoint to retrieve temporary credentials valid for same-region direct s3 access","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/s3credentials"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/data#","title":"OPeNDAP request 
URL","hreflang":"en-US","href":"https://opendap.uat.earthdata.nasa.gov/providers/POCLOUD/collections/Jason-1%20SGDR%20version%20E%20NetCDF/granules/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.rad_liquid_water.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.ssha.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.wind_speed_alt.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.wind_speed_rad.png"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_maneuver.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_safehold.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_cyclelist.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/Handbook_Jason-1_v5.1_April2016.pdf"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://podaac.jpl.nasa.gov/CitingPODAAC"},{"inherited":true,"length":"0MB","rel":"http://esipfed.org/ns/fedsearch/1.1/data#","hreflang":"en-US","href":"https://cmr.earthdata.nasa.gov/virtual-directory/collections/C1940472420-POCLOUD"},{"inherited":true,"length":"0MB","rel":"http://esipfed.org/ns/fedsearch/1.1/data#","hreflang":"en-US","href":"https://search.earthdata.nasa.gov/search/granules?p=C1940472420-POCLOUD"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://github.com/podaac/data-readers"}]},{"boxes":["-66.149 0.061 66.149 180","-66.149 -180 66.149 -0.005"],"time_start":"2008-01-02T01:35:24.619Z","updated":"2020-09-23T14:22:48.447Z","dataset_id":"Jason-1 SGDR version E NetCDF","lines":["-66.1495 -118.9314 -64.8333 -99.9114 -61.0624 -83.4776 -54.9836 -70.2611 -46.3709 -59.6559 -37.2976 -52.5568 -25.6031 -46.1531 17.1056 -29.7842 31.1322 -23.3456 42.5766 -15.9122 51.3057 -7.2346 57.5833 2.6936 62.1143 14.6271 65.1196 29.7786 66.1493 
46.7257"],"data_center":"POCLOUD","title":"JA1_GPS_2PeP220_163_20080102_013524_20080102_023136","coordinate_system":"CARTESIAN","day_night_flag":"UNSPECIFIED","time_end":"2008-01-02T02:31:36.001Z","id":"G1234495214-POCLOUD","original_format":"UMM_JSON","granule_size":"8.6459321975708","browse_flag":true,"collection_concept_id":"C1234208438-POCLOUD","online_access_flag":true,"links":[{"rel":"http://esipfed.org/ns/fedsearch/1.1/s3#","title":"This link provides direct download access via S3 to the granule.","hreflang":"en-US","href":"s3://podaac-uat-cumulus-protected/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.nc"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"Download JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.nc.md5","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.nc.md5"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/data#","title":"Download JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.nc","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.nc"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"Download JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.cmr.json","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.cmr.json"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"api endpoint to retrieve temporary credentials valid for same-region direct s3 access","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/s3credentials"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/data#","title":"OPeNDAP request 
URL","hreflang":"en-US","href":"https://opendap.uat.earthdata.nasa.gov/providers/POCLOUD/collections/Jason-1%20SGDR%20version%20E%20NetCDF/granules/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.rad_liquid_water.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.ssha.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.wind_speed_alt.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.wind_speed_rad.png"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_maneuver.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_safehold.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_cyclelist.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/Handbook_Jason-1_v5.1_April2016.pdf"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://podaac.jpl.nasa.gov/CitingPODAAC"},{"inherited":true,"length":"0MB","rel":"http://esipfed.org/ns/fedsearch/1.1/data#","hreflang":"en-US","href":"https://cmr.earthdata.nasa.gov/virtual-directory/collections/C1940472420-POCLOUD"},{"inherited":true,"length":"0MB","rel":"http://esipfed.org/ns/fedsearch/1.1/data#","hreflang":"en-US","href":"https://search.earthdata.nasa.gov/search/granules?p=C1940472420-POCLOUD"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://github.com/podaac/data-readers"}]}]}} \ No newline at end of file diff --git a/services/harmony/fixtures/cmr.uat.earthdata.nasa.gov-443/170716889556891275 b/services/harmony/fixtures/cmr.uat.earthdata.nasa.gov-443/170716889556891275 new file mode 100644 index 000000000..36cbd628f --- /dev/null +++ b/services/harmony/fixtures/cmr.uat.earthdata.nasa.gov-443/170716889556891275 @@ -0,0 +1,31 @@ +POST /search/granules.json +accept: application/json +content-type: multipart/form-data; boundary=----------------------------012345678901234567890123 +accept-encoding: gzip,deflate +body: ------------------------------012345678901234567890123\r\nContent-Disposition: form-data; name=\"page_size\"\r\n\r\n2\r\n------------------------------012345678901234567890123\r\nContent-Disposition: form-data; 
name=\"bounding_box\"\r\n\r\n-180,0,180,90\r\n------------------------------012345678901234567890123\r\nContent-Disposition: form-data; name=\"collection_concept_id\"\r\n\r\nC1234208438-POCLOUD\r\n------------------------------012345678901234567890123--\r\n + +HTTP/1.1 200 OK +content-type: application/json;charset=utf-8 +transfer-encoding: chunked +connection: close +date: Mon, 05 Feb 2024 21:34:55 GMT +x-frame-options: SAMEORIGIN +access-control-allow-origin: * +x-xss-protection: 1; mode=block +cmr-request-id: 96f63b19-27dc-4deb-ab8f-788602478a4b +content-sha1: 164b6b82c1c181e24a6dd95be888c33f18c983c8 +strict-transport-security: max-age=31536000 +cmr-search-after: ["pocloud",1199237724619,1234495214] +cmr-hits: 7975 +access-control-expose-headers: CMR-Hits, CMR-Request-Id, X-Request-Id, CMR-Scroll-Id, CMR-Search-After, CMR-Timed-Out, CMR-Shapefile-Original-Point-Count, CMR-Shapefile-Simplified-Point-Count +x-content-type-options: nosniff +cmr-took: 61 +x-request-id: qWTWf7_ZJ6zMmLnGBsUQB1aPkXHEJ0qYgH7x-R6ih-XhiWsRBPccEg== +content-md5: 3023693e2560336b67eabec7b793be46 +server: ServerTokens ProductOnly +x-cache: Miss from cloudfront +via: 1.1 2049902380178fd7b885115d80ccf966.cloudfront.net (CloudFront) +x-amz-cf-pop: DEN52-P3 +x-amz-cf-id: qWTWf7_ZJ6zMmLnGBsUQB1aPkXHEJ0qYgH7x-R6ih-XhiWsRBPccEg== + +{"feed":{"updated":"2024-02-05T21:34:55.536Z","id":"https://cmr.uat.earthdata.nasa.gov:443/search/granules.json","title":"ECHO granule metadata","entry":[{"boxes":["-66.15 0.202 66.149 180","-66.15 -180 66.149 -0.065"],"time_start":"2007-12-31T00:52:14.361Z","updated":"2020-09-23T13:04:09.706Z","dataset_id":"Jason-1 SGDR version E NetCDF","lines":["-66.1495 -101.9729 -64.8397 -82.9482 -61.0739 -66.504 -54.9985 -53.2772 -46.3974 -42.6732 -37.3258 -35.5672 -25.6325 -29.159 17.1072 -12.7754 31.1728 -6.3157 42.6321 1.1403 51.2589 9.7149 57.7895 20.123 62.4081 32.6955 65.1937 47.3897 66.1494 63.7891"],"data_center":"POCLOUD","title":"JA1_GPS_2PeP220_111_20071231_005214_20071231_014826","coordinate_system":"CARTESIAN","day_night_flag":"UNSPECIFIED","time_end":"2007-12-31T01:48:26.552Z","id":"G1234495188-POCLOUD","original_format":"UMM_JSON","granule_size":"8.288930892944336","browse_flag":true,"collection_concept_id":"C1234208438-POCLOUD","online_access_flag":true,"links":[{"rel":"http://esipfed.org/ns/fedsearch/1.1/s3#","title":"This link provides direct download access via S3 to the granule.","hreflang":"en-US","href":"s3://podaac-uat-cumulus-protected/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.nc"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"Download JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.nc.md5","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.nc.md5"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/data#","title":"Download JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.nc","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.nc"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"Download 
JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.cmr.json","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.cmr.json"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"api endpoint to retrieve temporary credentials valid for same-region direct s3 access","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/s3credentials"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/data#","title":"OPeNDAP request URL","hreflang":"en-US","href":"https://opendap.uat.earthdata.nasa.gov/providers/POCLOUD/collections/Jason-1%20SGDR%20version%20E%20NetCDF/granules/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.rad_liquid_water.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.ssha.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.wind_speed_alt.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_111_20071231_005214_20071231_014826.wind_speed_rad.png"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_maneuver.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_safehold.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_cyclelist.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/Handbook_Jason-1_v5.1_April2016.pdf"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://podaac.jpl.nasa.gov/CitingPODAAC"},{"inherited":true,"length":"0MB","rel":"http://esipfed.org/ns/fedsearch/1.1/data#","hreflang":"en-US","href":"https://cmr.earthdata.nasa.gov/virtual-directory/collections/C1940472420-POCLOUD"},{"inherited":true,"length":"0MB","rel":"http://esipfed.org/ns/fedsearch/1.1/data#","hreflang":"en-US","href":"https://search.earthdata.nasa.gov/search/granules?p=C1940472420-POCLOUD"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://github.com/podaac/data-readers"}]},{"boxes":["-66.149 0.061 66.149 180","-66.149 -180 66.149 -0.005"],"time_start":"2008-01-02T01:35:24.619Z","updated":"2020-09-23T14:22:48.447Z","dataset_id":"Jason-1 SGDR version E NetCDF","lines":["-66.1495 
-118.9314 -64.8333 -99.9114 -61.0624 -83.4776 -54.9836 -70.2611 -46.3709 -59.6559 -37.2976 -52.5568 -25.6031 -46.1531 17.1056 -29.7842 31.1322 -23.3456 42.5766 -15.9122 51.3057 -7.2346 57.5833 2.6936 62.1143 14.6271 65.1196 29.7786 66.1493 46.7257"],"data_center":"POCLOUD","title":"JA1_GPS_2PeP220_163_20080102_013524_20080102_023136","coordinate_system":"CARTESIAN","day_night_flag":"UNSPECIFIED","time_end":"2008-01-02T02:31:36.001Z","id":"G1234495214-POCLOUD","original_format":"UMM_JSON","granule_size":"8.6459321975708","browse_flag":true,"collection_concept_id":"C1234208438-POCLOUD","online_access_flag":true,"links":[{"rel":"http://esipfed.org/ns/fedsearch/1.1/s3#","title":"This link provides direct download access via S3 to the granule.","hreflang":"en-US","href":"s3://podaac-uat-cumulus-protected/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.nc"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"Download JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.nc.md5","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.nc.md5"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/data#","title":"Download JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.nc","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.nc"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"Download JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.cmr.json","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.cmr.json"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/metadata#","title":"api endpoint to retrieve temporary credentials valid for same-region direct s3 access","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/s3credentials"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/data#","title":"OPeNDAP request 
URL","hreflang":"en-US","href":"https://opendap.uat.earthdata.nasa.gov/providers/POCLOUD/collections/Jason-1%20SGDR%20version%20E%20NetCDF/granules/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.rad_liquid_water.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.ssha.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.wind_speed_alt.png"},{"rel":"http://esipfed.org/ns/fedsearch/1.1/browse#","type":"image/png","hreflang":"en-US","href":"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/JASON-1_L2_OST_GPS_E/JA1_GPS_2PeP220_163_20080102_013524_20080102_023136.wind_speed_rad.png"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_maneuver.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_safehold.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/j1_cyclelist.txt"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-docs/jason1/open/L2/docs/Handbook_Jason-1_v5.1_April2016.pdf"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://podaac.jpl.nasa.gov/CitingPODAAC"},{"inherited":true,"length":"0MB","rel":"http://esipfed.org/ns/fedsearch/1.1/data#","hreflang":"en-US","href":"https://cmr.earthdata.nasa.gov/virtual-directory/collections/C1940472420-POCLOUD"},{"inherited":true,"length":"0MB","rel":"http://esipfed.org/ns/fedsearch/1.1/data#","hreflang":"en-US","href":"https://search.earthdata.nasa.gov/search/granules?p=C1940472420-POCLOUD"},{"inherited":true,"rel":"http://esipfed.org/ns/fedsearch/1.1/documentation#","hreflang":"en-US","href":"https://github.com/podaac/data-readers"}]}]}} \ No newline at end of file diff --git a/services/harmony/test/aggregation-batching.ts b/services/harmony/test/aggregation-batching.ts index b05344c57..fcab4c400 100644 --- a/services/harmony/test/aggregation-batching.ts +++ b/services/harmony/test/aggregation-batching.ts @@ -270,10 +270,10 @@ describe('when testing a batched aggregation service', function () { }); describe('when checking the jobs listing', function () { - it('lists the job as running and progress of 43 with 1 link to the first aggregated output', async function () { + it('lists the job as running and progress of 49 with 1 link to the first aggregated output', async function () { const job = await getFirstJob(db); expect(job.status).to.equal('running'); - expect(job.progress).to.equal(50); + 
expect(job.progress).to.equal(49);
           const dataLinks = job.links.filter(link => link.rel === 'data');
           expect(dataLinks.length).to.equal(1);
         });
@@ -318,10 +318,10 @@ describe('when testing a batched aggregation service', function () {
       });

       describe('when checking the jobs listing', function () {
-        it('marks the job as running and progress of 86 with 2 links to the first two aggregated outputs', async function () {
+        it('marks the job as running and progress of 75 with 2 links to the first two aggregated outputs', async function () {
           const job = await getFirstJob(db);
           expect(job.status).to.equal('running');
-          expect(job.progress).to.equal(66);
+          expect(job.progress).to.equal(75);
           const dataLinks = job.links.filter(link => link.rel === 'data');
           expect(dataLinks.length).to.equal(2);
         });
@@ -530,10 +530,10 @@ describe('when testing a batched aggregation service', function () {
       });

       describe('when checking the jobs listing', function () {
-        it('lists the job as running and progress of 43 with 1 link to the first aggregated output', async function () {
+        it('lists the job as running and progress of 49 with 1 link to the first aggregated output', async function () {
           const job = await getFirstJob(db);
           expect(job.status).to.equal('running');
-          expect(job.progress).to.equal(50);
+          expect(job.progress).to.equal(49);
           const dataLinks = job.links.filter(link => link.rel === 'data');
           expect(dataLinks.length).to.equal(1);
         });
@@ -578,10 +578,10 @@ describe('when testing a batched aggregation service', function () {
       });

       describe('when checking the jobs listing', function () {
-        it('marks the job as running and progress of 86 with 2 links to the first two aggregated outputs', async function () {
+        it('marks the job as running and progress of 75 with 2 links to the first two aggregated outputs', async function () {
           const job = await getFirstJob(db);
           expect(job.status).to.equal('running');
-          expect(job.progress).to.equal(66);
+          expect(job.progress).to.equal(75);
           const dataLinks = job.links.filter(link => link.rel === 'data');
           expect(dataLinks.length).to.equal(2);
         });
@@ -766,10 +766,10 @@ describe('when testing a batched aggregation service', function () {
       });

       describe('when checking the jobs listing', function () {
-        it('lists the job as running and progress of 43 with 1 link to the first aggregated output', async function () {
+        it('lists the job as running and progress of 49 with 1 link to the first aggregated output', async function () {
           const job = await getFirstJob(db);
           expect(job.status).to.equal('running');
-          expect(job.progress).to.equal(50);
+          expect(job.progress).to.equal(49);
           const dataLinks = job.links.filter(link => link.rel === 'data');
           expect(dataLinks.length).to.equal(1);
         });
@@ -814,10 +814,10 @@ describe('when testing a batched aggregation service', function () {
       });

       describe('when checking the jobs listing', function () {
-        it('marks the job as running and progress of 86 with 2 links to the first two aggregated outputs', async function () {
+        it('marks the job as running and progress of 75 with 2 links to the first two aggregated outputs', async function () {
           const job = await getFirstJob(db);
           expect(job.status).to.equal('running');
-          expect(job.progress).to.equal(66);
+          expect(job.progress).to.equal(75);
           const dataLinks = job.links.filter(link => link.rel === 'data');
           expect(dataLinks.length).to.equal(2);
         });
@@ -1018,10 +1018,10 @@ describe('when testing a batched aggregation service', function () {
       });

       describe('when checking the jobs listing', function () {
-        it('lists the job as running and progress of 43 with 1 link to the first aggregated output', async function () {
+        it('lists the job as running and progress of 49 with 1 link to the first aggregated output', async function () {
           const job = await getFirstJob(db);
           expect(job.status).to.equal('running');
-          expect(job.progress).to.equal(50);
+          expect(job.progress).to.equal(49);
           const dataLinks = job.links.filter(link => link.rel === 'data');
           expect(dataLinks.length).to.equal(1);
         });
@@ -1066,10 +1066,10 @@ describe('when testing a batched aggregation service', function () {
       });

       describe('when checking the jobs listing', function () {
-        it('marks the job as running and progress of 86 with 2 links to the first two aggregated outputs', async function () {
+        it('marks the job as running and progress of 75 with 2 links to the first two aggregated outputs', async function () {
           const job = await getFirstJob(db);
           expect(job.status).to.equal('running');
-          expect(job.progress).to.equal(66);
+          expect(job.progress).to.equal(75);
           const dataLinks = job.links.filter(link => link.rel === 'data');
           expect(dataLinks.length).to.equal(2);
         });
diff --git a/services/harmony/test/helpers/queue.ts b/services/harmony/test/helpers/queue.ts
index 0e6f1e274..57ba4c9d8 100644
--- a/services/harmony/test/helpers/queue.ts
+++ b/services/harmony/test/helpers/queue.ts
@@ -126,4 +126,4 @@ export function hookGetQueueUrlForService(): void {
 export function resetQueues(): void {
   serviceQueues = {};
   typeQueues = {};
-}
\ No newline at end of file
+}
diff --git a/services/harmony/test/ignore-errors.ts b/services/harmony/test/ignore-errors.ts
index ad13cdc04..aeb72be5b 100644
--- a/services/harmony/test/ignore-errors.ts
+++ b/services/harmony/test/ignore-errors.ts
@@ -137,6 +137,7 @@ describe('ignoreErrors', function () {
   });

   describe('when the first Swath Projector work item fails', function () {
+    this.timeout(120000);
     let firstSwathItem;

     before(async function () {
diff --git a/services/harmony/test/job-progress.ts b/services/harmony/test/job-progress.ts
new file mode 100644
index 000000000..6e4c3b95b
--- /dev/null
+++ b/services/harmony/test/job-progress.ts
@@ -0,0 +1,141 @@
+import { expect } from 'chai';
+import db from '../app/util/db';
+import { Job } from '../app/models/job';
+import { hookRangesetRequest } from './helpers/ogc-api-coverages';
+import hookServersStartStop from './helpers/servers';
+import { getWorkForService, updateWorkItem, fakeServiceStacOutput } from './helpers/work-items';
+import { getStacLocation, WorkItemStatus } from '../app/models/work-item-interface';
+
+describe('Testing job progress', function () {
+  const collection = 'C1234208438-POCLOUD';
+  describe('when making a sub-setting request with no concatenation', async function () {
+    hookServersStartStop();
+    const reprojectQuery = {
+      maxResults: 2,
+      subset: 'lat(0:90)',
+      concatenate: false,
+    };
+
+    hookRangesetRequest('1.0.0', collection, 'all', { query: reprojectQuery });
+
+    describe('when the query-cmr work-item is retrieved and processed', async function () {
+      it('sets the job progress to 9', async function () {
+        const res = await getWorkForService(this.backend, 'harmonyservices/query-cmr:latest');
+        const { workItem } = JSON.parse(res.text);
+        workItem.status = WorkItemStatus.SUCCESSFUL;
+        workItem.results = [
+          getStacLocation(workItem, 'catalog0.json'),
+          getStacLocation(workItem, 'catalog1.json'),
+        ];
+        workItem.outputItemSizes = [1, 2];
+        await fakeServiceStacOutput(workItem.jobID, workItem.id, 2, 1);
+
+        await updateWorkItem(this.backend, workItem);
+        const jobs = await Job.forUser(db, 'anonymous');
+        const job = jobs.data[0];
+        expect(job.progress).to.equal(9);
+      });
+
+      describe('when the first sub-setter work-item is retrieved and processed', async function () {
+        it('sets the job progress to 54', async function () {
+          const res = await getWorkForService(this.backend, 'ghcr.io/podaac/l2ss-py:sit');
+          const { workItem } = JSON.parse(res.text);
+          workItem.status = WorkItemStatus.SUCCESSFUL;
+          workItem.results = [
+            getStacLocation(workItem, 'catalog.json'),
+          ];
+          workItem.outputItemSizes = [1];
+          await fakeServiceStacOutput(workItem.jobID, workItem.id, 1, 1);
+          await updateWorkItem(this.backend, workItem);
+          const jobs = await Job.forUser(db, 'anonymous');
+          const job = jobs.data[0];
+          expect(job.progress).to.equal(54);
+
+        });
+      });
+
+      describe('when the second sub-setter work-item is retrieved and processed', async function () {
+        it('sets the job progress to 100', async function () {
+          const res = await getWorkForService(this.backend, 'ghcr.io/podaac/l2ss-py:sit');
+          const { workItem } = JSON.parse(res.text);
+          workItem.status = WorkItemStatus.SUCCESSFUL;
+          workItem.results = [
+            getStacLocation(workItem, 'catalog.json'),
+          ];
+          workItem.outputItemSizes = [1];
+          await fakeServiceStacOutput(workItem.jobID, workItem.id, 1, 1);
+          await updateWorkItem(this.backend, workItem);
+          const jobs = await Job.forUser(db, 'anonymous');
+          const job = jobs.data[0];
+          expect(job.progress).to.equal(100);
+
+        });
+      });
+
+    });
+  });
+
+  describe('when making a sub-setting request with concatenation', async function () {
+    hookServersStartStop();
+    const reprojectQuery = {
+      maxResults: 2,
+      subset: 'lat(0:90)',
+      concatenate: true,
+    };
+
+    hookRangesetRequest('1.0.0', collection, 'all', { query: reprojectQuery });
+
+    describe('when the query-cmr work-item is retrieved and processed', async function () {
+      it('sets the job progress to 4', async function () {
+        const res = await getWorkForService(this.backend, 'harmonyservices/query-cmr:latest');
+        const { workItem } = JSON.parse(res.text);
+        workItem.status = WorkItemStatus.SUCCESSFUL;
+        workItem.results = [
+          getStacLocation(workItem, 'catalog0.json'),
+          getStacLocation(workItem, 'catalog1.json'),
+        ];
+        workItem.outputItemSizes = [1, 2];
+        await fakeServiceStacOutput(workItem.jobID, workItem.id, 2, 1);
+
+        await updateWorkItem(this.backend, workItem);
+        const jobs = await Job.forUser(db, 'anonymous');
+        const job = jobs.data[0];
+        expect(job.progress).to.equal(4);
+      });
+
+      describe('when the first sub-setter work-item is retrieved and processed', async function () {
+        it('sets the job progress to 28', async function () {
+          const res = await getWorkForService(this.backend, 'ghcr.io/podaac/l2ss-py:sit');
+          const { workItem } = JSON.parse(res.text);
+          workItem.status = WorkItemStatus.SUCCESSFUL;
+          workItem.results = [
+            getStacLocation(workItem, 'catalog.json'),
+          ];
+          workItem.outputItemSizes = [1];
+          await fakeServiceStacOutput(workItem.jobID, workItem.id, 1, 1);
+          await updateWorkItem(this.backend, workItem);
+          const jobs = await Job.forUser(db, 'anonymous');
+          const job = jobs.data[0];
+          expect(job.progress).to.equal(28);
+        });
+      });
+
+      describe('when the second sub-setter work-item is retrieved and processed', async function () {
+        it('sets the job progress to 52', async function () {
+          const res = await getWorkForService(this.backend, 'ghcr.io/podaac/l2ss-py:sit');
+          const { workItem } = JSON.parse(res.text);
+          workItem.status = WorkItemStatus.SUCCESSFUL;
+          workItem.results = [
+            getStacLocation(workItem, 'catalog.json'),
+          ];
+          workItem.outputItemSizes = [1];
+          await fakeServiceStacOutput(workItem.jobID, workItem.id, 1, 1);
+          await updateWorkItem(this.backend, workItem);
+          const jobs = await Job.forUser(db, 'anonymous');
+          const job = jobs.data[0];
+          expect(job.progress).to.equal(52);
+        });
+      });
+    });
+  });
+});
diff --git a/services/harmony/test/jobs/auto-pause-jobs.ts b/services/harmony/test/jobs/auto-pause-jobs.ts
index 5d6205261..22cf31521 100644
--- a/services/harmony/test/jobs/auto-pause-jobs.ts
+++ b/services/harmony/test/jobs/auto-pause-jobs.ts
@@ -79,7 +79,6 @@ function previewingToPauseTest(username: string): void {
     ];
     await fakeServiceStacOutput(workItemQueryCmr.jobID, workItemQueryCmr.id, 4);
     await updateWorkItem(this.backend, workItemQueryCmr);
-
     const resServExample = await getWorkForService(this.backend, 'harmonyservices/service-example:latest');
     expect(resServExample.status).to.equal(200);
     const workItemServExample = JSON.parse(resServExample.text).workItem;
diff --git a/services/harmony/test/workflow-orchestration.ts b/services/harmony/test/workflow-orchestration.ts
index 57279899b..c7f8b45dd 100644
--- a/services/harmony/test/workflow-orchestration.ts
+++ b/services/harmony/test/workflow-orchestration.ts
@@ -19,7 +19,7 @@ import { populateUserWorkFromWorkItems } from '../app/models/user-work';
 import { resetQueues } from './helpers/queue';

 /**
- * Create a job and some work times to be used by tests
+ * Create a job and some work steps/items to be used by tests
  *
- * @param initialCmrHits - The number of hits returned by the CMR the first time it is queries
+ * @param initialCmrHits - The number of hits returned by the CMR the first time it is queried
  * @param initialQueryCmrWorkItemCount - The number of query-cmr work items anticipated by the
@@ -49,7 +49,7 @@ async function createJobAndWorkItems(
     jobID: job.jobID,
     serviceID: nonAggregateService,
     stepIndex: 2,
-    workItemCount: initialCmrHits,
+    workItemCount: 0,
     hasAggregatedOutput: false,
   }).save(db);

@@ -57,7 +57,7 @@
     jobID: job.jobID,
     serviceID: aggregateService,
     stepIndex: 3,
-    workItemCount: 1,
+    workItemCount: 0,
     hasAggregatedOutput: true,
   }).save(db);

@@ -86,8 +86,8 @@ async function testInitialConditions(initialCmrHits: number, initialQueryCmrWork
   it('sets the initial number of work items for each step', async function () {
     const workflowSteps = await getWorkflowStepsByJobId(db, this.jobID);
     expect(workflowSteps[0].workItemCount).equals(initialQueryCmrWorkItemCount);
-    expect(workflowSteps[1].workItemCount).equals(initialCmrHits);
-    expect(workflowSteps[2].workItemCount).equals(1);
+    expect(workflowSteps[1].workItemCount).equals(0);
+    expect(workflowSteps[2].workItemCount).equals(0);
   });
 }

@@ -296,11 +296,11 @@ describe('Workflow chaining for a collection configured for Swath Projector and
     });

     describe('when checking the jobs listing', function () {
-      it('marks the job as in progress and 50 percent complete because 1 of 2 granules is complete', async function () {
+      it('marks the job as in progress and 52 percent complete because query-cmr is completely done and 1 of 2 granules is complete in the other services', async function () {
        const jobs = await Job.forUser(db, 'anonymous');
        const job = jobs.data[0];
        expect(job.status).to.equal('running');
-       expect(job.progress).to.equal(50);
+       expect(job.progress).to.equal(52);
      });
    });

@@ -664,7 +664,7 @@
     jobID: job.jobID,
     serviceID: aggregateService,
     stepIndex: 2,
-    workItemCount: 1,
+    workItemCount: 0,
     hasAggregatedOutput: true,
   }).save(db);

@@ -698,6 +698,12 @@
       await updateWorkItem(this.backend, workItem);
     });

+    it('sets job progress to 42 after completing the first query-cmr work-item', async function () {
+      const jobs = await Job.forUser(db, 'anonymous');
+      const job = jobs.data[0];
+      expect(job.progress).to.equal(42);
+    });
+
     it('does not generate the aggregation step until all query-cmr items are finished', async function () {
       const queuedCount = (await getWorkItemsByJobIdAndStepIndex(db, this.jobID, 2)).workItems.length;
       expect(queuedCount).equals(0);
@@ -716,6 +722,12 @@
       await updateWorkItem(this.backend, workItem);
     });

+    it('sets job progress to 50 after completing all the query-cmr work-items', async function () {
+      const jobs = await Job.forUser(db, 'anonymous');
+      const job = jobs.data[0];
+      expect(job.progress).to.equal(50);
+    });
+
     it('queues the aggregating work item once all query-cmr items are finished', async function () {
       const queuedCount = (await getWorkItemsByJobIdAndStepIndex(db, this.jobID, 2)).workItems.length;
       expect(queuedCount).equals(1);
@@ -796,7 +808,7 @@

     it('does not update the number of work items for the aggregating step', async function () {
       const workflowStep = await getWorkflowStepByJobIdStepIndex(db, this.jobID, 3);
-      expect(workflowStep.workItemCount).equals(1);
+      expect(workflowStep.workItemCount).equals(0);
     });

     describe('and the number of worked items matches the new number', async function () {
@@ -872,7 +884,7 @@

     it('does not update the number of work items for the aggregating step', async function () {
       const workflowStep = await getWorkflowStepByJobIdStepIndex(db, this.jobID, 3);
-      expect(workflowStep.workItemCount).equals(1);
+      expect(workflowStep.workItemCount).equals(0);
     });

     describe('and the number of worked items matches the initial number', function () {
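
The progress expectations above are consistent with a weighted per-step completion model built on the new completed_work_item_count and progress_weight columns. The TypeScript sketch below is a minimal illustration of that arithmetic, not the actual Harmony implementation: the ProgressStep shape, the estimateJobProgress helper, and the specific weights (1 for query-cmr, 10 for each subsequent service step) are assumptions chosen because they reproduce the values asserted in the new job-progress.ts tests.

// Hypothetical model of the progress calculation implied by the new columns.
// All names and weights here are assumptions for illustration only.
interface ProgressStep {
  workItemCount: number;          // total expected work items for the step
  completedWorkItemCount: number; // mirrors the new completed_work_item_count column
  progressWeight: number;         // mirrors the new progress_weight column
}

// Weighted average of per-step completion fractions, floored to a whole percent.
function estimateJobProgress(steps: ProgressStep[]): number {
  const totalWeight = steps.reduce((sum, s) => sum + s.progressWeight, 0);
  const weightedDone = steps.reduce((sum, s) => {
    const fraction = s.workItemCount > 0 ? s.completedWorkItemCount / s.workItemCount : 0;
    return sum + s.progressWeight * fraction;
  }, 0);
  return Math.floor((100 * weightedDone) / totalWeight);
}

// Assumed weight of 1 for query-cmr and 10 for the subsetter:
// query-cmr finished, no subsetter items done yet -> floor(100 * 1/11) -> 9
console.log(estimateJobProgress([
  { workItemCount: 1, completedWorkItemCount: 1, progressWeight: 1 },
  { workItemCount: 2, completedWorkItemCount: 0, progressWeight: 10 },
])); // 9

// Appending a concatenation step (also weight 10) dilutes each step's share:
// one of two subsetter items done -> floor(100 * 6/21) -> 28
console.log(estimateJobProgress([
  { workItemCount: 1, completedWorkItemCount: 1, progressWeight: 1 },
  { workItemCount: 2, completedWorkItemCount: 1, progressWeight: 10 },
  { workItemCount: 1, completedWorkItemCount: 0, progressWeight: 10 },
])); // 28

Under the same assumptions, finishing both subsetter items in the concatenation chain yields floor(100 * 11/21) = 52, matching the final expectation in the concatenation suite. The 42 and 50 values in the multi-CMR-page tests do not follow from this fixed weighting, so the real calculation presumably weights steps differently while an aggregation step's work item count is still unknown; treat the sketch as illustrative only.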