diff --git a/core/_docs/requestBodies.json b/core/_docs/requestBodies.json
index 62d798eb7..ef91f98a5 100644
--- a/core/_docs/requestBodies.json
+++ b/core/_docs/requestBodies.json
@@ -755,7 +755,7 @@
       }
     }
   },
-  "ArbimonRecording": {
+  "ArbimonRecordingDeleteDataItem": {
     "type": "object",
     "properties": {
       "stream": {
@@ -769,7 +769,7 @@
           "type": "string"
         },
         "description": "Array of start days",
-        "example": "['2023-07-24T06:30:00.000Z']"
+        "example": ["2023-07-24T06:30:00.000Z", "2023-07-24T06:31:00.000Z", "2023-07-24T06:32:00.000Z"]
       }
     },
     "required": [
@@ -777,7 +777,7 @@
       "starts"
     ]
   },
-  "ArbimonRecordingsDeleteData": {
+  "ArbimonRecordingDeleteData": {
     "type": "array",
     "items": {
-      "$ref": "#/components/requestBodies/ArbimonRecording"
+      "$ref": "#/components/requestBodies/ArbimonRecordingDeleteDataItem"
diff --git a/core/internal/arbimon/bl/recordings.js b/core/internal/arbimon/bl/recordings.js
index 904b06b73..520a9d0fd 100644
--- a/core/internal/arbimon/bl/recordings.js
+++ b/core/internal/arbimon/bl/recordings.js
@@ -5,16 +5,11 @@ const { sequelize } = require('../../../_models')
 const TRASHES_STREAM_ID = process.env.TRASHES_STREAM_ID
 
 async function updateBatch (params) {
-  const transaction = await sequelize.transaction()
-  try {
+  return sequelize.transaction(async (transaction) => {
     const sourceFiles = await findMultipleSourceFiles(params, { transaction, fields: ['stream_source_file_id'] })
     await updateSegmentsBatch(TRASHES_STREAM_ID, params, { transaction })
     await streamSourceFileDao.updateById({ stream_id: TRASHES_STREAM_ID }, sourceFiles, { transaction })
-    await transaction.commit()
-  } catch (e) {
-    console.info('updateBatch error', e)
-    transaction.rollback()
-  }
+  })
 }
 
 /**
@@ -24,12 +19,10 @@ async function updateBatch (params) {
  * @param {Transaction} options.transaction Perform within given transaction
  */
 async function findMultipleSourceFiles (streams, options = {}) {
-  const transaction = options.transaction
-  const fields = options.fields
   let sourceFiles = []
   for (const s of streams) {
-    const ids = await streamSegmentDao.findByStreamAndStarts(s.stream, s.starts, { transaction, fields })
-    sourceFiles = sourceFiles.concat(ids.map(id => id.stream_source_file_id))
+    const segments = await streamSegmentDao.findByStreamAndStarts(s.stream, s.starts, options)
+    sourceFiles.push(...segments.map(segment => segment.stream_source_file_id))
   }
   return sourceFiles
 }
diff --git a/core/internal/arbimon/recordings.js b/core/internal/arbimon/recordings.js
index 1f312cb4d..12d632627 100644
--- a/core/internal/arbimon/recordings.js
+++ b/core/internal/arbimon/recordings.js
@@ -15,9 +15,6 @@ const { updateBatch } = require('./bl/recordings')
  *     description: Array of objects stream and start
  *     required: true
  *     content:
- *       application/x-www-form-urlencoded:
- *         schema:
- *           $ref: '#/components/requestBodies/ArbimonRecordingsDeleteData'
  *       application/json:
  *         schema:
- *           $ref: '#/components/requestBodies/ArbimonRecordingsDeleteData'
+ *           $ref: '#/components/requestBodies/ArbimonRecordingDeleteData'
diff --git a/core/stream-segments/dao/index.js b/core/stream-segments/dao/index.js
index 0d87fe7ad..d561e2b49 100644
--- a/core/stream-segments/dao/index.js
+++ b/core/stream-segments/dao/index.js
@@ -204,15 +204,19 @@ function findByStreamAndStarts (streamId, starts, options = {}) {
   })
 }
 
+function getISOString (date) {
+  return moment.utc(date).toISOString()
+}
+
 function getWhereForStreamAndStarts (streamId, starts) {
-  const dates = starts.map(s => moment.utc(s).valueOf())
-  const maxDate = moment.utc(Math.max(...dates)).toISOString()
-  const minDate = moment.utc(Math.min(...dates)).toISOString()
+  const dates = starts.map(s => moment.utc(s).valueOf()).sort((a, b) => {
+    return a - b
+  })
   const where = {
     stream_id: streamId,
     start: {
-      [Sequelize.Op.gte]: minDate,
-      [Sequelize.Op.lte]: maxDate,
+      [Sequelize.Op.gte]: getISOString(dates[0]),
+      [Sequelize.Op.lte]: getISOString(dates[dates.length - 1]),
       [Sequelize.Op.in]: starts
     }
   }