Commit

Improve functions
rassokhina-e committed Oct 30, 2023
1 parent 3267fbb commit a3b893d
Showing 4 changed files with 16 additions and 22 deletions.
6 changes: 3 additions & 3 deletions core/_docs/requestBodies.json
@@ -755,7 +755,7 @@
       }
     }
   },
-  "ArbimonRecording": {
+  "ArbimonRecordingDeleteDataItem": {
     "type": "object",
     "properties": {
       "stream": {
@@ -769,15 +769,15 @@
           "type": "string"
         },
         "description": "Array of start days",
-        "example": "['2023-07-24T06:30:00.000Z']"
+        "example": "['2023-07-24T06:30:00.000Z', '2023-07-24T06:31:00.000Z', '2023-07-24T06:32:00.000Z']"
       }
     },
     "required": [
       "stream",
       "starts"
     ]
   },
-  "ArbimonRecordingsDeleteData": {
+  "ArbimonRecordingDeleteData": {
     "type": "array",
     "items": {
       "$ref": "#/components/requestBodies/ArbimonRecording"
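For reference, the renamed ArbimonRecordingDeleteData schema is an array whose items reference the per-recording object above (a stream id plus a list of segment start times). The payload below is a hypothetical illustration of a body matching that shape; the stream ids are placeholders, not values from the repository.

// Hypothetical payload matching ArbimonRecordingDeleteData (stream ids are placeholders)
const payload = [
  {
    stream: 'abc123def456',
    starts: ['2023-07-24T06:30:00.000Z', '2023-07-24T06:31:00.000Z']
  },
  {
    stream: 'xyz789ghi012',
    starts: ['2023-07-25T10:00:00.000Z']
  }
]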
15 changes: 4 additions & 11 deletions core/internal/arbimon/bl/recordings.js
@@ -5,16 +5,11 @@ const { sequelize } = require('../../../_models')
 const TRASHES_STREAM_ID = process.env.TRASHES_STREAM_ID

 async function updateBatch (params) {
-  const transaction = await sequelize.transaction()
-  try {
+  return sequelize.transaction(async (transaction) => {
     const sourceFiles = await findMultipleSourceFiles(params, { transaction, fields: ['stream_source_file_id'] })
     await updateSegmentsBatch(TRASHES_STREAM_ID, params, { transaction })
     await streamSourceFileDao.updateById({ stream_id: TRASHES_STREAM_ID }, sourceFiles, { transaction })
-    await transaction.commit()
-  } catch (e) {
-    console.info('updateBatch error', e)
-    transaction.rollback()
-  }
+  })
 }

 /**
@@ -24,12 +19,10 @@ async function updateBatch (params) {
  * @param {Transaction} options.transaction Perform within given transaction
  */
 async function findMultipleSourceFiles (streams, options = {}) {
-  const transaction = options.transaction
-  const fields = options.fields
   let sourceFiles = []
   for (const s of streams) {
-    const ids = await streamSegmentDao.findByStreamAndStarts(s.stream, s.starts, { transaction, fields })
-    sourceFiles = sourceFiles.concat(ids.map(id => id.stream_source_file_id))
+    const segments = await streamSegmentDao.findByStreamAndStarts(s.stream, s.starts, options)
+    sourceFiles = [...sourceFiles, ...segments.map(s => s.stream_source_file_id)]
   }
   return sourceFiles
 }
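The updateBatch hunk above replaces a manually managed transaction (explicit commit, catch, log, rollback) with Sequelize's managed-transaction callback, which commits automatically when the callback resolves and rolls back when it throws, so errors now propagate to the caller instead of being swallowed by console.info. A minimal sketch of that pattern, with placeholder functions standing in for the real DAO calls:

// Minimal sketch of a managed Sequelize transaction; doStepOne/doStepTwo are
// placeholders, not functions from this repository.
async function updateAll (sequelize, params) {
  return sequelize.transaction(async (transaction) => {
    await doStepOne(params, { transaction })
    await doStepTwo(params, { transaction })
    // Resolving commits the transaction; a thrown error triggers rollback
    // and is re-thrown to the caller.
  })
}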
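Both hunks in this file work on the same params shape as the ArbimonRecordingDeleteData body above: an array of { stream, starts } objects. A hypothetical call from an async route handler, assuming the parsed JSON body is passed straight through:

// Hypothetical route-handler snippet; handleDelete and the example body are
// illustrations, not code from this commit.
const { updateBatch } = require('./bl/recordings')

async function handleDelete (body) {
  // body: [{ stream, starts }, ...]
  await updateBatch(body)
  // Matching segments and source files are reassigned to TRASHES_STREAM_ID
  // inside one managed transaction; any failure rolls it back and re-throws.
}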
3 changes: 0 additions & 3 deletions core/internal/arbimon/recordings.js
@@ -15,9 +15,6 @@ const { updateBatch } = require('./bl/recordings')
  *     description: Array of objects stream and start
  *     required: true
  *     content:
- *       application/x-www-form-urlencoded:
- *         schema:
- *           $ref: '#/components/requestBodies/ArbimonRecordingsDeleteData'
  *       application/json:
  *         schema:
  *           $ref: '#/components/requestBodies/ArbimonRecordingsDeleteData'
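With the application/x-www-form-urlencoded variant dropped, the documented request body is accepted as application/json only. A client would therefore send the array as a JSON body, roughly as sketched below; the URL and HTTP method are placeholders, since neither appears in this hunk.

// Hypothetical client call — URL and method are placeholders.
async function deleteRecordings (payload) {
  const res = await fetch('https://api.example.org/internal/arbimon/recordings', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload)
  })
  if (!res.ok) throw new Error(`Request failed with ${res.status}`)
}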
14 changes: 9 additions & 5 deletions core/stream-segments/dao/index.js
@@ -204,15 +204,19 @@ function findByStreamAndStarts (streamId, starts, options = {}) {
   })
 }

+function getISOString (date) {
+  return moment.utc(date).toISOString()
+}
+
 function getWhereForStreamAndStarts (streamId, starts) {
-  const dates = starts.map(s => moment.utc(s).valueOf())
-  const maxDate = moment.utc(Math.max(...dates)).toISOString()
-  const minDate = moment.utc(Math.min(...dates)).toISOString()
+  const dates = starts.map(s => moment.utc(s).valueOf()).sort((a, b) => {
+    return a - b
+  })
   const where = {
     stream_id: streamId,
     start: {
-      [Sequelize.Op.gte]: minDate,
-      [Sequelize.Op.lte]: maxDate,
+      [Sequelize.Op.gte]: getISOString(dates[0]),
+      [Sequelize.Op.lte]: getISOString(dates[dates.length - 1]),
       [Sequelize.Op.in]: starts
     }
   }
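The reworked getWhereForStreamAndStarts sorts the epoch timestamps once and takes the first and last elements as the range bounds (via the new getISOString helper), instead of rescanning the array with Math.min and Math.max; Op.in still limits matches to the exact start times, with the gte/lte bounds presumably letting the database narrow an index range first. A standalone sketch of the resulting clause, assuming moment and Sequelize are required as elsewhere in this DAO; the stream id is a placeholder:

// Standalone sketch of the where clause produced for a hypothetical stream id.
const moment = require('moment')
const Sequelize = require('sequelize')

const starts = ['2023-07-24T06:32:00.000Z', '2023-07-24T06:30:00.000Z']
const dates = starts.map(s => moment.utc(s).valueOf()).sort((a, b) => a - b)

const where = {
  stream_id: 'abc123def456', // placeholder stream id
  start: {
    [Sequelize.Op.gte]: moment.utc(dates[0]).toISOString(), // earliest start
    [Sequelize.Op.lte]: moment.utc(dates[dates.length - 1]).toISOString(), // latest start
    [Sequelize.Op.in]: starts // exact start times only
  }
}
// where.start spans 06:30:00Z to 06:32:00Z and is restricted to the listed values.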
