From f93d00abb006a058a8e260fb4e1516aa51c94449 Mon Sep 17 00:00:00 2001
From: Evgeniia Rassokhina
Date: Tue, 31 Oct 2023 10:36:00 +0400
Subject: [PATCH] Add test cases

---
 core/internal/arbimon/bl/recordings.js       |  36 +++---
 core/internal/arbimon/recordings.int.test.js | 116 +++++++++++++++++--
 core/internal/arbimon/recordings.js          |  10 +-
 core/stream-source-files/dao/index.js        |   4 +-
 4 files changed, 138 insertions(+), 28 deletions(-)

diff --git a/core/internal/arbimon/bl/recordings.js b/core/internal/arbimon/bl/recordings.js
index 292e06561..d7547fe71 100644
--- a/core/internal/arbimon/bl/recordings.js
+++ b/core/internal/arbimon/bl/recordings.js
@@ -4,14 +4,25 @@ const { sequelize } = require('../../../_models')
 
 const TRASHES_STREAM_ID = process.env.TRASHES_STREAM_ID
 
-async function updateBatch (params) {
+async function softDeleteRecordings (params) {
   return sequelize.transaction(async (transaction) => {
-    const sourceFiles = await findSourceFiles(params, { transaction })
-    await softDeleteSegmentsBatch(TRASHES_STREAM_ID, params, { transaction })
-    await streamSourceFileDao.updateById({ stream_id: TRASHES_STREAM_ID }, sourceFiles, { transaction })
+    await softDeleteSourceFilesBatch(params, { transaction })
+    await softDeleteSegmentsBatch(params, { transaction })
   })
 }
 
+/**
+ * Move all segments belonging to streams within specified start array to the "trash" stream
+ * @param {object[]} arr Array of objects with stream ids and starts
+ * @param {*} options
+ * @param {Transaction} options.transaction Perform within given transaction
+ */
+async function softDeleteSegmentsBatch (arr, options = {}) {
+  for (const item of arr) {
+    await streamSegmentDao.updateByStreamAndStarts(item.stream, item.starts, { stream_id: TRASHES_STREAM_ID }, options)
+  }
+}
+
 /**
  * Find all source file ids belonging to a stream within specified start array
  * @param {object[]} arr Array of objects with stream ids and starts
@@ -20,25 +31,24 @@ async function findSourceFiles (arr, options = {}) {
   let sourceFiles = []
-  for (const s of arr) {
-    const segments = await streamSegmentDao.findByStreamAndStarts(s.stream, s.starts, { ...options, fields: ['stream_source_file_id'] })
+  for (const item of arr) {
+    const segments = await streamSegmentDao.findByStreamAndStarts(item.stream, item.starts, { ...options, fields: ['stream_source_file_id'] })
     sourceFiles = [...sourceFiles, ...segments.map(s => s.stream_source_file_id)]
   }
   return sourceFiles
 }
 
 /**
- * Move all segments belonging to streams within specified start array to "trash" array
- * @param {oblect} streams Stream ids and array of segments start
+ * Move all source files belonging to streams within specified start array to the "trash" stream
+ * @param {object[]} arr Array of objects with stream ids and starts
  * @param {*} options
  * @param {Transaction} options.transaction Perform within given transaction
  */
-async function softDeleteSegmentsBatch (streams, options = {}) {
-  for (const s of streams) {
-    await streamSegmentDao.updateByStreamAndStarts(s.stream, s.starts, { stream_id: TRASHES_STREAM_ID }, options)
-  }
+async function softDeleteSourceFilesBatch (arr, options = {}) {
+  const ids = await findSourceFiles(arr, options)
+  await streamSourceFileDao.updateByIds({ stream_id: TRASHES_STREAM_ID }, ids, options)
 }
 
 module.exports = {
-  updateBatch
+  softDeleteRecordings
 }
diff --git a/core/internal/arbimon/recordings.int.test.js b/core/internal/arbimon/recordings.int.test.js
index 1d4e28396..fb754c47d 100644
--- a/core/internal/arbimon/recordings.int.test.js
+++ b/core/internal/arbimon/recordings.int.test.js
@@ -11,6 +11,9 @@ process.env.TRASHES_STREAM_ID = '1delete6y3yb'
 let stream, trashesStream, audioFileFormat, audioCodec, fileExtension, testPayload, testRequestData, audioFileFormatId, audioCodecId, fileExtensionId
 resetTestData()
 
+beforeEach(async () => {
+  await commonSetup()
+})
 beforeAll(async () => {
   muteConsole('warn')
 })
@@ -23,7 +26,7 @@ afterAll(async () => {
 })
 
 function resetTestData () {
-  stream = { id: 'abcdsaqwery1', name: 'my stream', createdById: seedValues.primaryUserId }
+  stream = { id: 'abcdsaqwery1', name: 'test stream', createdById: seedValues.primaryUserId }
   trashesStream = { id: '1delete6y3yb', name: 'trashes stream', createdById: seedValues.primaryUserId }
   audioFileFormat = { value: 'flac' }
   audioCodec = { value: 'flac' }
@@ -48,9 +51,8 @@ function resetTestData () {
     ]
   }
   testRequestData = [{
-    stream: 'abcdsaqwery1',
-    starts: ['2021-04-18T12:12:00.000Z']
-
+    stream: stream.id,
+    starts: [testPayload.stream_segments[0].start]
   }]
 }
 
@@ -60,20 +62,118 @@ async function commonSetup () {
   audioFileFormatId = (await models.AudioFileFormat.findOrCreate({ where: audioFileFormat }))[0].id
   audioCodecId = (await models.AudioCodec.findOrCreate({ where: audioCodec }))[0].id
   fileExtensionId = (await models.FileExtension.findOrCreate({ where: fileExtension }))[0].id
+  const sourceFile = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, stream_id: stream.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
+  await models.StreamSegment.create({ ...testPayload.stream_segments[0], stream_id: stream.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile.id })
 }
 
 describe('DELETE internal/arbimon/recordings', () => {
-  test('stream_source_file and stream_segments is deleted', async () => {
-    await commonSetup()
-    const sourceFile = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, stream_id: stream.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
-    await models.StreamSegment.create({ ...testPayload.stream_segments[0], stream_id: stream.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile.id })
+  test('happy path', async () => {
+    const response = await request(app).delete('/recordings').send(testRequestData)
+    expect(response.statusCode).toBe(200)
+  })
+  test('deletes stream source file and stream segments', async () => {
     const response = await request(app).delete('/recordings').send(testRequestData)
     const streamSourceFiles = await models.StreamSourceFile.findAll({ where: { stream_id: stream.id } })
     const streamSegments = await models.StreamSegment.findAll({ where: { stream_id: stream.id } })
+    expect(response.statusCode).toBe(200)
     expect(streamSourceFiles.length).toBe(0)
     expect(streamSegments.length).toBe(0)
   })
+  test('can delete 1 recording', async () => {
+    const sourceFile = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, filename: '0d99db29f26d-2021-04-19T12-11-11.flac', sha1_checksum: 'e427f7bf6c589b4856d5f51691d159366d74211', stream_id: stream.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
+    await models.StreamSegment.create({ ...testPayload.stream_segments[0], start: '2021-04-18T13:12:00.000Z', end: '2021-04-18T13:13:00.000Z', stream_id: stream.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile.id })
+    const response = await request(app).delete('/recordings').send(testRequestData)
+
+    const streamSourceFiles = await models.StreamSourceFile.findAll({ where: { stream_id: stream.id } })
+    const streamSegments = await models.StreamSegment.findAll({ where: { stream_id: stream.id } })
+
+    expect(response.statusCode).toBe(200)
+    expect(streamSourceFiles.length).toBe(1)
+    expect(streamSegments.length).toBe(1)
+  })
+  test('can delete 2 recordings', async () => {
+    const sourceFile = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, filename: '0d99db29f26d-2021-04-19T12-11-11.flac', sha1_checksum: 'e427f7bf6c589b4856d5f51691d159366d74211', stream_id: stream.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
+    await models.StreamSegment.create({ ...testPayload.stream_segments[0], start: '2021-04-18T13:12:00.000Z', end: '2021-04-18T13:13:00.000Z', stream_id: stream.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile.id })
+    const sourceFile2 = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, filename: '0d99db29f26d-2021-04-19T13-11-12.flac', sha1_checksum: 'e427f7bf6c589b4856d5f51691d159366d74212', stream_id: stream.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
+    await models.StreamSegment.create({ ...testPayload.stream_segments[0], start: '2021-04-18T14:12:00.000Z', end: '2021-04-18T14:13:00.000Z', stream_id: stream.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile2.id })
+    testRequestData[0].starts.push('2021-04-18T14:12:00.000Z')
+
+    const response = await request(app).delete('/recordings').send(testRequestData)
+
+    const streamSourceFiles = await models.StreamSourceFile.findAll({ where: { stream_id: stream.id } })
+    const streamSegments = await models.StreamSegment.findAll({ where: { stream_id: stream.id } })
+
+    expect(response.statusCode).toBe(200)
+    expect(streamSourceFiles.length).toBe(1)
+    expect(streamSegments.length).toBe(1)
+  })
+  test('can delete 3 recordings', async () => {
+    const sourceFile = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, filename: '0d99db29f26d-2021-04-19T12-11-11.flac', sha1_checksum: 'e427f7bf6c589b4856d5f51691d159366d74211', stream_id: stream.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
+    await models.StreamSegment.create({ ...testPayload.stream_segments[0], start: '2021-04-18T13:12:00.000Z', end: '2021-04-18T13:13:00.000Z', stream_id: stream.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile.id })
+    const sourceFile2 = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, filename: '0d99db29f26d-2021-04-19T13-11-12.flac', sha1_checksum: 'e427f7bf6c589b4856d5f51691d159366d74212', stream_id: stream.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
+    await models.StreamSegment.create({ ...testPayload.stream_segments[0], start: '2021-04-18T14:12:00.000Z', end: '2021-04-18T14:13:00.000Z', stream_id: stream.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile2.id })
+    const sourceFile3 = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, filename: '0d99db29f26d-2021-04-19T14-11-12.flac', sha1_checksum: 'e427f7bf6c589b4856d5f51691d159366d74213', stream_id: stream.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
+    await models.StreamSegment.create({ ...testPayload.stream_segments[0], start: '2021-04-18T15:12:00.000Z', end: '2021-04-18T15:13:00.000Z', stream_id: stream.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile3.id })
+    testRequestData[0].starts.push('2021-04-18T14:12:00.000Z', '2021-04-18T15:12:00.000Z')
+
+    const response = await request(app).delete('/recordings').send(testRequestData)
+
+    const streamSourceFiles = await models.StreamSourceFile.findAll({ where: { stream_id: stream.id } })
+    const streamSegments = await models.StreamSegment.findAll({ where: { stream_id: stream.id } })
+
+    expect(response.statusCode).toBe(200)
+    expect(streamSourceFiles.length).toBe(1)
+    expect(streamSegments.length).toBe(1)
+  })
+  test('can delete recordings from different streams', async () => {
+    const sourceFile = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, filename: '0d99db29f26d-2021-04-19T12-11-11.flac', sha1_checksum: 'e427f7bf6c589b4856d5f51691d159366d74211', stream_id: stream.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
+    await models.StreamSegment.create({ ...testPayload.stream_segments[0], start: '2021-04-18T13:12:00.000Z', end: '2021-04-18T13:13:00.000Z', stream_id: stream.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile.id })
+    const testStream2 = { id: '1delete6y5yb', name: 'test stream 2', createdById: seedValues.primaryUserId }
+    await models.Stream.findOrCreate({ where: testStream2 })
+    const sourceFile2 = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, filename: '0d99db29f26d-2021-04-19T13-11-12.flac', sha1_checksum: 'e427f7bf6c589b4856d5f51691d159366d74212', stream_id: testStream2.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
+    await models.StreamSegment.create({ ...testPayload.stream_segments[0], start: '2021-04-18T14:12:00.000Z', end: '2021-04-18T14:13:00.000Z', stream_id: testStream2.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile2.id })
+    const sourceFile3 = await models.StreamSourceFile.create({ ...testPayload.stream_source_file, filename: '0d99db29f26d-2021-04-19T14-11-12.flac', sha1_checksum: 'e427f7bf6c589b4856d5f51691d159366d74213', stream_id: testStream2.id, audio_codec_id: audioCodecId, audio_file_format_id: audioFileFormatId })
+    await models.StreamSegment.create({ ...testPayload.stream_segments[0], start: '2021-04-18T15:12:00.000Z', end: '2021-04-18T15:13:00.000Z', stream_id: testStream2.id, file_extension_id: fileExtensionId, stream_source_file_id: sourceFile3.id })
+    testRequestData.push({
+      stream: testStream2.id,
+      starts: ['2021-04-18T14:12:00.000Z']
+    })
+
+    const response = await request(app).delete('/recordings').send(testRequestData)
+
+    const streamSourceFiles = await models.StreamSourceFile.findAll({ where: { stream_id: [stream.id, testStream2.id] } })
+    const streamSegments = await models.StreamSegment.findAll({ where: { stream_id: [stream.id, testStream2.id] } })
+
+    expect(response.statusCode).toBe(200)
+    expect(streamSourceFiles.length).toBe(2)
+    expect(streamSegments.length).toBe(2)
+  })
+  test('does not work for an incorrect date format', async () => {
+    testRequestData[0].starts = '20210418_121200'
+    const response = await request(app).delete('/recordings').send(testRequestData)
+
+    expect(response.statusCode).toBe(500)
+  })
+  test('does not delete anything for non-existing dates', async () => {
+    testRequestData[0].starts = '2021-02-18T13:12:00.000Z'
+    const response = await request(app).delete('/recordings').send(testRequestData)
+
+    const streamSourceFiles = await models.StreamSourceFile.findAll({ where: { stream_id: stream.id } })
+    const streamSegments = await models.StreamSegment.findAll({ where: { stream_id: stream.id } })
+
+    expect(response.statusCode).toBe(200)
+    expect(streamSourceFiles.length).toBe(1)
+    expect(streamSegments.length).toBe(1)
+  })
+  test('does not work for incorrect request fields', async () => {
+    const response = await request(app).delete('/recordings').send({
+      streams: stream.id,
+      start: [testPayload.stream_segments[0].start]
+    })
+
+    expect(response.statusCode).toBe(500)
+  })
 })
diff --git a/core/internal/arbimon/recordings.js b/core/internal/arbimon/recordings.js
index 12d632627..1ab7b6855 100644
--- a/core/internal/arbimon/recordings.js
+++ b/core/internal/arbimon/recordings.js
@@ -1,7 +1,7 @@
 const router = require('express').Router()
 const { httpErrorHandler } = require('../../../common/error-handling/http')
 const ArrayConverter = require('../../../common/converter/array')
-const { updateBatch } = require('./bl/recordings')
+const { softDeleteRecordings } = require('./bl/recordings')
 
 /**
  * @swagger
@@ -20,8 +20,8 @@
  *           $ref: '#/components/requestBodies/ArbimonRecordingsDeleteData'
  *     responses:
  *       200:
- *         description: Updated
- *       400:
+ *         description: Deleted
+ *       500:
  *         description: Invalid parameters
  */
 router.delete('/recordings', (req, res) => {
@@ -31,10 +31,10 @@ router.delete('/recordings', (req, res) => {
   return converter.validate()
     .then(async (params) => {
-      await updateBatch(params)
+      await softDeleteRecordings(params)
       return res.sendStatus(200)
     })
-    .catch(httpErrorHandler(req, res, 'Failed update stream source file and segments'))
+    .catch(httpErrorHandler(req, res, 'Failed to soft delete recordings'))
 })
 
 module.exports = router
diff --git a/core/stream-source-files/dao/index.js b/core/stream-source-files/dao/index.js
index ea078136e..55a2f1efb 100644
--- a/core/stream-source-files/dao/index.js
+++ b/core/stream-source-files/dao/index.js
@@ -261,7 +261,7 @@ function calcAvailability (segments) {
  * @returns {StreamSourceFile}
  * @throws EmptyResultError when segment not found
 */
-async function updateById (data, existingSourceFilesId, options = {}) {
+async function updateByIds (data, existingSourceFilesId, options = {}) {
   const transaction = options.transaction
   return await StreamSourceFile.update(data, { where: { id: existingSourceFilesId }, transaction })
 }
@@ -276,5 +276,5 @@ module.exports = {
   transformMetaAttr,
   format,
   calcAvailability,
-  updateById
+  updateByIds
 }
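
Reviewer note (not part of the patch): the endpoint added here, DELETE /recordings on the arbimon internal router, accepts an array of { stream, starts } objects, where `stream` is a stream id and `starts` lists the segment start times whose segments and source files should be moved to the trash stream. The sketch below only restates the request shape exercised by the integration tests above; the `payload` variable name is illustrative, and the ids and timestamps are the test fixtures, not real data.

// A minimal sketch of the payload shape, mirroring the tests:
const payload = [
  { stream: 'abcdsaqwery1', starts: ['2021-04-18T12:12:00.000Z', '2021-04-18T13:12:00.000Z'] },
  { stream: '1delete6y5yb', starts: ['2021-04-18T14:12:00.000Z'] }
]

// Sent the same way the tests do (supertest against the express app):
// await request(app).delete('/recordings').send(payload)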