diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9c0572461..23d921141 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,8 +1,10 @@
-## 1.3.12 (2024-06-xx)
+## 1.3.13 (2024-05-xx)
 
 ### Features
 
+* **core**: Add params `query_streams`, `query_start`, `query_end`, `query_hours` to the `GET /classifier-jobs` endpoint
 * **core**: Endpoint `POST /streams/:streamId/detections/:start/review` now returns the review status and id of the detection that has been reviewed in the call.
 * **core**: Create unique constraint of `(detection_id, user_id)` inside `detection_reviews` table.
 
+## 1.3.12 (2024-05-30)
+
 ### Common
 
 * **core**: Remove `jwt-custom` and merge custom into list
diff --git a/core/classifier-jobs/bl/index.js b/core/classifier-jobs/bl/index.js
index f6e44b443..9dea33188 100644
--- a/core/classifier-jobs/bl/index.js
+++ b/core/classifier-jobs/bl/index.js
@@ -104,7 +104,64 @@ function notify (id) {
   })
 }
 
+/**
+ * Get a list of classifier jobs matching the filters
+ * @param {*} filters Classifier job attributes to filter by
+ * @param {string[]} filters.projects Include only jobs belonging to one of the given project ids
+ * @param {number} filters.status Include only jobs with the given status
+ * @param {number} filters.createdBy Include only jobs created by the given user id
+ * @param {string} filters.queryStreams Include only jobs whose query streams match one of the given comma-separated stream names
+ * @param {*} filters.queryStart Include only jobs whose query date range covers the given start date
+ * @param {*} filters.queryEnd Include only jobs whose query date range covers the given end date
+ * @param {string} filters.queryHours Include only jobs whose query hours overlap the given comma-separated hour ranges
+ * @param {*} options Query options
+ * @param {string} options.sort Order the results by one or more columns
+ * @param {number} options.limit Maximum results to include
+ * @param {number} options.offset Number of results to skip
+ * @param {number} options.readableBy Include only classifier jobs readable by the given user id
+ */
+async function list (filters = {}, options = {}) {
+  const jobs = await dao.query(filters, options)
+  if (filters.queryHours) {
+    // Post-process query_hours: keep only jobs whose hours overlap the requested ranges
+    const hourRanges = filters.queryHours.split(',').map(hours => rangeToDaytimeHoursArray(hours))
+    const filteredJobs = jobs.results.filter(job => {
+      const jobQueryHours = job.queryHours.split(',').map(hours => rangeToDaytimeHoursArray(hours))
+      return isHoursOverlapped(jobQueryHours, hourRanges)
+    })
+    jobs.results = filteredJobs
+    jobs.total = filteredJobs.length
+  }
+  return jobs
+}
+
+function isHoursOverlapped (hours1, hours2) {
+  const hours1Set = new Set(hours1.flat())
+  const hours2Set = new Set(hours2.flat())
+  return [...hours1Set].some(hour => hours2Set.has(hour))
+}
+
+function rangeToDaytimeHoursArray (range) {
+  const rangeSplitted = range.match(/^(0?[0-9]|1[0-9]|2[0-3])(?:-(0?[0-9]|1[0-9]|2[0-3]))?$/)
+  if (!rangeSplitted) {
+    return []
+  }
+  const startRange = parseInt(rangeSplitted[1])
+  const endRange = rangeSplitted[2] ? parseInt(rangeSplitted[2]) : undefined
+  // return just startRange if the range is a single hour, e.g. 23
+  if (endRange === undefined) {
+    return [startRange]
+  }
+
+  // Expand the range hour by hour, wrapping past midnight (e.g. 22-2 -> [22, 23, 0, 1, 2])
+  let currentHour = startRange
+  const hours = [startRange]
+  while (currentHour !== endRange) {
+    currentHour += 1
+    if (currentHour === 24) {
+      currentHour = 0
+    }
+    hours.push(currentHour)
+  }
+  return hours
+}
+
 module.exports = {
   create,
-  update
+  update,
+  list
 }
diff --git a/core/classifier-jobs/dao/index.js b/core/classifier-jobs/dao/index.js
index 3c8f95c3c..9adbb281f 100644
--- a/core/classifier-jobs/dao/index.js
+++ b/core/classifier-jobs/dao/index.js
@@ -33,13 +33,52 @@ async function query (filters = {}, options = {}) {
   // Early return if projectIds set, but empty (no accessible projects)
   if (projectIds && projectIds.length === 0) { return { total: 0, results: [] } }
 
+  let queryStreamsFilter = {}
+  if (filters.queryStreams) {
+    const filterClause = filters.queryStreams.split(',').map((stream) => {
+      return {
+        queryStreams: {
+          [Sequelize.Op.iLike]: `%${stream}%`
+        }
+      }
+    })
+    queryStreamsFilter = {
+      [Sequelize.Op.or]: [...filterClause]
+    }
+  }
+
+  let queryTimeFilter = {}
+  if (filters.queryStart || filters.queryEnd) {
+    const start = filters.queryStart || filters.queryEnd
+    const end = filters.queryEnd || filters.queryStart
+    queryTimeFilter = {
+      [Sequelize.Op.and]: [
+        {
+          queryStart: {
+            [Sequelize.Op.lte]: start.valueOf()
+          }
+        },
+        {
+          queryEnd: {
+            [Sequelize.Op.gte]: end.valueOf()
+          }
+        }
+      ]
+    }
+  }
+
   const where = {
     ...projectIds && { projectId: { [Sequelize.Op.in]: projectIds } },
     ...filters.status !== undefined && { status: filters.status },
-    ...filters.createdBy !== undefined && { createdById: filters.createdBy }
+    ...filters.createdBy !== undefined && { createdById: filters.createdBy },
+    ...queryStreamsFilter,
+    ...queryTimeFilter
   }
 
   const attributes = options.fields && options.fields.length > 0 ? ClassifierJob.attributes.full.filter(a => options.fields.includes(a)) : ClassifierJob.attributes.lite
+  if (filters.queryHours) {
+    attributes.push('query_hours') // needed for the query_hours post-processing in bl.list
+  }
   const include = options.fields && options.fields.length > 0 ? availableIncludes.filter(i => options.fields.includes(i.as)) : availableIncludes
   const order = getSortFields(options.sort || '-created_at')
diff --git a/core/classifier-jobs/list.int.test.js b/core/classifier-jobs/list.int.test.js
index acbf5f3c3..37e71128e 100644
--- a/core/classifier-jobs/list.int.test.js
+++ b/core/classifier-jobs/list.int.test.js
@@ -271,4 +271,220 @@ describe('GET /classifier-jobs', () => {
     expect(response.body[1].id).toBe(job2.id)
     expect(response.body[2].id).toBe(job1.id)
   })
+
+  test('respects query_streams 1', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    const job1 = await models.ClassifierJob.create({ queryStreams: 'stream1', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job2 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job3 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_streams: 'stream1' })
+    expect(response.body).toHaveLength(3)
+    expect(response.headers['total-items']).toBe('3')
+    expect(response.body[0].id).toBe(job3.id)
+    expect(response.body[1].id).toBe(job2.id)
+    expect(response.body[2].id).toBe(job1.id)
+  })
+
+  test('respects query_streams 2', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    await models.ClassifierJob.create({ queryStreams: 'stream1', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job2 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job3 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_streams: 'stream2' })
+    expect(response.body).toHaveLength(2)
+    expect(response.headers['total-items']).toBe('2')
+    expect(response.body[0].id).toBe(job3.id)
+    expect(response.body[1].id).toBe(job2.id)
+  })
+
+  test('respects query_streams 3', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    await models.ClassifierJob.create({ queryStreams: 'stream1', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job3 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_streams: 'stream3' })
+    expect(response.body).toHaveLength(1)
+    expect(response.headers['total-items']).toBe('1')
+    expect(response.body[0].id).toBe(job3.id)
+  })
+
+  test('respects query_streams 4', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    await models.ClassifierJob.create({ queryStreams: 'stream1', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job2 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job3 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_streams: 'stream2,stream3' })
+    expect(response.body).toHaveLength(2)
+    expect(response.headers['total-items']).toBe('2')
+    expect(response.body[0].id).toBe(job3.id)
+    expect(response.body[1].id).toBe(job2.id)
+  })
+
+  test('respects query_start 1', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    const job1 = await models.ClassifierJob.create({ queryStreams: 'stream1', queryStart: '2024-01-01', queryEnd: '2024-01-02', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', queryStart: '2024-01-02', queryEnd: '2024-01-03', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', queryStart: '2024-01-03', queryEnd: '2024-01-04', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_start: '2024-01-01' })
+    expect(response.body).toHaveLength(1)
+    expect(response.headers['total-items']).toBe('1')
+    expect(response.body[0].id).toBe(job1.id)
+  })
+
+  test('respects query_start 2', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    const job1 = await models.ClassifierJob.create({ queryStreams: 'stream1', queryStart: '2024-01-01', queryEnd: '2024-01-02', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job2 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', queryStart: '2024-01-02', queryEnd: '2024-01-03', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', queryStart: '2024-01-03', queryEnd: '2024-01-04', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_start: '2024-01-02' })
+    expect(response.body).toHaveLength(2)
+    expect(response.headers['total-items']).toBe('2')
+    expect(response.body[0].id).toBe(job2.id)
+    expect(response.body[1].id).toBe(job1.id)
+  })
+
+  test('respects query_start 3', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    await models.ClassifierJob.create({ queryStreams: 'stream1', queryStart: '2024-01-01', queryEnd: '2024-01-02', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job2 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', queryStart: '2024-01-02', queryEnd: '2024-01-03', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job3 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', queryStart: '2024-01-03', queryEnd: '2024-01-04', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_start: '2024-01-03' })
+    expect(response.body).toHaveLength(2)
+    expect(response.headers['total-items']).toBe('2')
+    expect(response.body[0].id).toBe(job3.id)
+    expect(response.body[1].id).toBe(job2.id)
+  })
+
+  test('respects query_end 1', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    await models.ClassifierJob.create({ queryStreams: 'stream1', queryStart: '2024-01-01', queryEnd: '2024-01-02', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', queryStart: '2024-01-02', queryEnd: '2024-01-03', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', queryStart: '2024-01-03', queryEnd: '2024-01-04', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_end: '2024-01-05' })
+    expect(response.body).toHaveLength(0)
+  })
+
+  test('respects query_end 2', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    await models.ClassifierJob.create({ queryStreams: 'stream1', queryStart: '2024-01-01', queryEnd: '2024-01-02', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job2 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', queryStart: '2024-01-02', queryEnd: '2024-01-03', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job3 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', queryStart: '2024-01-03', queryEnd: '2024-01-04', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_end: '2024-01-03' })
+    expect(response.body).toHaveLength(2)
+    expect(response.headers['total-items']).toBe('2')
+    expect(response.body[0].id).toBe(job3.id)
+    expect(response.body[1].id).toBe(job2.id)
+  })
+
+  test('respects query_end 3', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    const job1 = await models.ClassifierJob.create({ queryStreams: 'stream1', queryStart: '2024-01-01', queryEnd: '2024-01-02', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job2 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', queryStart: '2024-01-02', queryEnd: '2024-01-03', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', queryStart: '2024-01-03', queryEnd: '2024-01-04', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_end: '2024-01-02' })
+    expect(response.body).toHaveLength(2)
+    expect(response.headers['total-items']).toBe('2')
+    expect(response.body[0].id).toBe(job2.id)
+    expect(response.body[1].id).toBe(job1.id)
+  })
+
+  test('respects query_hours 1', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    const job1 = await models.ClassifierJob.create({ queryStreams: 'stream1', queryStart: '2024-01-01', queryEnd: '2024-01-02', queryHours: '1-3', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', queryStart: '2024-01-02', queryEnd: '2024-01-03', queryHours: '9-14', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', queryStart: '2024-01-03', queryEnd: '2024-01-04', queryHours: '20-22', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response1 = await request(app).get('/').query({ query_hours: '1' })
+
+    expect(response1.body).toHaveLength(4)
+    expect(response1.headers['total-items']).toBe('4')
+    expect(response1.body[0].id).toBe(job1.id)
+
+    const response2 = await request(app).get('/').query({ query_hours: '2-3' })
+
+    expect(response2.body).toHaveLength(4)
+    expect(response2.headers['total-items']).toBe('4')
+    expect(response2.body[0].id).toBe(job1.id)
+
+    const response3 = await request(app).get('/').query({ query_hours: '3' })
+
+    expect(response3.body).toHaveLength(1)
+    expect(response3.headers['total-items']).toBe('1')
+    expect(response3.body[0].id).toBe(job1.id)
+  })
+
+  test('respects query_hours 2', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    await models.ClassifierJob.create({ queryStreams: 'stream1', queryStart: '2024-01-01', queryEnd: '2024-01-02', queryHours: '1-3', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job2 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', queryStart: '2024-01-02', queryEnd: '2024-01-03', queryHours: '9-14', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', queryStart: '2024-01-03', queryEnd: '2024-01-04', queryHours: '20-22', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response1 = await request(app).get('/').query({ query_hours: '9' })
+
+    expect(response1.body).toHaveLength(1)
+    expect(response1.headers['total-items']).toBe('1')
+    expect(response1.body[0].id).toBe(job2.id)
+
+    const response2 = await request(app).get('/').query({ query_hours: '9-10' })
+
+    expect(response2.body).toHaveLength(1)
+    expect(response2.headers['total-items']).toBe('1')
+    expect(response2.body[0].id).toBe(job2.id)
+
+    const response3 = await request(app).get('/').query({ query_hours: '13-19' })
+
+    expect(response3.body).toHaveLength(2)
+    expect(response3.headers['total-items']).toBe('2')
+    expect(response3.body[0].id).toBe(job2.id)
+    expect(response3.body[1].id).toBe(JOB_2.id)
+  })
+
+  test('respects all query params', async () => {
+    const classifierId = CLASSIFIER_1.id
+    const projectId = PROJECT_1.id
+    // Waiting cancel jobs (3)
+    await models.ClassifierJob.create({ queryStreams: 'stream1', queryStart: '2024-01-01', queryEnd: '2024-01-02', queryHours: '1-3', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    const job2 = await models.ClassifierJob.create({ queryStreams: 'stream1,stream2', queryStart: '2024-01-02', queryEnd: '2024-01-03', queryHours: '9-14', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+    await models.ClassifierJob.create({ queryStreams: 'stream1,stream2,stream3', queryStart: '2024-01-03', queryEnd: '2024-01-04', queryHours: '20-22', status: AWAITING_CANCELLATION, classifierId, projectId, createdById: seedValues.otherUserId })
+
+    const response = await request(app).get('/').query({ query_streams: 'stream2', query_start: '2024-01-02', query_end: '2024-01-02', query_hours: '2-4,5-8,12-14,16,17' })
+
+    expect(response.body).toHaveLength(1)
+    expect(response.headers['total-items']).toBe('1')
+    expect(response.body[0].id).toBe(job2.id)
+  })
 })
diff --git a/core/classifier-jobs/list.js b/core/classifier-jobs/list.js
index e20f3d91b..3c51d2eca 100644
--- a/core/classifier-jobs/list.js
+++ b/core/classifier-jobs/list.js
@@ -1,5 +1,5 @@
 const { httpErrorHandler } = require('../../common/error-handling/http')
-const { query } = require('./dao')
+const { list } = require('./bl')
 const Converter = require('../../common/converter')
 
 /**
@@ -42,6 +42,22 @@ const Converter = require('../../common/converter')
 *         description: Customize included fields and relations
 *         in: query
 *         type: array
+*       - name: query_streams
+*         description: Comma-separated list of stream names to match against the job's query streams
+*         in: query
+*         type: string
+*       - name: query_start
+*         description: Start date (YYYY-MM-DD) of the recordings used by the job
+*         in: query
+*         type: date
+*       - name: query_end
+*         description: End date (YYYY-MM-DD) of the recordings used by the job
+*         in: query
+*         type: date
+*       - name: query_hours
+*         description: Hour ranges (e.g. 1, 0-23, 2,3,4 or 2,4,5-10) of the recordings used by the job
+*         in: query
+*         type: string
 *     responses:
 *       200:
 *         description: List of classifier jobs objects
@@ -68,19 +84,21 @@ module.exports = (req, res) => {
   converter.convert('offset').default(0).toInt()
   converter.convert('sort').optional().toString()
   converter.convert('fields').optional().toArray()
+  converter.convert('query_streams').optional().toString()
+  converter.convert('query_start').optional().toMomentUtc()
+  converter.convert('query_end').optional().toMomentUtc()
+  converter.convert('query_hours').optional().toString()
 
   return converter.validate()
     .then(async params => {
-      const { status, projects, limit, offset, sort, fields } = params
-
+      const { status, projects, limit, offset, sort, fields, queryStreams, queryStart, queryEnd, queryHours } = params
       const user = req.rfcx.auth_token_info
       const readableBy = user && (user.is_super || user.has_system_role) ? undefined : user.id
       const createdBy = params.createdBy === 'me' ? readableBy : undefined
-      const filters = { projects, status, createdBy }
+      const filters = { projects, status, createdBy, queryStreams, queryStart, queryEnd, queryHours }
       const options = { readableBy, limit, offset, sort, fields }
 
-      const { total, results } = await query(filters, options)
-
+      const { total, results } = await list(filters, options)
       return res.header('Access-Control-Expose-Headers', 'Total-Items').header('Total-Items', total).json(results)
     })
     .catch(httpErrorHandler(req, res))
diff --git a/package.json b/package.json
index 53fe95a7f..704414f30 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "rfcx-api",
-  "version": "1.3.12",
+  "version": "1.3.13",
   "license": "UNLICENSED",
   "private": false,
   "repository": {
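
Usage sketch: a minimal example of how a client might combine the new filters on GET /classifier-jobs, assuming a Node 18+ runtime with global fetch and placeholder BASE_URL / TOKEN values (neither is defined anywhere in this change). The semantics follow the code above: each name in query_streams is matched with a per-name iLike, the job's own query date range must cover query_start/query_end, and query_hours matches when any expanded hour overlaps.

// Illustrative only: BASE_URL and TOKEN are placeholders, not part of this change.
const BASE_URL = 'https://api.example.org'
const TOKEN = '<bearer token>'

async function findClassifierJobs () {
  const params = new URLSearchParams({
    query_streams: 'stream2', // any job whose query streams contain "stream2"
    query_start: '2024-01-02', // job's query range must start on or before this date...
    query_end: '2024-01-02', // ...and end on or after this date
    query_hours: '9-14' // job's query hours must overlap hours 9 through 14
  })
  const response = await fetch(`${BASE_URL}/classifier-jobs?${params}`, {
    headers: { Authorization: `Bearer ${TOKEN}` }
  })
  // Total-Items mirrors the header asserted by the integration tests above
  console.log('Total-Items:', response.headers.get('Total-Items'))
  return response.json()
}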