From 6a1590271527f2d9038e32136867f299d1f8653f Mon Sep 17 00:00:00 2001 From: Fahmi Bnchi Date: Wed, 23 Aug 2023 00:58:50 +0300 Subject: [PATCH 001/185] Add youtube integration --- .../api/integration/helpers/youtubeConnect.ts | 9 + backend/src/api/integration/index.ts | 5 + backend/src/services/integrationService.ts | 75 ++++++- .../src/integrations/integrations-config.js | 2 + .../components/youtube-connect-drawer.vue | 199 ++++++++++++++++++ .../youtube/components/youtube-connect.vue | 31 +++ frontend/src/integrations/youtube/config.js | 26 +++ frontend/src/integrations/youtube/index.js | 3 + .../integration/integration-service.js | 13 ++ .../modules/integration/integration-store.js | 30 +++ .../src/integrations/activityTypes.ts | 14 ++ .../src/integrations/prettyActivityTypes.ts | 3 + .../src/integrations/youtube/api/comments.ts | 36 ++++ .../src/integrations/youtube/api/videos.ts | 37 ++++ .../youtube/api/videosByKeywords.ts | 46 ++++ .../integrations/youtube/generateStreams.ts | 27 +++ .../src/integrations/youtube/grid.ts | 10 + .../src/integrations/youtube/index.ts | 22 ++ .../integrations/youtube/memberAttributes.ts | 24 +++ .../src/integrations/youtube/processData.ts | 65 ++++++ .../src/integrations/youtube/processStream.ts | 74 +++++++ .../src/integrations/youtube/types.ts | 114 ++++++++++ 22 files changed, 864 insertions(+), 1 deletion(-) create mode 100644 backend/src/api/integration/helpers/youtubeConnect.ts create mode 100644 frontend/src/integrations/youtube/components/youtube-connect-drawer.vue create mode 100644 frontend/src/integrations/youtube/components/youtube-connect.vue create mode 100644 frontend/src/integrations/youtube/config.js create mode 100644 frontend/src/integrations/youtube/index.js create mode 100644 services/libs/integrations/src/integrations/youtube/api/comments.ts create mode 100644 services/libs/integrations/src/integrations/youtube/api/videos.ts create mode 100644 services/libs/integrations/src/integrations/youtube/api/videosByKeywords.ts create mode 100644 services/libs/integrations/src/integrations/youtube/generateStreams.ts create mode 100644 services/libs/integrations/src/integrations/youtube/grid.ts create mode 100644 services/libs/integrations/src/integrations/youtube/index.ts create mode 100644 services/libs/integrations/src/integrations/youtube/memberAttributes.ts create mode 100644 services/libs/integrations/src/integrations/youtube/processData.ts create mode 100644 services/libs/integrations/src/integrations/youtube/processStream.ts create mode 100644 services/libs/integrations/src/integrations/youtube/types.ts diff --git a/backend/src/api/integration/helpers/youtubeConnect.ts b/backend/src/api/integration/helpers/youtubeConnect.ts new file mode 100644 index 0000000000..c8ec4c89c4 --- /dev/null +++ b/backend/src/api/integration/helpers/youtubeConnect.ts @@ -0,0 +1,9 @@ +import Permissions from '../../../security/permissions' +import IntegrationService from '../../../services/integrationService' +import PermissionChecker from '../../../services/user/permissionChecker' + +export default async (req, res) => { + new PermissionChecker(req).validateHas(Permissions.values.tenantEdit) + const payload = await new IntegrationService(req).connectYoutube(req.body) + await req.responseHandler.success(req, res, payload) +} diff --git a/backend/src/api/integration/index.ts b/backend/src/api/integration/index.ts index 4c98042a45..a3760ae38d 100644 --- a/backend/src/api/integration/index.ts +++ b/backend/src/api/integration/index.ts @@ -132,6 +132,11 @@ 
export default (app) => { safeWrap(require('./helpers/hubspotStopSyncOrganization').default), ) + app.post( + '/tenant/:tenantId/youtube-connect', + safeWrap(require('./helpers/youtubeConnect').default), + ) + // if (TWITTER_CONFIG.clientId) { // /** // * Using the passport.authenticate this endpoint forces a diff --git a/backend/src/services/integrationService.ts b/backend/src/services/integrationService.ts index 74bca1a480..2940b9c229 100644 --- a/backend/src/services/integrationService.ts +++ b/backend/src/services/integrationService.ts @@ -1,7 +1,7 @@ import { createAppAuth } from '@octokit/auth-app' import { request } from '@octokit/request' import moment from 'moment' -import axios from 'axios' +import axios, { AxiosRequestConfig } from 'axios' import { PlatformType } from '@crowd/types' import { HubspotFieldMapperFactory, @@ -1403,4 +1403,77 @@ export default class IntegrationService { return integration } + + async connectYoutube(integrationData) { + this.options.log.info('Creating youtube integration!!') + let playlistId = null + let channelId = null + let keywords = null + let shouldVerifyChannelId = true + + const isValidKeyword = integrationData.keywords && Array.isArray(integrationData.keywords) + if (isValidKeyword && integrationData.keywords.length > 0) { + keywords = integrationData.keywords + shouldVerifyChannelId = false + } + + if (shouldVerifyChannelId) { + try { + const channelDetailsConfig: AxiosRequestConfig = { + method: 'get', + url: `https://www.googleapis.com/youtube/v3/channels`, + params: { + key: integrationData.apiKey, + id: integrationData.channelId, + part: 'contentDetails', + } + } + + const response = (await axios(channelDetailsConfig)).data + const channelDetails = response.items[0] + channelId = channelDetails.id + playlistId = channelDetails.contentDetails.relatedPlaylists.uploads; + } catch (err) { + throw new Error400(`The channel id your provided channel : ${integrationData.channelId} or the api key : ${integrationData.apiKey} provided is not valid`) + } + } + + let integration + const transaction = await SequelizeRepository.createTransaction(this.options) + + try { + integration = await this.createOrUpdate( + { + platform: PlatformType.YOUTUBE, + settings: { + apiKey: integrationData.apiKey, + uploadPlaylistId: playlistId, + channelId: channelId, + keywords + }, + status: 'in-progress', + }, + transaction, + ) + + await SequelizeRepository.commitTransaction(transaction) + } catch (err) { + await SequelizeRepository.rollbackTransaction(transaction) + throw err + } + + this.options.log.info( + { tenantId: integration.tenantId }, + 'Sending Youtube message to int-run-worker!', + ) + const emitter = await getIntegrationRunWorkerEmitter() + await emitter.triggerIntegrationRun( + integration.tenantId, + integration.platform, + integration.id, + true, + ) + + return integration + } } diff --git a/frontend/src/integrations/integrations-config.js b/frontend/src/integrations/integrations-config.js index 2483760941..f041730beb 100644 --- a/frontend/src/integrations/integrations-config.js +++ b/frontend/src/integrations/integrations-config.js @@ -15,6 +15,7 @@ import crunchbase from './crunchbase'; import git from './git'; import facebook from './facebook'; import n8n from './n8n'; +import youtube from './youtube'; class IntegrationsConfig { get integrations() { @@ -36,6 +37,7 @@ class IntegrationsConfig { hubspot, // make, facebook, + youtube, }; } diff --git a/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue 
b/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue new file mode 100644 index 0000000000..006716c430 --- /dev/null +++ b/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue @@ -0,0 +1,199 @@ + + + + + diff --git a/frontend/src/integrations/youtube/components/youtube-connect.vue b/frontend/src/integrations/youtube/components/youtube-connect.vue new file mode 100644 index 0000000000..2787b14505 --- /dev/null +++ b/frontend/src/integrations/youtube/components/youtube-connect.vue @@ -0,0 +1,31 @@ + + + + + diff --git a/frontend/src/integrations/youtube/config.js b/frontend/src/integrations/youtube/config.js new file mode 100644 index 0000000000..56fa066251 --- /dev/null +++ b/frontend/src/integrations/youtube/config.js @@ -0,0 +1,26 @@ +import StackOverflowConnect from './components/youtube-connect.vue'; + +export default { + enabled: true, + name: 'Youtube', + backgroundColor: '#FFFFFF', + borderColor: '#FFFFFF', + description: + 'Connect Youtube to sync comments based on channels or selected keywords.', + image: + 'https://cdn-icons-png.flaticon.com/512/174/174883.png', + connectComponent: StackOverflowConnect, + url: (username) => `https://www.youtube.com/channel/${username}`, + chartColor: '#FF9845', + showProfileLink: true, + activityDisplay: { + showLinkToUrl: true, + }, + conversationDisplay: { + replyContent: (conversation) => ({ + icon: 'ri-reply-line', + copy: 'reply', + number: conversation.activityCount - 1, + }), + }, +}; diff --git a/frontend/src/integrations/youtube/index.js b/frontend/src/integrations/youtube/index.js new file mode 100644 index 0000000000..e81a18f3ed --- /dev/null +++ b/frontend/src/integrations/youtube/index.js @@ -0,0 +1,3 @@ +import config from './config'; + +export default config; diff --git a/frontend/src/modules/integration/integration-service.js b/frontend/src/modules/integration/integration-service.js index 419d5bc9b2..6d51bcd503 100644 --- a/frontend/src/modules/integration/integration-service.js +++ b/frontend/src/modules/integration/integration-service.js @@ -348,4 +348,17 @@ export class IntegrationService { return response.data.isWebhooksReceived; } + + static async youtubeConnect(reqBody) { + // Getting the tenant_id + const tenantId = AuthCurrentTenant.get(); + + // Calling connect devto function in the backend. + const response = await authAxios.post( + `/tenant/${tenantId}/youtube-connect`, + reqBody, + ); + + return response.data; + } } diff --git a/frontend/src/modules/integration/integration-store.js b/frontend/src/modules/integration/integration-store.js index 11fa998513..179ddf328f 100644 --- a/frontend/src/modules/integration/integration-store.js +++ b/frontend/src/modules/integration/integration-store.js @@ -546,5 +546,35 @@ export default { commit('CREATE_ERROR'); } }, + + async doYoutubeConnect( + { commit }, + reqBody + ) { + // Function to connect to Dev.to. We just need to store the + // users and organizations we want to track + + try { + commit('CREATE_STARTED'); + + const integration = await IntegrationService.youtubeConnect(reqBody); + + commit('CREATE_SUCCESS', integration); + + Message.success( + 'The first activities will show up in a couple of seconds.
<br /> <br />
' + + 'This process might take a few minutes to finish, depending on the amount of data.', + { + title: + 'Youtube integration created successfully', + }, + ); + + router.push('/integrations'); + } catch (error) { + Errors.handle(error); + commit('CREATE_ERROR'); + } + }, }, }; diff --git a/services/libs/integrations/src/integrations/activityTypes.ts b/services/libs/integrations/src/integrations/activityTypes.ts index d6045e6de1..90ef6c8843 100644 --- a/services/libs/integrations/src/integrations/activityTypes.ts +++ b/services/libs/integrations/src/integrations/activityTypes.ts @@ -1,5 +1,6 @@ import { ActivityTypeDisplayProperties, DefaultActivityTypes, PlatformType } from '@crowd/types' import { DevToActivityType } from './devto/types' +import { YoutubeActivityType } from './youtube/types' import { GithubActivityType } from './github/types' import { LinkedinActivityType } from './premium/linkedin/types' import { StackOverflowActivityType } from './stackoverflow/types' @@ -12,6 +13,7 @@ import { isUrl } from '@crowd/common' import { DiscordActivityType } from './discord/types' import { GITHUB_GRID } from './github/grid' import { DEVTO_GRID } from './devto/grid' +import { Youtube_GRID } from './youtube/grid' import { DISCORD_GRID } from './discord/grid' import { HACKERNEWS_GRID } from './hackernews/grid' import { LINKEDIN_GRID } from './premium/linkedin/grid' @@ -478,6 +480,18 @@ export const DEFAULT_ACTIVITY_TYPE_SETTINGS: DefaultActivityTypes = { isContribution: true, }, }, + [PlatformType.YOUTUBE]: { + [YoutubeActivityType.COMMENT]: { + display: { + default: + 'commented on {attributes.videoTitle}', + short: 'commented', + channel: + '{attributes.videoTitle', + }, + isContribution: Youtube_GRID[YoutubeActivityType.COMMENT].isContribution, + }, + }, [PlatformType.DEVTO]: { [DevToActivityType.COMMENT]: { display: { diff --git a/services/libs/integrations/src/integrations/prettyActivityTypes.ts b/services/libs/integrations/src/integrations/prettyActivityTypes.ts index bd6599912e..ed3bd97118 100644 --- a/services/libs/integrations/src/integrations/prettyActivityTypes.ts +++ b/services/libs/integrations/src/integrations/prettyActivityTypes.ts @@ -54,6 +54,9 @@ export const prettyActivityTypes = { [PlatformType.DEVTO]: { comment: 'comment', }, + [PlatformType.YOUTUBE]: { + comment: 'comment', + }, [PlatformType.HACKERNEWS]: { [HackerNewsActivityType.POST]: 'posted', [HackerNewsActivityType.COMMENT]: 'commented ', diff --git a/services/libs/integrations/src/integrations/youtube/api/comments.ts b/services/libs/integrations/src/integrations/youtube/api/comments.ts new file mode 100644 index 0000000000..684e48c152 --- /dev/null +++ b/services/libs/integrations/src/integrations/youtube/api/comments.ts @@ -0,0 +1,36 @@ +import axios, { AxiosRequestConfig } from 'axios' +import { YoutubeVideoStreamConfig, YoutubeCommentThreadSearch } from '../types' +import { IProcessStreamContext } from '@/types' + +export const getComments = async (ctx: IProcessStreamContext): Promise => { + const videoConfig = ctx.stream.data as YoutubeVideoStreamConfig + + try { + const getThreadedCommentsConfig: AxiosRequestConfig = { + method: 'get', + url: `https://www.googleapis.com/youtube/v3/commentThreads`, + params: { + key: videoConfig.apiKey, + videoId: videoConfig.videoId, + order: 'time', + maxResults: 100, + part: 'snippet', + } + } + + const shouldLoadNextPage = videoConfig.nextPageToken && videoConfig.nextPageToken != '' + if (shouldLoadNextPage) { + getThreadedCommentsConfig.params.pageToken = 
videoConfig.nextPageToken + } + + const response = (await axios(getThreadedCommentsConfig)).data + return response + } catch (err) { + // we've hit the limit for getting data, or there's an error; either way just log it + ctx.log.error( + { err, videoConfig }, + 'Error while using the youtube comments api to get video comments', + ) + throw err + } +} diff --git a/services/libs/integrations/src/integrations/youtube/api/videos.ts new file mode 100644 index 0000000000..4bb70a2088 --- /dev/null +++ b/services/libs/integrations/src/integrations/youtube/api/videos.ts @@ -0,0 +1,37 @@ +import axios, { AxiosRequestConfig } from 'axios' +import { YoutubeIntegrationSettings, YoutubeVideoSearch } from '../types' +import { IProcessStreamContext } from '@/types' + +export const getVideos = async (ctx: IProcessStreamContext): Promise<YoutubeVideoSearch> => { + const channelSettings = ctx.stream.data as YoutubeIntegrationSettings + + try { + const getChannelVideosConfig: AxiosRequestConfig = { + method: 'get', + url: `https://www.googleapis.com/youtube/v3/search`, + params: { + key: channelSettings.apiKey, + channelId: channelSettings.channelId, + type: 'video', + order: 'date', + maxResults: 50, + part: 'snippet' + } + } + + const shouldLoadNextPage = channelSettings.nextPageToken && channelSettings.nextPageToken != '' + if (shouldLoadNextPage) { + getChannelVideosConfig.params.pageToken = channelSettings.nextPageToken + } + + const response = (await axios(getChannelVideosConfig)).data + return response + } catch (err) { + // we've hit the limit for getting data, or there's an error; either way just log it + ctx.log.error( + { err, channelSettings }, + 'Error while using the youtube search api to get videos', + ) + throw err + } +} diff --git a/services/libs/integrations/src/integrations/youtube/api/videosByKeywords.ts new file mode 100644 index 0000000000..6a1d6dcc18 --- /dev/null +++ b/services/libs/integrations/src/integrations/youtube/api/videosByKeywords.ts @@ -0,0 +1,46 @@ +import axios, { AxiosRequestConfig } from 'axios' +import { YoutubeIntegrationSettings, YoutubeVideoSearch } from '../types' +import { IProcessStreamContext } from '@/types' + +export const getVideosByKeywords = async (ctx: IProcessStreamContext): Promise<YoutubeVideoSearch> => { + const channelSettings = ctx.stream.data as YoutubeIntegrationSettings + + const now = new Date() + const publishedAfter = new Date(now.getTime() - (1000 * 60 * 60 * 24 * 30)) // 30 days + + try { + const getVideosByKeywordsRequestConfig: AxiosRequestConfig = { + method: 'get', + url: `https://www.googleapis.com/youtube/v3/search`, + params: { + key: channelSettings.apiKey, + type: 'video', + order: 'date', + maxResults: 50, + publishedAfter: publishedAfter.toISOString(), + part: 'snippet' + } + } + + const includesKeywords = channelSettings.keywords && channelSettings.keywords.length > 0 + if (includesKeywords) { + // URL escape pipe(|) for the OR operator + getVideosByKeywordsRequestConfig.params.q = channelSettings.keywords.join('%7C') + } + + const shouldLoadNextPage = channelSettings.nextPageToken && channelSettings.nextPageToken != '' + if (shouldLoadNextPage) { + getVideosByKeywordsRequestConfig.params.pageToken = channelSettings.nextPageToken + } + + const response = (await axios(getVideosByKeywordsRequestConfig)).data + return response + } catch (err) { + // we've hit the limit for getting more data, or there's an error; either way just log
it + ctx.log.error( + { err, channelSettings }, + 'Error while using the youtube search api to get videos', + ) + throw err + } +} diff --git a/services/libs/integrations/src/integrations/youtube/generateStreams.ts b/services/libs/integrations/src/integrations/youtube/generateStreams.ts new file mode 100644 index 0000000000..11c8daf60b --- /dev/null +++ b/services/libs/integrations/src/integrations/youtube/generateStreams.ts @@ -0,0 +1,27 @@ +// generateStreams.ts content +import { YoutubeRootStream, YoutubeIntegrationSettings } from './types' +import { GenerateStreamsHandler } from '../../types' + +const handler: GenerateStreamsHandler = async (ctx) => { + const channelSettings = ctx.integration.settings as YoutubeIntegrationSettings + + if (channelSettings.keywords && channelSettings.keywords.length > 0) { + const searchQuery = channelSettings.keywords.join('|') + await ctx.publishStream( + `${YoutubeRootStream.KEYWORDS_SEARCH}:${searchQuery}`, + { + ...channelSettings, + keywords: channelSettings.keywords + } + ) + } else if (channelSettings.channelId) { + await ctx.publishStream( + `${YoutubeRootStream.UPLOADED_VIDEOS}:${channelSettings.channelId}`, + { + ...channelSettings + } + ) + } +} + +export default handler diff --git a/services/libs/integrations/src/integrations/youtube/grid.ts b/services/libs/integrations/src/integrations/youtube/grid.ts new file mode 100644 index 0000000000..7e1c2f45a8 --- /dev/null +++ b/services/libs/integrations/src/integrations/youtube/grid.ts @@ -0,0 +1,10 @@ +// grid.ts content +import { IActivityScoringGrid } from '@crowd/types' +import { YoutubeActivityType } from './types' + +export const Youtube_GRID: Record = { + [YoutubeActivityType.COMMENT]: { + score: 6, + isContribution: true + } +} diff --git a/services/libs/integrations/src/integrations/youtube/index.ts b/services/libs/integrations/src/integrations/youtube/index.ts new file mode 100644 index 0000000000..59e4e00796 --- /dev/null +++ b/services/libs/integrations/src/integrations/youtube/index.ts @@ -0,0 +1,22 @@ +// index.ts content +import { IIntegrationDescriptor } from '../../types' +import generateStreams from './generateStreams' +import { YOUTUBE_MEMBER_ATTRIBUTES } from './memberAttributes' +import processStream from './processStream' +import processData from './processData' +import { PlatformType } from '@crowd/types' + +const descriptor: IIntegrationDescriptor = { + type: PlatformType.YOUTUBE, + memberAttributes: YOUTUBE_MEMBER_ATTRIBUTES, + checkEvery: 60, + generateStreams, + processStream, + processData, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + postProcess: (settings: any) => { + return settings + }, +} + +export default descriptor diff --git a/services/libs/integrations/src/integrations/youtube/memberAttributes.ts b/services/libs/integrations/src/integrations/youtube/memberAttributes.ts new file mode 100644 index 0000000000..4968021270 --- /dev/null +++ b/services/libs/integrations/src/integrations/youtube/memberAttributes.ts @@ -0,0 +1,24 @@ +// memberAttributes.ts content +import { + IMemberAttribute, + MemberAttributeName, + MemberAttributeType, + MemberAttributes, +} from '@crowd/types' + +export const YOUTUBE_MEMBER_ATTRIBUTES: IMemberAttribute[] = [ + { + name: MemberAttributes[MemberAttributeName.SOURCE_ID].name, + label: MemberAttributes[MemberAttributeName.SOURCE_ID].label, + type: MemberAttributeType.STRING, + canDelete: false, + show: false, + }, + { + name: MemberAttributes[MemberAttributeName.URL].name, + label: 
MemberAttributes[MemberAttributeName.URL].label, + type: MemberAttributeType.STRING, + canDelete: false, + show: true, + } +] diff --git a/services/libs/integrations/src/integrations/youtube/processData.ts b/services/libs/integrations/src/integrations/youtube/processData.ts new file mode 100644 index 0000000000..08333a730d --- /dev/null +++ b/services/libs/integrations/src/integrations/youtube/processData.ts @@ -0,0 +1,65 @@ +// processData.ts content +import {IActivityData, IMemberData, MemberAttributeName, PlatformType } from '@crowd/types' +import { YoutubeVideoStreamConfig, YoutubeComment, YoutubeActivityType, YoutubeCommentThreadSearch} from './types' +import { ProcessDataHandler, IProcessDataContext} from '../../types' +import { Youtube_GRID } from './grid' + +const handler: ProcessDataHandler = async (ctx) => { + const data = ctx.data as {video: YoutubeVideoStreamConfig, comments: YoutubeCommentThreadSearch} + const comments = data.comments + const video = data.video + + for (const comment of comments.items) { + const singleComment = comment.snippet.topLevelComment + await processComment(ctx, video, singleComment) + } +} + +async function processComment( + ctx: IProcessDataContext, + video: YoutubeVideoStreamConfig, + comment: YoutubeComment +) { + const member = getMember(comment) + const scoring = Youtube_GRID[YoutubeActivityType.COMMENT] + + const parentCommentUrl = `https://www.youtube.com/watch?v=${video.videoId}&lc=${comment.id}` + const activity: IActivityData = { + type: YoutubeActivityType.COMMENT, + timestamp: comment.snippet.publishedAt, + sourceId: comment.id, + score: scoring.score, + isContribution: scoring.isContribution, + body: comment.snippet.textOriginal, + url: parentCommentUrl, + attributes: { + videoUrl: `https://www.youtube.com/watch?v=${video.videoId}`, + videoTitle: video.title, + authorChannelUrl: comment.snippet.authorChannelUrl + }, + member + } + await ctx.publishActivity(activity) +} + +function getMember(comment: YoutubeComment): IMemberData { + const member: IMemberData = { + displayName: comment.snippet.authorDisplayName, + identities: [ + { + platform: PlatformType.YOUTUBE, + sourceId: comment.snippet.authorChannelId.value, + username: comment.snippet.authorChannelId.value + }, + ], + attributes: { + [MemberAttributeName.URL]: { + [PlatformType.YOUTUBE]: comment.snippet.authorChannelUrl, + }, + }, + } + + return member +} + +export default handler diff --git a/services/libs/integrations/src/integrations/youtube/processStream.ts b/services/libs/integrations/src/integrations/youtube/processStream.ts new file mode 100644 index 0000000000..5bca1ec556 --- /dev/null +++ b/services/libs/integrations/src/integrations/youtube/processStream.ts @@ -0,0 +1,74 @@ +// processStream.ts content +import { YoutubeRootStream, YoutubeIntegrationSettings, YoutubeVideoStreamConfig } from './types' +import { getVideos } from './api/videos' +import { getComments } from './api/comments' +import { getVideosByKeywords } from './api/videosByKeywords' +import { IProcessStreamContext, ProcessStreamHandler } from '../../types' + +const handler: ProcessStreamHandler = async (ctx) => { + if (ctx.stream.identifier.startsWith(YoutubeRootStream.UPLOADED_VIDEOS)) { + await handleChannelStream(ctx) + } else if (ctx.stream.identifier.startsWith(YoutubeRootStream.KEYWORDS_SEARCH)) { + await handleKeywordsSearchStream(ctx) + } else { + await handleVideoStream(ctx) + } +} + +async function handleChannelStream(ctx: IProcessStreamContext) { + const channelSettings = ctx.stream.data as 
YoutubeIntegrationSettings + const videos = await getVideos(ctx) + + for (const video of videos.items) { + const videoId = video.id.videoId + + await ctx.publishStream(`${YoutubeRootStream.CHANNEL_VIDEO}:${videoId}`, { + title: video.snippet.title, + videoId, + apiKey: channelSettings.apiKey + }) + } + + const shouldLoadNextPage = videos.nextPageToken && videos.nextPageToken != '' + if (shouldLoadNextPage) { + await ctx.publishStream( + `${YoutubeRootStream.UPLOADED_VIDEOS}:${channelSettings.channelId}:${channelSettings.nextPageToken}`, + { + ...channelSettings, + nextPageToken: videos.nextPageToken + }, + ) + } +} + +async function handleKeywordsSearchStream(ctx: IProcessStreamContext) { + const channelSettings = ctx.stream.data as YoutubeIntegrationSettings + const videos = await getVideosByKeywords(ctx) + + for (const video of videos.items) { + const videoId = video.id.videoId + + await ctx.publishStream(`${YoutubeRootStream.CHANNEL_VIDEO}:${videoId}`, { + title: video.snippet.title, + videoId, + apiKey: channelSettings.apiKey + }) + } +} + +async function handleVideoStream(ctx: IProcessStreamContext) { + const videoConfig = ctx.stream.data as YoutubeVideoStreamConfig + const comments = await getComments(ctx) + + await ctx.publishData({ video: videoConfig, comments: comments }) + + const shouldLoadNextPage = comments.nextPageToken && comments.nextPageToken != '' + if (shouldLoadNextPage) { + await ctx.publishStream(`${YoutubeRootStream.CHANNEL_VIDEO}:${videoConfig.videoId}:${comments.nextPageToken}`, { + ...videoConfig, + nextPageToken: comments.nextPageToken + }) + } +} + +export default handler diff --git a/services/libs/integrations/src/integrations/youtube/types.ts b/services/libs/integrations/src/integrations/youtube/types.ts new file mode 100644 index 0000000000..3966999e1e --- /dev/null +++ b/services/libs/integrations/src/integrations/youtube/types.ts @@ -0,0 +1,114 @@ +// types.ts content +export enum YoutubeActivityType { + COMMENT = 'comment', +} + +export enum YoutubeRootStream { + UPLOADED_VIDEOS = 'uploaded_videos', + CHANNEL_VIDEO = 'channel_video', + KEYWORDS_SEARCH = 'keywords_search' +} + +export interface YoutubeIntegrationSettings { + apiKey: string + uploadPlaylistId: string + channelId: string + nextPageToken?: string + keywords?: string[] +} + +export interface YoutubeVideoStreamConfig { + videoId: string + apiKey: string + title: string + nextPageToken?: string +} + +export interface YoutubeVideoSearch { + kind: string + etag: string + nextPageToken: string + regionCode: string + pageInfo: { + totalResults: number + resultsPerPage: number + } + items: YoutubeChannelSearchItem[] +} + +interface YoutubeChannelSearchItem { + kind: string + etag: string + id: { + kind: string + videoId: string + }, + snippet: { + publishedAt: string + channelId: string + title: string + description: string + thumbnails: { + default: YoutubeThumbnail + medium: YoutubeThumbnail + high: YoutubeThumbnail + }, + channelTitle: string + liveBroadcastContent: string + } +} + +interface YoutubeThumbnail { + url: string + width: number + height: number +} + + +export interface YoutubeCommentThreadSearch { + kind: string + etag: string + nextPageToken: string + pageInfo: { + totalResults: number + resultsPerPage: number + }, + items: YoutubeCommentThread[] +} + +export interface YoutubeCommentThread { + kind: string + etag: string + id: string + snippet: { + channelId?: string + videoId: string + topLevelComment: YoutubeComment + }, + canReply: boolean + totalReplyCount: number + isPublic: 
boolean +} + +export interface YoutubeComment { + kind: string + etag: string + id: string + snippet: { + authorDisplayName: string + authorProfileImageUrl: string + authorChannelUrl: string + authorChannelId: { + value: string + }, + channelId?: string + videoId: string + textDisplay: string + textOriginal: string + canRate: boolean + viewerRating: string + likeCount: number + publishedAt: string + updatedAt: string + } +} From bd609836affc5307255ee60850e69c248193cbc1 Mon Sep 17 00:00:00 2001 From: Fahmi Bnchi Date: Wed, 23 Aug 2023 01:08:11 +0300 Subject: [PATCH 002/185] Remove useless comments --- frontend/src/modules/integration/integration-store.js | 3 --- 1 file changed, 3 deletions(-) diff --git a/frontend/src/modules/integration/integration-store.js b/frontend/src/modules/integration/integration-store.js index 179ddf328f..a435d80764 100644 --- a/frontend/src/modules/integration/integration-store.js +++ b/frontend/src/modules/integration/integration-store.js @@ -551,9 +551,6 @@ export default { { commit }, reqBody ) { - // Function to connect to Dev.to. We just need to store the - // users and organizations we want to track - try { commit('CREATE_STARTED'); From 659689b40924d7c3b5ba256db413a8033fabff96 Mon Sep 17 00:00:00 2001 From: Fahmi Bnchi Date: Wed, 23 Aug 2023 01:33:18 +0300 Subject: [PATCH 003/185] fix typo --- services/libs/integrations/src/integrations/activityTypes.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/libs/integrations/src/integrations/activityTypes.ts b/services/libs/integrations/src/integrations/activityTypes.ts index 90ef6c8843..523bc0bc51 100644 --- a/services/libs/integrations/src/integrations/activityTypes.ts +++ b/services/libs/integrations/src/integrations/activityTypes.ts @@ -487,7 +487,7 @@ export const DEFAULT_ACTIVITY_TYPE_SETTINGS: DefaultActivityTypes = { 'commented on {attributes.videoTitle}', short: 'commented', channel: - '{attributes.videoTitle', + '{attributes.videoTitle}', }, isContribution: Youtube_GRID[YoutubeActivityType.COMMENT].isContribution, }, From f16e4d70e71aab53e442851f97065eb931bfd4db Mon Sep 17 00:00:00 2001 From: Fahmi Bnchi Date: Wed, 23 Aug 2023 01:36:08 +0300 Subject: [PATCH 004/185] Fix more typos --- frontend/src/integrations/youtube/config.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/integrations/youtube/config.js b/frontend/src/integrations/youtube/config.js index 56fa066251..b36f9e6fcc 100644 --- a/frontend/src/integrations/youtube/config.js +++ b/frontend/src/integrations/youtube/config.js @@ -1,4 +1,4 @@ -import StackOverflowConnect from './components/youtube-connect.vue'; +import YoutubeConnect from './components/youtube-connect.vue'; export default { enabled: true, @@ -9,7 +9,7 @@ export default { 'Connect Youtube to sync comments based on channels or selected keywords.', image: 'https://cdn-icons-png.flaticon.com/512/174/174883.png', - connectComponent: StackOverflowConnect, + connectComponent: YoutubeConnect, url: (username) => `https://www.youtube.com/channel/${username}`, chartColor: '#FF9845', showProfileLink: true, From 4bc32b346736d3a91c4ac2d53fd10f9be7836dec Mon Sep 17 00:00:00 2001 From: Fahmi Bnchi Date: Sat, 2 Sep 2023 19:28:00 +0300 Subject: [PATCH 005/185] Add more guards and fix issues --- backend/src/services/integrationService.ts | 16 ++++++++++------ .../components/youtube-connect-drawer.vue | 6 +++--- .../modules/integration/integration-service.js | 3 --- .../src/integrations/youtube/api/comments.ts | 3 +-- 
.../src/integrations/youtube/api/videos.ts | 5 ++--- .../integrations/youtube/api/videosByKeywords.ts | 10 ++++++---- .../src/integrations/youtube/generateStreams.ts | 10 ++++++++-- .../src/integrations/youtube/types.ts | 2 +- 8 files changed, 31 insertions(+), 24 deletions(-) diff --git a/backend/src/services/integrationService.ts b/backend/src/services/integrationService.ts index 2940b9c229..73d8f93289 100644 --- a/backend/src/services/integrationService.ts +++ b/backend/src/services/integrationService.ts @@ -1406,15 +1406,19 @@ export default class IntegrationService { async connectYoutube(integrationData) { this.options.log.info('Creating youtube integration!!') - let playlistId = null + let uploadPlaylistId = null let channelId = null - let keywords = null + let keywords = [] let shouldVerifyChannelId = true const isValidKeyword = integrationData.keywords && Array.isArray(integrationData.keywords) if (isValidKeyword && integrationData.keywords.length > 0) { keywords = integrationData.keywords shouldVerifyChannelId = false + + if (integrationData.keywords.length > 5) { + throw new Error400('The maximum number of keywords is 5') + } } if (shouldVerifyChannelId) { @@ -1432,9 +1436,9 @@ export default class IntegrationService { const response = (await axios(channelDetailsConfig)).data const channelDetails = response.items[0] channelId = channelDetails.id - playlistId = channelDetails.contentDetails.relatedPlaylists.uploads; + uploadPlaylistId = channelDetails.contentDetails.relatedPlaylists.uploads } catch (err) { - throw new Error400(`The channel id your provided channel : ${integrationData.channelId} or the api key : ${integrationData.apiKey} provided is not valid`) + throw new Error400(`The channel: ${integrationData.channelId} with the api key: ${integrationData.apiKey} is not valid`) } } @@ -1447,8 +1451,8 @@ export default class IntegrationService { platform: PlatformType.YOUTUBE, settings: { apiKey: integrationData.apiKey, - uploadPlaylistId: playlistId, - channelId: channelId, + uploadPlaylistId, + channelId, keywords }, status: 'in-progress', diff --git a/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue b/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue index 006716c430..bf58171585 100644 --- a/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue +++ b/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue @@ -125,7 +125,7 @@ export default { handler(newValue) { if (!newValue && !this.isKeywordsEnabled) { this.isKeywordsEnabled = true - } else if (newValue != '') { + } else if (newValue !== '') { this.isKeywordsEnabled = false } } @@ -144,11 +144,11 @@ export default { return this.keywords.length > 0 }, connectDisabled() { - if (!this.apiKey || this.apiKey == "") { + if (!this.apiKey || this.apiKey === "") { return true; } - if (this.channelId && this.channelId != '') { + if (this.channelId && this.channelId !== '') { return false; } diff --git a/frontend/src/modules/integration/integration-service.js b/frontend/src/modules/integration/integration-service.js index 6d51bcd503..5258079cd9 100644 --- a/frontend/src/modules/integration/integration-service.js +++ b/frontend/src/modules/integration/integration-service.js @@ -350,10 +350,7 @@ export class IntegrationService { } static async youtubeConnect(reqBody) { - // Getting the tenant_id const tenantId = AuthCurrentTenant.get(); - - // Calling connect devto function in the backend. 
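// (Illustrative aside, not part of the patch.) Based on connectYoutube() on the backend,
// the reqBody forwarded below takes one of two shapes, channel mode or keyword mode;
// the example values here are invented:
//   IntegrationService.youtubeConnect({ apiKey: '<api key>', channelId: 'UC_x5XG1OV2P6uZZ5FSM9Ttw', keywords: [] })
//   IntegrationService.youtubeConnect({ apiKey: '<api key>', channelId: null, keywords: ['devops', 'kubernetes'] })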
const response = await authAxios.post( `/tenant/${tenantId}/youtube-connect`, reqBody, diff --git a/services/libs/integrations/src/integrations/youtube/api/comments.ts b/services/libs/integrations/src/integrations/youtube/api/comments.ts index 684e48c152..9f9a0de3f0 100644 --- a/services/libs/integrations/src/integrations/youtube/api/comments.ts +++ b/services/libs/integrations/src/integrations/youtube/api/comments.ts @@ -15,7 +15,7 @@ export const getComments = async (ctx: IProcessStreamContext): Promise => { +export const getVideosByKeywords = async ( + ctx: IProcessStreamContext +): Promise => { const channelSettings = ctx.stream.data as YoutubeIntegrationSettings const now = new Date() - const publishedAfter = new Date(now.getTime() - (1000 * 60 * 60 * 24 * 30)) // 30 days + const publishedAfter = new Date(now.getTime() - 1000 * 60 * 60 * 24 * 30) // 30 days try { const getVideosByKeywordsRequestConfig: AxiosRequestConfig = { @@ -18,8 +20,8 @@ export const getVideosByKeywords = async (ctx: IProcessStreamContext): Promise 0 diff --git a/services/libs/integrations/src/integrations/youtube/generateStreams.ts b/services/libs/integrations/src/integrations/youtube/generateStreams.ts index 11c8daf60b..3add232514 100644 --- a/services/libs/integrations/src/integrations/youtube/generateStreams.ts +++ b/services/libs/integrations/src/integrations/youtube/generateStreams.ts @@ -5,13 +5,17 @@ import { GenerateStreamsHandler } from '../../types' const handler: GenerateStreamsHandler = async (ctx) => { const channelSettings = ctx.integration.settings as YoutubeIntegrationSettings + if (!channelSettings.apiKey) { + await ctx.abortRunWithError('Api key must be provided') + return + } + if (channelSettings.keywords && channelSettings.keywords.length > 0) { const searchQuery = channelSettings.keywords.join('|') await ctx.publishStream( `${YoutubeRootStream.KEYWORDS_SEARCH}:${searchQuery}`, { - ...channelSettings, - keywords: channelSettings.keywords + ...channelSettings } ) } else if (channelSettings.channelId) { @@ -21,6 +25,8 @@ const handler: GenerateStreamsHandler = async (ctx) => { ...channelSettings } ) + } else { + await ctx.abortRunWithError('No channel id or keywords provided') } } diff --git a/services/libs/integrations/src/integrations/youtube/types.ts b/services/libs/integrations/src/integrations/youtube/types.ts index 3966999e1e..6c1d889b6e 100644 --- a/services/libs/integrations/src/integrations/youtube/types.ts +++ b/services/libs/integrations/src/integrations/youtube/types.ts @@ -14,7 +14,7 @@ export interface YoutubeIntegrationSettings { uploadPlaylistId: string channelId: string nextPageToken?: string - keywords?: string[] + keywords: string[] } export interface YoutubeVideoStreamConfig { From 982a78cb48f688297803a95eadb30f15c4db090e Mon Sep 17 00:00:00 2001 From: Fahmi Bnchi Date: Mon, 11 Sep 2023 20:06:12 +0300 Subject: [PATCH 006/185] add a maximum number of pages to fetch --- .../src/integrations/youtube/api/videos.ts | 4 +-- .../youtube/api/videosByKeywords.ts | 4 +-- .../integrations/youtube/generateStreams.ts | 16 ++++----- .../src/integrations/youtube/index.ts | 2 +- .../src/integrations/youtube/processStream.ts | 36 +++++++++++++++---- .../src/integrations/youtube/types.ts | 4 +-- 6 files changed, 43 insertions(+), 23 deletions(-) diff --git a/services/libs/integrations/src/integrations/youtube/api/videos.ts b/services/libs/integrations/src/integrations/youtube/api/videos.ts index 25e0f9f3ee..1d0394ece0 100644 --- 
a/services/libs/integrations/src/integrations/youtube/api/videos.ts +++ b/services/libs/integrations/src/integrations/youtube/api/videos.ts @@ -1,9 +1,9 @@ import axios, { AxiosRequestConfig } from 'axios' -import { YoutubeIntegrationSettings, YoutubeVideoSearch } from '../types' +import { YoutubeIntegrationStreamConfig, YoutubeVideoSearch } from '../types' import { IProcessStreamContext } from '@/types' export const getVideos = async (ctx: IProcessStreamContext): Promise => { - const channelSettings = ctx.stream.data as YoutubeIntegrationSettings + const channelSettings = ctx.stream.data as YoutubeIntegrationStreamConfig try { const getChannelVideosConfig: AxiosRequestConfig = { diff --git a/services/libs/integrations/src/integrations/youtube/api/videosByKeywords.ts b/services/libs/integrations/src/integrations/youtube/api/videosByKeywords.ts index 3bbf656520..d4e04570ec 100644 --- a/services/libs/integrations/src/integrations/youtube/api/videosByKeywords.ts +++ b/services/libs/integrations/src/integrations/youtube/api/videosByKeywords.ts @@ -1,11 +1,11 @@ import axios, { AxiosRequestConfig } from 'axios' -import { YoutubeIntegrationSettings, YoutubeVideoSearch } from '../types' +import { YoutubeIntegrationStreamConfig, YoutubeVideoSearch } from '../types' import { IProcessStreamContext } from '@/types' export const getVideosByKeywords = async ( ctx: IProcessStreamContext ): Promise => { - const channelSettings = ctx.stream.data as YoutubeIntegrationSettings + const channelSettings = ctx.stream.data as YoutubeIntegrationStreamConfig const now = new Date() const publishedAfter = new Date(now.getTime() - 1000 * 60 * 60 * 24 * 30) // 30 days diff --git a/services/libs/integrations/src/integrations/youtube/generateStreams.ts b/services/libs/integrations/src/integrations/youtube/generateStreams.ts index 3add232514..b8e02b50b0 100644 --- a/services/libs/integrations/src/integrations/youtube/generateStreams.ts +++ b/services/libs/integrations/src/integrations/youtube/generateStreams.ts @@ -1,9 +1,9 @@ // generateStreams.ts content -import { YoutubeRootStream, YoutubeIntegrationSettings } from './types' +import { YoutubeRootStream, YoutubeIntegrationStreamConfig } from './types' import { GenerateStreamsHandler } from '../../types' const handler: GenerateStreamsHandler = async (ctx) => { - const channelSettings = ctx.integration.settings as YoutubeIntegrationSettings + const channelSettings = ctx.integration.settings as YoutubeIntegrationStreamConfig if (!channelSettings.apiKey) { await ctx.abortRunWithError('Api key must be provided') @@ -13,17 +13,13 @@ const handler: GenerateStreamsHandler = async (ctx) => { if (channelSettings.keywords && channelSettings.keywords.length > 0) { const searchQuery = channelSettings.keywords.join('|') await ctx.publishStream( - `${YoutubeRootStream.KEYWORDS_SEARCH}:${searchQuery}`, - { - ...channelSettings - } + `${YoutubeRootStream.KEYWORDS_SEARCH}:${searchQuery}`, + channelSettings ) - } else if (channelSettings.channelId) { + } else if (channelSettings.channelId != '') { await ctx.publishStream( `${YoutubeRootStream.UPLOADED_VIDEOS}:${channelSettings.channelId}`, - { - ...channelSettings - } + channelSettings ) } else { await ctx.abortRunWithError('No channel id or keywords provided') diff --git a/services/libs/integrations/src/integrations/youtube/index.ts b/services/libs/integrations/src/integrations/youtube/index.ts index 59e4e00796..c8b1c72ce1 100644 --- a/services/libs/integrations/src/integrations/youtube/index.ts +++ 
b/services/libs/integrations/src/integrations/youtube/index.ts @@ -9,7 +9,7 @@ import { PlatformType } from '@crowd/types' const descriptor: IIntegrationDescriptor = { type: PlatformType.YOUTUBE, memberAttributes: YOUTUBE_MEMBER_ATTRIBUTES, - checkEvery: 60, + checkEvery: 360, generateStreams, processStream, processData, diff --git a/services/libs/integrations/src/integrations/youtube/processStream.ts b/services/libs/integrations/src/integrations/youtube/processStream.ts index 5bca1ec556..517923e215 100644 --- a/services/libs/integrations/src/integrations/youtube/processStream.ts +++ b/services/libs/integrations/src/integrations/youtube/processStream.ts @@ -1,22 +1,32 @@ // processStream.ts content -import { YoutubeRootStream, YoutubeIntegrationSettings, YoutubeVideoStreamConfig } from './types' +import { YoutubeRootStream, YoutubeIntegrationStreamConfig, YoutubeVideoStreamConfig } from './types' import { getVideos } from './api/videos' import { getComments } from './api/comments' import { getVideosByKeywords } from './api/videosByKeywords' import { IProcessStreamContext, ProcessStreamHandler } from '../../types' const handler: ProcessStreamHandler = async (ctx) => { + const isChannelVideoStream = ctx.stream.identifier.startsWith(YoutubeRootStream.CHANNEL_VIDEO) + // if it's keywords search stream or videos stream make sure to fetch at most 40 pages + if (!isChannelVideoStream) { + const channelSettings = ctx.stream.data as YoutubeIntegrationStreamConfig + if (Number(channelSettings.page) >= 41) { + await ctx.log.info(`Finished fetching the maximum number of youtube pages for ${channelSettings.apiKey} at ${Date.now()}`) + return + } + } + if (ctx.stream.identifier.startsWith(YoutubeRootStream.UPLOADED_VIDEOS)) { await handleChannelStream(ctx) } else if (ctx.stream.identifier.startsWith(YoutubeRootStream.KEYWORDS_SEARCH)) { await handleKeywordsSearchStream(ctx) - } else { + } else if (isChannelVideoStream) { await handleVideoStream(ctx) } } async function handleChannelStream(ctx: IProcessStreamContext) { - const channelSettings = ctx.stream.data as YoutubeIntegrationSettings + const channelSettings = ctx.stream.data as YoutubeIntegrationStreamConfig const videos = await getVideos(ctx) for (const video of videos.items) { @@ -32,9 +42,10 @@ async function handleChannelStream(ctx: IProcessStreamContext) { const shouldLoadNextPage = videos.nextPageToken && videos.nextPageToken != '' if (shouldLoadNextPage) { await ctx.publishStream( - `${YoutubeRootStream.UPLOADED_VIDEOS}:${channelSettings.channelId}:${channelSettings.nextPageToken}`, + `${YoutubeRootStream.UPLOADED_VIDEOS}:${channelSettings.channelId}:${videos.nextPageToken}`, { ...channelSettings, + page: channelSettings.page + 1, nextPageToken: videos.nextPageToken }, ) @@ -42,9 +53,9 @@ async function handleChannelStream(ctx: IProcessStreamContext) { } async function handleKeywordsSearchStream(ctx: IProcessStreamContext) { - const channelSettings = ctx.stream.data as YoutubeIntegrationSettings - const videos = await getVideosByKeywords(ctx) + const channelSettings = ctx.stream.data as YoutubeIntegrationStreamConfig + const videos = await getVideosByKeywords(ctx) for (const video of videos.items) { const videoId = video.id.videoId @@ -54,6 +65,19 @@ async function handleKeywordsSearchStream(ctx: IProcessStreamContext) { apiKey: channelSettings.apiKey }) } + + const shouldLoadNextPage = videos.nextPageToken && videos.nextPageToken != '' + if (shouldLoadNextPage) { + const searchQuery = channelSettings.keywords.join('|') + await 
ctx.publishStream( + `${YoutubeRootStream.KEYWORDS_SEARCH}:${searchQuery}:${videos.nextPageToken}`, + { + ...channelSettings, + page: channelSettings.page + 1, + nextPageToken: videos.nextPageToken + } + ) + } } async function handleVideoStream(ctx: IProcessStreamContext) { diff --git a/services/libs/integrations/src/integrations/youtube/types.ts b/services/libs/integrations/src/integrations/youtube/types.ts index 6c1d889b6e..1f78e22a3e 100644 --- a/services/libs/integrations/src/integrations/youtube/types.ts +++ b/services/libs/integrations/src/integrations/youtube/types.ts @@ -9,12 +9,12 @@ export enum YoutubeRootStream { KEYWORDS_SEARCH = 'keywords_search' } -export interface YoutubeIntegrationSettings { +export interface YoutubeIntegrationStreamConfig { apiKey: string - uploadPlaylistId: string channelId: string nextPageToken?: string keywords: string[] + page: number } export interface YoutubeVideoStreamConfig { From a951cc91e6f74155417882f72d3d3070287b8dcd Mon Sep 17 00:00:00 2001 From: Fahmi Bnchi Date: Mon, 11 Sep 2023 20:13:27 +0300 Subject: [PATCH 007/185] fix formating issues --- backend/src/services/integrationService.ts | 24 +++++----- .../components/youtube-connect-drawer.vue | 48 +++++++++---------- .../integration/integration-service.js | 4 +- .../modules/integration/integration-store.js | 2 +- 4 files changed, 39 insertions(+), 39 deletions(-) diff --git a/backend/src/services/integrationService.ts b/backend/src/services/integrationService.ts index d89e05d896..70361bd29e 100644 --- a/backend/src/services/integrationService.ts +++ b/backend/src/services/integrationService.ts @@ -1516,19 +1516,18 @@ export default class IntegrationService { async connectYoutube(integrationData) { this.options.log.info('Creating youtube integration!!') - let uploadPlaylistId = null - let channelId = null + let channelId = '' let keywords = [] let shouldVerifyChannelId = true const isValidKeyword = integrationData.keywords && Array.isArray(integrationData.keywords) if (isValidKeyword && integrationData.keywords.length > 0) { - keywords = integrationData.keywords - shouldVerifyChannelId = false - if (integrationData.keywords.length > 5) { throw new Error400('The maximum number of keywords is 5') } + + keywords = integrationData.keywords + shouldVerifyChannelId = false } if (shouldVerifyChannelId) { @@ -1536,19 +1535,20 @@ export default class IntegrationService { const channelDetailsConfig: AxiosRequestConfig = { method: 'get', url: `https://www.googleapis.com/youtube/v3/channels`, - params: { + params: { key: integrationData.apiKey, id: integrationData.channelId, part: 'contentDetails', - } + }, } const response = (await axios(channelDetailsConfig)).data - const channelDetails = response.items[0] + const channelDetails = response.items[0] channelId = channelDetails.id - uploadPlaylistId = channelDetails.contentDetails.relatedPlaylists.uploads } catch (err) { - throw new Error400(`The channel: ${integrationData.channelId} with the api key: ${integrationData.apiKey} is not valid`) + throw new Error400( + `The channel: ${integrationData.channelId} with the api key: ${integrationData.apiKey} is not valid`, + ) } } @@ -1561,9 +1561,9 @@ export default class IntegrationService { platform: PlatformType.YOUTUBE, settings: { apiKey: integrationData.apiKey, - uploadPlaylistId, channelId, - keywords + keywords, + page: 1, }, status: 'in-progress', }, diff --git a/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue 
b/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue index bf58171585..819a2ede25 100644 --- a/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue +++ b/frontend/src/integrations/youtube/components/youtube-connect-drawer.vue @@ -22,7 +22,7 @@ > lorem ipsum
[template markup stripped during extraction: the -/+ lines of the drawer-template hunks at @@ -22,7 +22,7 @@ above, @@ -39,9 +39,9 @@ and @@ -55,7 +55,7 @@ are not recoverable; only the placeholder copy ('lorem ipsum') survives]
Keywords Search 0 + return this.keywords.length > 0; }, connectDisabled() { - if (!this.apiKey || this.apiKey === "") { + if (!this.apiKey || this.apiKey === '') { return true; } @@ -155,11 +144,22 @@ export default { const validKeywords = this.keywords.filter((k) => !!k); const empty = validKeywords.length === 0; if (this.integration.settings && !empty) { - return validKeywords.length === this.integration.settings.keywords.length + return validKeywords.length === this.integration.settings.keywords.length; } return empty; }, }, + watch: { + channelId: { + handler(newValue) { + if (!newValue && !this.isKeywordsEnabled) { + this.isKeywordsEnabled = true; + } else if (newValue !== '') { + this.isKeywordsEnabled = false; + } + }, + }, + }, methods: { ...mapActions({ doYoutubeConnect: 'integration/doYoutubeConnect', @@ -176,14 +176,14 @@ export default { const youtubeConnectReq = {}; if (this.isKeywordsEnabled) { - youtubeConnectReq.channelId = null - youtubeConnectReq.keywords = relevantKeywords + youtubeConnectReq.channelId = null; + youtubeConnectReq.keywords = relevantKeywords; } else if (this.channelId) { - youtubeConnectReq.channelId = this.channelId - youtubeConnectReq.keywords = [] + youtubeConnectReq.channelId = this.channelId; + youtubeConnectReq.keywords = []; } - youtubeConnectReq.apiKey = this.apiKey + youtubeConnectReq.apiKey = this.apiKey; await this.doYoutubeConnect(youtubeConnectReq); this.isVisible = false; this.loading = false; diff --git a/frontend/src/modules/integration/integration-service.js b/frontend/src/modules/integration/integration-service.js index c18d98b6ea..c335c19e98 100644 --- a/frontend/src/modules/integration/integration-service.js +++ b/frontend/src/modules/integration/integration-service.js @@ -392,11 +392,11 @@ export class IntegrationService { return response.data; } - + static async youtubeConnect(reqBody) { const tenantId = AuthCurrentTenant.get(); const response = await authAxios.post( - `/tenant/${tenantId}/youtube-connect`, + `/tenant/${tenantId}/youtube-connect`, reqBody, ); diff --git a/frontend/src/modules/integration/integration-store.js b/frontend/src/modules/integration/integration-store.js index 027d669203..32ae3561d8 100644 --- a/frontend/src/modules/integration/integration-store.js +++ b/frontend/src/modules/integration/integration-store.js @@ -593,7 +593,7 @@ export default { async doYoutubeConnect( { commit }, - reqBody + reqBody, ) { try { commit('CREATE_STARTED'); From c894429ecc518130edb7ffe9d6623980fa87c3da Mon Sep 17 00:00:00 2001 From: Uros Marolt Date: Mon, 11 Dec 2023 09:30:42 +0100 Subject: [PATCH 008/185] init temporal search attribute (#1943) --- scripts/scaffold/temporal/Dockerfile | 4 +++- scripts/scaffold/temporal/entrypoint.sh | 27 +++++++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) create mode 100755 scripts/scaffold/temporal/entrypoint.sh diff --git a/scripts/scaffold/temporal/Dockerfile b/scripts/scaffold/temporal/Dockerfile index c37f6b7186..bd4d2f4568 100755 --- a/scripts/scaffold/temporal/Dockerfile +++ b/scripts/scaffold/temporal/Dockerfile @@ -7,4 +7,6 @@ ENV PATH /root/.temporalio/bin:$PATH EXPOSE 7233 8233 -ENTRYPOINT ["temporal", "server", "start-dev", "--ip", "0.0.0.0"] +COPY ./entrypoint.sh /entrypoint.sh + +ENTRYPOINT [ "/entrypoint.sh" ] diff --git a/scripts/scaffold/temporal/entrypoint.sh b/scripts/scaffold/temporal/entrypoint.sh new file mode 100755 index 0000000000..9891759210 --- /dev/null +++ b/scripts/scaffold/temporal/entrypoint.sh @@ -0,0 +1,27 @@ +#!/bin/sh + +temporal server 
start-dev --ip 0.0.0.0 & + +# Function to check if Temporal is ready +check_temporal_ready() { + # Execute the Temporal health check command + temporal operator cluster health + + # Return the exit status of the health check command + return $? +} + +# Wait for Temporal server to be ready +echo "Waiting for Temporal server to be ready..." +until check_temporal_ready; do + printf '.' + sleep 1 +done +echo "Temporal server is ready." + +# Run Temporal setup command +temporal operator search-attribute create --name TenantId --type Text --namespace default + +# Keep the container running after setup +# (This could be tailing logs or just a sleep loop) +tail -f /dev/null From 8e4120e81c952dfc2945227a52dd92bb022c3466 Mon Sep 17 00:00:00 2001 From: joanagmaia Date: Mon, 11 Dec 2023 13:15:36 +0000 Subject: [PATCH 009/185] Support incident status in public status endpoint (#1945) --- .../modules/layout/components/system-status/system-status.vue | 4 ++++ frontend/src/modules/layout/types/SystemStatus.ts | 1 + 2 files changed, 5 insertions(+) diff --git a/frontend/src/modules/layout/components/system-status/system-status.vue b/frontend/src/modules/layout/components/system-status/system-status.vue index 44d29c074d..21810ee346 100644 --- a/frontend/src/modules/layout/components/system-status/system-status.vue +++ b/frontend/src/modules/layout/components/system-status/system-status.vue @@ -73,6 +73,10 @@ const StatusDisplay = { label: 'Under Maintenance', color: 'bg-gray-500', }, + [Status.Incident]: { + label: 'Incident', + color: 'bg-yellow-500', + }, } as const; const label = computed(() => { diff --git a/frontend/src/modules/layout/types/SystemStatus.ts b/frontend/src/modules/layout/types/SystemStatus.ts index 08f6e14f45..c08622feeb 100644 --- a/frontend/src/modules/layout/types/SystemStatus.ts +++ b/frontend/src/modules/layout/types/SystemStatus.ts @@ -5,4 +5,5 @@ export enum Status { MajorOutage = 'major_outage', UnderMaintenance = 'under_maintenance', // currently not in use Unknown = 'unknown', + Incident = 'incident', } From 61d83022ee9b6a3d7eacc170c07c4b62ed1122bf Mon Sep 17 00:00:00 2001 From: Mish Savelyev <1564970+sausage-todd@users.noreply.github.com> Date: Mon, 11 Dec 2023 16:18:57 +0100 Subject: [PATCH 010/185] Cube MVs tenant indexes (#1946) --- .../R__cubejs-materialized-views.sql | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/backend/src/database/migrations/R__cubejs-materialized-views.sql b/backend/src/database/migrations/R__cubejs-materialized-views.sql index efd002d274..280a7dc0f0 100644 --- a/backend/src/database/migrations/R__cubejs-materialized-views.sql +++ b/backend/src/database/migrations/R__cubejs-materialized-views.sql @@ -1,3 +1,4 @@ +-- Members DROP MATERIALIZED VIEW IF EXISTS mv_members_cube; CREATE MATERIALIZED VIEW IF NOT EXISTS mv_members_cube AS SELECT @@ -13,6 +14,11 @@ SELECT FROM members m ; +CREATE INDEX IF NOT EXISTS mv_members_cube_tenant ON mv_members_cube ("tenantId"); +CREATE UNIQUE INDEX IF NOT EXISTS mv_members_cube_id ON mv_members_cube (id); + + +-- Activities DROP MATERIALIZED VIEW IF EXISTS mv_activities_cube; CREATE MATERIALIZED VIEW IF NOT EXISTS mv_activities_cube AS SELECT @@ -37,6 +43,13 @@ FROM activities a WHERE a."deletedAt" IS NULL ; +CREATE INDEX IF NOT EXISTS mv_activities_cube_timestamp ON mv_activities_cube (timestamp); +CREATE INDEX IF NOT EXISTS mv_activities_cube_org_id ON mv_activities_cube ("organizationId"); +CREATE UNIQUE INDEX IF NOT EXISTS mv_activities_cube_id ON mv_activities_cube (id); 
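-- Illustrative aside, not part of the migration: the composite ("tenantId", "timestamp")
-- index added below presumably serves tenant-scoped, time-bucketed rollups of the kind
-- the Cube.js layer runs against this view; the exact query shape is an assumption:
--   SELECT date_trunc('day', "timestamp") AS day, count(*) AS activities
--   FROM mv_activities_cube
--   WHERE "tenantId" = '00000000-0000-0000-0000-000000000000'
--     AND "timestamp" >= now() - interval '30 days'
--   GROUP BY 1
--   ORDER BY 1;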
+CREATE INDEX IF NOT EXISTS mv_activities_cube_tenantId_timestamp_idx ON mv_activities_cube ("tenantId", "timestamp"); + + +-- Organizations DROP MATERIALIZED VIEW IF EXISTS mv_organizations_cube; CREATE MATERIALIZED VIEW IF NOT EXISTS mv_organizations_cube AS SELECT @@ -51,6 +64,11 @@ JOIN activities a ON o.id = a."organizationId" GROUP BY o.id ; +CREATE UNIQUE INDEX IF NOT EXISTS mv_organizations_cube_id ON mv_organizations_cube (id); +CREATE INDEX IF NOT EXISTS mv_organizations_cube_tenantId ON mv_organizations_cube ("tenantId"); + + +-- Segments DROP MATERIALIZED VIEW IF EXISTS mv_segments_cube; CREATE MATERIALIZED VIEW IF NOT EXISTS mv_segments_cube AS SELECT @@ -59,11 +77,4 @@ SELECT FROM segments ; -CREATE INDEX IF NOT EXISTS mv_members_cube_tenant ON mv_members_cube ("tenantId"); -CREATE INDEX IF NOT EXISTS mv_activities_cube_timestamp ON mv_activities_cube (timestamp); -CREATE INDEX IF NOT EXISTS mv_activities_cube_org_id ON mv_activities_cube ("organizationId"); - -CREATE UNIQUE INDEX IF NOT EXISTS mv_members_cube_id ON mv_members_cube (id); -CREATE UNIQUE INDEX IF NOT EXISTS mv_activities_cube_id ON mv_activities_cube (id); -CREATE UNIQUE INDEX IF NOT EXISTS mv_organizations_cube_id ON mv_organizations_cube (id); CREATE UNIQUE INDEX IF NOT EXISTS mv_segments_cube_id ON mv_segments_cube (id); From 54aff565e2e2b03046b06b149282d056394632d5 Mon Sep 17 00:00:00 2001 From: Yeganathan S Date: Mon, 11 Dec 2023 16:22:16 +0000 Subject: [PATCH 011/185] Prevent overwrite of dynamic attributes on member merge (#1937) --- backend/src/services/memberService.ts | 32 +++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/backend/src/services/memberService.ts b/backend/src/services/memberService.ts index 4e0d04cac7..f4ea614c10 100644 --- a/backend/src/services/memberService.ts +++ b/backend/src/services/memberService.ts @@ -791,6 +791,8 @@ export default class MemberService extends LoggerBase { return toKeep }, + attributes: (oldAttributes, newAttributes) => + MemberService.safeMerge(oldAttributes, newAttributes), }) } @@ -1261,4 +1263,34 @@ export default class MemberService extends LoggerBase { out.total = lodash.sum(Object.values(out)) return out } + + /** + * Merges two objects, preserving non-null values in the original object. 
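+   * Example (reviewer's illustration, behavior assumed from the customizer below):
+   *   safeMerge({ a: 1, b: null, tags: ['x'] }, { a: 9, b: 2, tags: ['y'] })
+   *   returns { a: 1, b: 2, tags: ['x', 'y'] }: original non-null scalars win,
+   *   nulls are filled from the new object, and arrays are unioned.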
+ * + * @param originalObject - The original object + * @param newObject - The object to merge into the original + * @returns The merged object + */ + static safeMerge(originalObject: any, newObject: any) { + const mergeCustomizer = (originalValue, newValue) => { + // Merge arrays, removing duplicates + if (lodash.isArray(originalValue)) { + return lodash.unionWith(originalValue, newValue, lodash.isEqual) + } + + // Recursively merge nested objects + if (lodash.isPlainObject(originalValue)) { + return lodash.mergeWith({}, originalValue, newValue, mergeCustomizer) + } + + // Preserve original non-null or non-empty values + if (newValue === null || (originalValue !== null && originalValue !== '')) { + return originalValue + } + + return undefined + } + + return lodash.mergeWith({}, originalObject, newObject, mergeCustomizer) + } } From b43a1f69cb16fa6c9b28fbbdff7b5b81e101a32c Mon Sep 17 00:00:00 2001 From: Uros Marolt Date: Tue, 12 Dec 2023 07:51:38 +0100 Subject: [PATCH 012/185] Bugfix/manual org changes overridden by enrichment c 2903 (#1944) --- .../api/organization/organizationUpdate.ts | 8 +- .../U1702035391__track-manual-org-changes.sql | 2 + .../V1702035391__track-manual-org-changes.sql | 2 + backend/src/database/models/organization.ts | 5 + .../__tests__/organizationRepository.test.ts | 4 + .../repositories/organizationRepository.ts | 156 ++++++++++++------ .../services/__tests__/memberService.test.ts | 3 + backend/src/services/organizationService.ts | 16 +- .../src/repo/organization.repo.ts | 18 ++ 9 files changed, 160 insertions(+), 54 deletions(-) create mode 100644 backend/src/database/migrations/U1702035391__track-manual-org-changes.sql create mode 100644 backend/src/database/migrations/V1702035391__track-manual-org-changes.sql diff --git a/backend/src/api/organization/organizationUpdate.ts b/backend/src/api/organization/organizationUpdate.ts index 2bfed8211e..42509dd842 100644 --- a/backend/src/api/organization/organizationUpdate.ts +++ b/backend/src/api/organization/organizationUpdate.ts @@ -21,7 +21,13 @@ import PermissionChecker from '../../services/user/permissionChecker' export default async (req, res) => { new PermissionChecker(req).validateHas(Permissions.values.organizationEdit) - const payload = await new OrganizationService(req).update(req.params.id, req.body, true) + const payload = await new OrganizationService(req).update( + req.params.id, + req.body, + true, + true, + true, + ) await req.responseHandler.success(req, res, payload) } diff --git a/backend/src/database/migrations/U1702035391__track-manual-org-changes.sql b/backend/src/database/migrations/U1702035391__track-manual-org-changes.sql new file mode 100644 index 0000000000..da2fada1db --- /dev/null +++ b/backend/src/database/migrations/U1702035391__track-manual-org-changes.sql @@ -0,0 +1,2 @@ +alter table organizations + drop column "manuallyChangedFields"; diff --git a/backend/src/database/migrations/V1702035391__track-manual-org-changes.sql b/backend/src/database/migrations/V1702035391__track-manual-org-changes.sql new file mode 100644 index 0000000000..72653660ae --- /dev/null +++ b/backend/src/database/migrations/V1702035391__track-manual-org-changes.sql @@ -0,0 +1,2 @@ +alter table organizations + add column "manuallyChangedFields" text[] null; diff --git a/backend/src/database/models/organization.ts b/backend/src/database/models/organization.ts index f9002bcd3e..9dfae4b65a 100644 --- a/backend/src/database/models/organization.ts +++ b/backend/src/database/models/organization.ts @@ -221,6 +221,11 @@ 
export default (sequelize) => { type: DataTypes.JSONB, allowNull: true, }, + manuallyChangedFields: { + type: DataTypes.ARRAY(DataTypes.TEXT), + allowNull: true, + default: [], + }, }, { indexes: [ diff --git a/backend/src/database/repositories/__tests__/organizationRepository.test.ts b/backend/src/database/repositories/__tests__/organizationRepository.test.ts index fa36bc6878..36a66f3c99 100644 --- a/backend/src/database/repositories/__tests__/organizationRepository.test.ts +++ b/backend/src/database/repositories/__tests__/organizationRepository.test.ts @@ -227,6 +227,7 @@ describe('OrganizationRepository tests', () => { isTeamOrganization: false, attributes: {}, weakIdentities: [], + manuallyChangedFields: null, } expect(organizationCreated).toStrictEqual(expectedOrganizationCreated) }) @@ -307,6 +308,7 @@ describe('OrganizationRepository tests', () => { isTeamOrganization: false, attributes: {}, weakIdentities: [], + manuallyChangedFields: null, } expect(organizationCreated).toStrictEqual(expectedOrganizationCreated) @@ -364,6 +366,7 @@ describe('OrganizationRepository tests', () => { isTeamOrganization: false, attributes: {}, weakIdentities: [], + manuallyChangedFields: null, } const organizationById = await OrganizationRepository.findById( organizationCreated.id, @@ -1232,6 +1235,7 @@ describe('OrganizationRepository tests', () => { isTeamOrganization: false, attributes: {}, weakIdentities: [], + manuallyChangedFields: [], } expect(organizationUpdated).toStrictEqual(organizationExpected) diff --git a/backend/src/database/repositories/organizationRepository.ts b/backend/src/database/repositories/organizationRepository.ts index ad478a345f..089d983b40 100644 --- a/backend/src/database/repositories/organizationRepository.ts +++ b/backend/src/database/repositories/organizationRepository.ts @@ -517,7 +517,64 @@ class OrganizationRepository { }) } - static async update(id, data, options: IRepositoryOptions, overrideIdentities = false) { + static ORGANIZATION_UPDATE_COLUMNS = [ + 'displayName', + 'description', + 'emails', + 'phoneNumbers', + 'logo', + 'tags', + 'website', + 'location', + 'github', + 'twitter', + 'linkedin', + 'crunchbase', + 'employees', + 'revenueRange', + 'importHash', + 'isTeamOrganization', + 'employeeCountByCountry', + 'type', + 'ticker', + 'headline', + 'profiles', + 'naics', + 'industry', + 'founded', + 'size', + 'employees', + 'twitter', + 'lastEnrichedAt', + 'affiliatedProfiles', + 'allSubsidiaries', + 'alternativeDomains', + 'alternativeNames', + 'averageEmployeeTenure', + 'averageTenureByLevel', + 'averageTenureByRole', + 'directSubsidiaries', + 'employeeChurnRate', + 'employeeCountByMonth', + 'employeeGrowthRate', + 'employeeCountByMonthByLevel', + 'employeeCountByMonthByRole', + 'gicsSector', + 'grossAdditionsByMonth', + 'grossDeparturesByMonth', + 'ultimateParent', + 'immediateParent', + 'attributes', + 'weakIdentities', + ] + + static async update( + id, + data, + options: IRepositoryOptions, + overrideIdentities = false, + manualChange = false, + ) { const currentUser = SequelizeRepository.getCurrentUser(options) const transaction = SequelizeRepository.getTransaction(options) @@ -541,59 +598,56 @@ class OrganizationRepository { delete data.attributes.syncRemote } + if (manualChange) { + const manuallyChangedFields: string[] = record.manuallyChangedFields || [] + + for (const column of this.ORGANIZATION_UPDATE_COLUMNS) { + let changed = false + + // only check fields that are in the data object that will be updated + if (column in data) { + if ( + 
record[column] !== null && + column in data && + (data[column] === null || data[column] === undefined) + ) { + // column was removed in the update -> will be set to null by sequelize + changed = true + } else if ( + record[column] === null && + data[column] !== null && + data[column] !== undefined + ) { + // column was null before now it's not anymore + changed = true + } else if (record[column] !== data[column]) { + // column value has changed + changed = true + } + } + + if (changed && !manuallyChangedFields.includes(column)) { + manuallyChangedFields.push(column) + } + } + + data.manuallyChangedFields = manuallyChangedFields + } else { + // ignore columns that were manually changed + // by rewriting them with db data + const manuallyChangedFields: string[] = record.manuallyChangedFields || [] + for (const manuallyChangedColumn of manuallyChangedFields) { + data[manuallyChangedColumn] = record[manuallyChangedColumn] + } + + data.manuallyChangedFields = manuallyChangedFields + } + record = await record.update( { - ...lodash.pick(data, [ - 'displayName', - 'description', - 'emails', - 'phoneNumbers', - 'logo', - 'tags', - 'website', - 'location', - 'github', - 'twitter', - 'linkedin', - 'crunchbase', - 'employees', - 'revenueRange', - 'importHash', - 'isTeamOrganization', - 'employeeCountByCountry', - 'type', - 'ticker', - 'headline', - 'profiles', - 'naics', - 'industry', - 'founded', - 'size', - 'employees', - 'twitter', - 'lastEnrichedAt', - 'affiliatedProfiles', - 'allSubsidiaries', - 'alternativeDomains', - 'alternativeNames', - 'averageEmployeeTenure', - 'averageTenureByLevel', - 'averageTenureByRole', - 'directSubsidiaries', - 'employeeChurnRate', - 'employeeCountByMonth', - 'employeeGrowthRate', - 'employeeCountByMonthByLevel', - 'employeeCountByMonthByRole', - 'gicsSector', - 'grossAdditionsByMonth', - 'grossDeparturesByMonth', - 'ultimateParent', - 'immediateParent', - 'attributes', - 'weakIdentities', - ]), + ...lodash.pick(data, this.ORGANIZATION_UPDATE_COLUMNS), updatedById: currentUser.id, + manuallyChangedFields: data.manuallyChangedFields, }, { transaction, diff --git a/backend/src/services/__tests__/memberService.test.ts b/backend/src/services/__tests__/memberService.test.ts index e17404e46d..6860446ea7 100644 --- a/backend/src/services/__tests__/memberService.test.ts +++ b/backend/src/services/__tests__/memberService.test.ts @@ -593,6 +593,7 @@ describe('MemberService tests', () => { grossDeparturesByMonth: null, ultimateParent: null, immediateParent: null, + manuallyChangedFields: null, }) }) @@ -686,6 +687,7 @@ describe('MemberService tests', () => { grossDeparturesByMonth: null, ultimateParent: null, immediateParent: null, + manuallyChangedFields: null, }) }) @@ -738,6 +740,7 @@ describe('MemberService tests', () => { emails: null, phoneNumbers: null, logo: null, + manuallyChangedFields: null, memberOrganizations: { dateEnd: null, dateStart: null, diff --git a/backend/src/services/organizationService.ts b/backend/src/services/organizationService.ts index 1c7c1baabd..bd681c65fc 100644 --- a/backend/src/services/organizationService.ts +++ b/backend/src/services/organizationService.ts @@ -604,7 +604,13 @@ export default class OrganizationService extends LoggerBase { return OrganizationRepository.findOrganizationsWithMergeSuggestions(args, this.options) } - async update(id, data, overrideIdentities = false, syncToOpensearch = true) { + async update( + id, + data, + overrideIdentities = false, + syncToOpensearch = true, + manualChange = false, + ) { let tx try { @@ -646,7 
+652,13 @@ export default class OrganizationService extends LoggerBase { } } - const record = await OrganizationRepository.update(id, data, repoOptions, overrideIdentities) + const record = await OrganizationRepository.update( + id, + data, + repoOptions, + overrideIdentities, + manualChange, + ) await SequelizeRepository.commitTransaction(tx) diff --git a/services/apps/data_sink_worker/src/repo/organization.repo.ts b/services/apps/data_sink_worker/src/repo/organization.repo.ts index dafb5bddab..a0edcff222 100644 --- a/services/apps/data_sink_worker/src/repo/organization.repo.ts +++ b/services/apps/data_sink_worker/src/repo/organization.repo.ts @@ -450,6 +450,23 @@ export class OrganizationRepository extends RepositoryBase): Promise { + // handle manuallyChanged + const result = await this.db().oneOrNone( + `select id, "manuallyChangedFields" from organizations where id = $(id)`, + { + id, + }, + ) + if (!result) { + throw new Error(`Organization with id ${id} not found!`) + } + const manuallyChangedFields = result.manuallyChangedFields || [] + for (const column of manuallyChangedFields) { + if (column in data) { + delete data[column] + } + } + const keys = Object.keys(data) keys.push('updatedAt') // construct dynamic column set @@ -458,6 +475,7 @@ export class OrganizationRepository extends RepositoryBase Date: Tue, 12 Dec 2023 16:43:38 +0700 Subject: [PATCH 013/185] Essential free plan cancelation banner (#1950) --- .../src/modules/layout/components/layout.vue | 22 +++++++++++++++++++ frontend/src/modules/tenant/store/getters.js | 14 ++++++++++++ 2 files changed, 36 insertions(+) diff --git a/frontend/src/modules/layout/components/layout.vue b/frontend/src/modules/layout/components/layout.vue index c3c0d3b2cf..72fa8ad5ff 100644 --- a/frontend/src/modules/layout/components/layout.vue +++ b/frontend/src/modules/layout/components/layout.vue @@ -118,6 +118,27 @@ You can expect major improvements by Tuesday, Aug 15th. 🚧
+    [banner markup stripped in this dump; shown while showUpgradeEssentialBanner is true]
+    crowd.dev's free plan ends on January 1st, 2024.
+    Your access to this workspace will be revoked after.
+    [Upgrade workspace button]
@@ -168,6 +189,7 @@ export default { showIntegrationsNeedReconnectAlert: 'tenant/showIntegrationsNeedReconnectAlert', showOrganizationsAlertBanner: 'tenant/showOrganizationsAlertBanner', + showUpgradeEssentialBanner: 'tenant/showUpgradeEssentialBanner', showBanner: 'tenant/showBanner', }), diff --git a/frontend/src/modules/tenant/store/getters.js b/frontend/src/modules/tenant/store/getters.js index 1431b5796c..960f2986a4 100644 --- a/frontend/src/modules/tenant/store/getters.js +++ b/frontend/src/modules/tenant/store/getters.js @@ -1,6 +1,7 @@ import sharedGetters from '@/shared/store/getters'; import { router } from '@/router'; import moment from 'moment'; +import Plans from '@/security/plans'; export default { ...sharedGetters(), @@ -75,6 +76,18 @@ export default { return today.isSameOrBefore(limit, 'day'); }, + showUpgradeEssentialBanner: ( + _state, + _getters, + _rootState, + rootGetters, + ) => { + const today = moment(); + const limit = moment('2024-01-01').startOf('day'); + const currentTenant = rootGetters['auth/currentTenant']; + return currentTenant.plan === Plans.values.essential && today.isBefore(limit); + }, + showBanner: (_state, getters) => ( getters.showSampleDataAlert || getters.showIntegrationsErrorAlert @@ -82,6 +95,7 @@ export default { || getters.showIntegrationsInProgressAlert || getters.showIntegrationsNeedReconnectAlert || getters.showOrganizationsAlertBanner + || getters.showUpgradeEssentialBanner ), limit: () => 40, From ddb1143c77cd849443f0c0007fcef988c5ff9bc6 Mon Sep 17 00:00:00 2001 From: Uros Marolt Date: Tue, 12 Dec 2023 12:29:07 +0100 Subject: [PATCH 014/185] Enhancement/queue prioritization c 2633 (#1900) --- .github/actions/deploy-service/action.yaml | 29 +- .../workflows/lf-production-deploy-new.yaml | 12 +- .../lf-production-deploy-original.yaml | 10 + .../workflows/lf-staging-deploy-backend.yaml | 12 +- .../lf-staging-deploy-data-sink-worker.yaml | 10 +- ...taging-deploy-integration-data-worker.yaml | 10 +- ...staging-deploy-integration-run-worker.yaml | 10 +- ...ging-deploy-integration-stream-worker.yaml | 10 +- .../lf-staging-deploy-search-sync-worker.yaml | 10 +- .github/workflows/production-deploy-new.yaml | 4 +- .../workflows/production-deploy-original.yaml | 5 + .github/workflows/staging-deploy-backend.yaml | 12 +- .../staging-deploy-data-sink-worker.yaml | 10 +- ...taging-deploy-integration-data-worker.yaml | 10 +- ...staging-deploy-integration-run-worker.yaml | 10 +- ...ging-deploy-integration-stream-worker.yaml | 10 +- ...taging-deploy-integration-sync-worker.yaml | 10 +- .../staging-deploy-search-sync-worker.yaml | 10 +- .gitignore | 2 +- backend/.env.dist.composed | 3 +- backend/.env.dist.local | 4 +- .../config/custom-environment-variables.json | 1 - backend/package.json | 4 +- .../premium/enrichment/memberEnrichBulk.ts | 5 +- .../src/bin/jobs/checkStuckIntegrationRuns.ts | 35 +- .../src/bin/jobs/integrationDataChecker.ts | 12 +- backend/src/bin/jobs/mergeSuggestions.ts | 14 +- backend/src/bin/jobs/organizationEnricher.ts | 18 +- backend/src/bin/jobs/refreshSampleData.ts | 10 +- backend/src/bin/nodejs-worker.ts | 104 +--- backend/src/bin/scripts/continue-run.ts | 103 ---- .../enrich-members-and-organizations.ts | 26 +- .../bin/scripts/generate-merge-suggestions.ts | 12 +- .../src/bin/scripts/process-integration.ts | 210 ------- backend/src/bin/scripts/process-stream.ts | 82 --- backend/src/bin/scripts/process-webhook.ts | 135 ----- backend/src/bin/worker/integrations.ts | 41 -- backend/src/conf/configTypes.ts | 1 - 
.../twitterSourceIdsFixedTimestamps.ts | 63 -- .../U1701080323__tenant-priority-level.sql | 2 + .../V1701080323__tenant-priority-level.sql | 2 + .../priorityLevelContextRepository.ts | 30 + .../services/integrationProcessor.ts | 57 +- .../services/integrationRunProcessor.ts | 573 ------------------ .../services/integrationServiceBase.ts | 181 ------ .../services/integrationTickProcessor.ts | 221 ++----- .../integrations/services/webhookProcessor.ts | 130 ---- .../workers/sendgridWebhookWorker.ts | 13 +- .../workers/stripeWebhookWorker.ts | 15 +- .../nodejs/csv-export/csvExportWorker.ts | 3 +- .../microservices/nodejs/messageTypes.ts | 6 +- backend/src/serverless/types/workerTypes.ts | 2 - backend/src/serverless/utils/nodeWorkerSQS.ts | 171 ------ backend/src/serverless/utils/serviceSQS.ts | 120 +++- backend/src/services/memberService.ts | 9 +- backend/src/services/organizationService.ts | 15 +- backend/src/services/searchSyncService.ts | 22 +- .../mq/nodeWorkerIntegrationProcessMessage.ts | 19 - .../mq/nodeWorkerProcessWebhookMessage.ts | 13 - pnpm-lock.yaml | 107 +++- scripts/scaffold.yaml | 28 +- scripts/scaffold/sqs/queue.conf | 48 -- scripts/services/api.yaml | 1 + scripts/services/automations-worker.yaml | 1 + scripts/services/data-sink-worker.yaml | 2 + scripts/services/discord-ws.yaml | 1 + scripts/services/emails-worker.yaml | 1 + scripts/services/integration-data-worker.yaml | 2 + scripts/services/integration-run-worker.yaml | 2 + .../services/integration-stream-worker.yaml | 2 + scripts/services/integration-sync-worker.yaml | 2 + scripts/services/job-generator.yaml | 1 + scripts/services/nodejs-worker.yaml | 2 + scripts/services/search-sync-api.yaml | 1 + scripts/services/search-sync-worker.yaml | 2 + scripts/services/webhook-api.yaml | 1 + .../config/custom-environment-variables.json | 3 + .../apps/data_sink_worker/config/default.json | 3 +- services/apps/data_sink_worker/package.json | 1 + .../src/bin/map-member-to-org.ts | 52 +- .../src/bin/map-tenant-members-to-org.ts | 54 +- .../src/bin/process-results.ts | 54 +- .../src/bin/restart-all-failed-results.ts | 29 +- .../src/bin/restart-failed-results.ts | 38 +- .../src/bin/restart-result.ts | 36 +- .../src/bin/restart-x-failed-results.ts | 27 +- .../apps/data_sink_worker/src/conf/index.ts | 2 + .../src/jobs/processOldResults.ts | 6 +- services/apps/data_sink_worker/src/main.ts | 49 +- .../apps/data_sink_worker/src/queue/index.ts | 27 +- .../src/repo/dataSink.data.ts | 18 +- .../src/repo/dataSink.repo.ts | 39 +- .../src/service/activity.service.ts | 46 +- .../src/service/dataSink.service.ts | 8 +- .../src/service/member.service.ts | 14 +- .../config/custom-environment-variables.json | 7 + .../config/default.json | 4 +- .../apps/integration_data_worker/package.json | 2 + .../src/bin/process-data-for-tenant.ts | 37 +- .../src/bin/process-data.ts | 37 +- .../integration_data_worker/src/conf/index.ts | 13 + .../src/jobs/processOldData.ts | 2 +- .../apps/integration_data_worker/src/main.ts | 38 +- .../src/queue/index.ts | 14 +- .../src/service/integrationDataService.ts | 42 +- .../config/custom-environment-variables.json | 7 + .../config/default.json | 4 +- .../apps/integration_run_worker/package.json | 2 + .../src/bin/continue-run.ts | 26 +- .../src/bin/onboard-integration.ts | 25 +- .../src/bin/process-repo.ts | 25 +- .../src/bin/trigger-all-onboardings.ts | 24 +- .../src/bin/trigger-stream-processed.ts | 42 +- .../integration_run_worker/src/conf/index.ts | 13 + .../apps/integration_run_worker/src/main.ts | 63 +- 
.../integration_run_worker/src/queue/index.ts | 29 +- .../src/repo/integrationRun.data.ts | 1 + .../src/repo/integrationRun.repo.ts | 6 +- .../src/service/integrationRunService.ts | 31 +- .../config/custom-environment-variables.json | 7 + .../config/default.json | 4 +- .../integration_stream_worker/package.json | 2 + .../process-all-streams-for-integration.ts | 49 +- .../src/bin/process-all-streams.ts | 49 +- .../src/bin/process-all-webhooks.ts | 47 +- .../src/bin/process-stream.ts | 46 +- .../src/bin/process-webhook.ts | 47 +- .../src/bin/trigger-all-failed-webhooks.ts | 29 +- .../trigger-all-streams-for-integration.ts | 38 +- .../src/bin/trigger-all-streams.ts | 38 +- .../src/bin/trigger-webhook.ts | 32 +- .../src/conf/index.ts | 13 + .../src/jobs/processOldStreams.ts | 8 +- .../integration_stream_worker/src/main.ts | 49 +- .../src/queue/index.ts | 21 +- .../src/repo/integrationStream.data.ts | 1 + .../src/repo/integrationStream.repo.ts | 12 +- .../src/service/integrationStreamService.ts | 34 +- .../config/custom-environment-variables.json | 3 +- .../config/default.json | 4 +- .../apps/integration_sync_worker/package.json | 1 - .../integration_sync_worker/src/conf/index.ts | 3 +- .../apps/integration_sync_worker/src/main.ts | 4 +- .../src/queue/index.ts | 23 +- .../config/custom-environment-variables.json | 3 +- .../search_sync_worker/config/default.json | 4 +- .../apps/search_sync_worker/src/conf/index.ts | 2 + services/apps/search_sync_worker/src/main.ts | 3 +- .../search_sync_worker/src/queue/index.ts | 20 +- .../config/custom-environment-variables.json | 10 + services/apps/webhook_api/config/default.json | 2 + services/apps/webhook_api/package.json | 3 + services/apps/webhook_api/src/conf/index.ts | 21 +- services/apps/webhook_api/src/main.ts | 33 +- .../webhook_api/src/middleware/emitters.ts | 18 + .../apps/webhook_api/src/middleware/index.ts | 8 +- .../apps/webhook_api/src/routes/discourse.ts | 20 +- .../apps/webhook_api/src/routes/github.ts | 16 +- .../apps/webhook_api/src/routes/groupsio.ts | 16 +- services/libs/common_services/.eslintrc.cjs | 21 + services/libs/common_services/.prettierignore | 3 + services/libs/common_services/.prettierrc.cjs | 7 + services/libs/common_services/package.json | 31 + services/libs/common_services/src/index.ts | 2 + .../libs/common_services/src/repos/index.ts | 1 + .../src/repos/priorityLevelContext.repo.ts | 26 + .../emitters/dataSinkWorker.emitter.ts | 96 +++ .../src/services/emitters/index.ts | 7 + .../emitters/integrationDataWorker.emitter.ts | 40 ++ .../emitters/integrationRunWorker.emitter.ts | 91 +++ .../integrationStreamWorker.emitter.ts | 91 +++ .../integrationSyncWorker.emitter.ts} | 41 +- .../services/emitters/nodejsWorker.emitter.ts | 208 +++++++ .../emitters/searchSyncWorker.emitter.ts} | 161 ++++- .../common_services/src/services/index.ts | 2 + .../src/services/priority.service.ts | 219 +++++++ services/libs/common_services/tsconfig.json | 17 + services/libs/feature-flags/src/index.ts | 5 +- services/libs/integrations/src/types.ts | 18 +- services/libs/opensearch/src/apiClient.ts | 10 +- services/libs/sqs/src/config.ts | 15 +- services/libs/sqs/src/index.ts | 2 +- .../libs/sqs/src/instances/dataSinkWorker.ts | 47 -- services/libs/sqs/src/instances/index.ts | 7 - .../src/instances/integrationDataWorker.ts | 16 - .../sqs/src/instances/integrationRunWorker.ts | 53 -- .../src/instances/integrationStreamWorker.ts | 46 -- .../libs/sqs/src/instances/nodejsWorker.ts | 44 -- services/libs/sqs/src/prioritization.ts | 172 ++++++ 
services/libs/sqs/src/queue.ts | 7 +- services/libs/sqs/src/types.ts | 10 + services/libs/types/src/enums/featureFlags.ts | 1 + services/libs/types/src/enums/index.ts | 1 + services/libs/types/src/enums/priorities.ts | 14 + services/libs/types/src/queue/index.ts | 15 +- .../queue/integration_sync_worker/index.ts | 29 + .../types/src/queue/nodejs_worker/index.ts | 122 +++- 197 files changed, 3318 insertions(+), 3010 deletions(-) delete mode 100644 backend/src/bin/scripts/continue-run.ts delete mode 100644 backend/src/bin/scripts/process-integration.ts delete mode 100644 backend/src/bin/scripts/process-stream.ts delete mode 100644 backend/src/bin/scripts/process-webhook.ts delete mode 100644 backend/src/bin/worker/integrations.ts delete mode 100644 backend/src/database/initializers/twitterSourceIdsFixedTimestamps.ts create mode 100644 backend/src/database/migrations/U1701080323__tenant-priority-level.sql create mode 100644 backend/src/database/migrations/V1701080323__tenant-priority-level.sql create mode 100644 backend/src/database/repositories/priorityLevelContextRepository.ts delete mode 100644 backend/src/serverless/integrations/services/integrationRunProcessor.ts delete mode 100644 backend/src/serverless/integrations/services/integrationServiceBase.ts delete mode 100644 backend/src/serverless/integrations/services/webhookProcessor.ts delete mode 100644 backend/src/serverless/utils/nodeWorkerSQS.ts delete mode 100644 backend/src/types/mq/nodeWorkerIntegrationProcessMessage.ts delete mode 100644 backend/src/types/mq/nodeWorkerProcessWebhookMessage.ts create mode 100644 services/apps/webhook_api/src/middleware/emitters.ts create mode 100644 services/libs/common_services/.eslintrc.cjs create mode 100644 services/libs/common_services/.prettierignore create mode 100644 services/libs/common_services/.prettierrc.cjs create mode 100644 services/libs/common_services/package.json create mode 100644 services/libs/common_services/src/index.ts create mode 100644 services/libs/common_services/src/repos/index.ts create mode 100644 services/libs/common_services/src/repos/priorityLevelContext.repo.ts create mode 100644 services/libs/common_services/src/services/emitters/dataSinkWorker.emitter.ts create mode 100644 services/libs/common_services/src/services/emitters/index.ts create mode 100644 services/libs/common_services/src/services/emitters/integrationDataWorker.emitter.ts create mode 100644 services/libs/common_services/src/services/emitters/integrationRunWorker.emitter.ts create mode 100644 services/libs/common_services/src/services/emitters/integrationStreamWorker.emitter.ts rename services/libs/{sqs/src/instances/integrationSyncWorker.ts => common_services/src/services/emitters/integrationSyncWorker.emitter.ts} (74%) create mode 100644 services/libs/common_services/src/services/emitters/nodejsWorker.emitter.ts rename services/libs/{sqs/src/instances/searchSyncWorker.ts => common_services/src/services/emitters/searchSyncWorker.emitter.ts} (55%) create mode 100644 services/libs/common_services/src/services/index.ts create mode 100644 services/libs/common_services/src/services/priority.service.ts create mode 100644 services/libs/common_services/tsconfig.json delete mode 100644 services/libs/sqs/src/instances/dataSinkWorker.ts delete mode 100644 services/libs/sqs/src/instances/index.ts delete mode 100644 services/libs/sqs/src/instances/integrationDataWorker.ts delete mode 100644 services/libs/sqs/src/instances/integrationRunWorker.ts delete mode 100644 
services/libs/sqs/src/instances/integrationStreamWorker.ts delete mode 100644 services/libs/sqs/src/instances/nodejsWorker.ts create mode 100644 services/libs/sqs/src/prioritization.ts create mode 100644 services/libs/types/src/enums/priorities.ts

diff --git a/.github/actions/deploy-service/action.yaml b/.github/actions/deploy-service/action.yaml
index 922d93eefd..d97b98588b 100644
--- a/.github/actions/deploy-service/action.yaml
+++ b/.github/actions/deploy-service/action.yaml
@@ -14,6 +14,16 @@ inputs:
 description: To which cloud cluster to deploy
 required: true

+  prioritized:
+    description: Is the service listening on prioritized queues?
+    required: false
+    default: "false"
+
+  only_normal_queue_level:
+    description: Is the service prioritized but only listening on normal level? (staging/lf)
+    required: false
+    default: "false"
+
 runs:
 using: composite
 steps:
@@ -25,10 +35,25 @@ runs:
 AWS_SECRET_ACCESS_KEY: ${{ env.AWS_SECRET_ACCESS_KEY }}
 AWS_REGION: ${{ env.AWS_REGION }}

-      - name: Deploy image
+      - name: Deploy image (non prioritized)
+        if: inputs.prioritized == 'false'
 shell: bash
 run: kubectl set image deployments/${{ inputs.service }}-dpl ${{ inputs.service }}=${{ inputs.image }}

+      - name: Deploy image (prioritized)
+        if: inputs.prioritized == 'true' && inputs.only_normal_queue_level == 'false'
+        shell: bash
+        run: |
+          kubectl set image deployments/${{ inputs.service }}-system-dpl ${{ inputs.service }}-system=${{ inputs.image }}
+          kubectl set image deployments/${{ inputs.service }}-normal-dpl ${{ inputs.service }}-normal=${{ inputs.image }}
+          kubectl set image deployments/${{ inputs.service }}-high-dpl ${{ inputs.service }}-high=${{ inputs.image }}
+          kubectl set image deployments/${{ inputs.service }}-urgent-dpl ${{ inputs.service }}-urgent=${{ inputs.image }}
+
+      - name: Deploy image (prioritized - normal only)
+        if: inputs.prioritized == 'true' && inputs.only_normal_queue_level == 'true'
+        shell: bash
+        run: kubectl set image deployments/${{ inputs.service }}-normal-dpl ${{ inputs.service }}-normal=${{ inputs.image }}
+
 - uses: ./.github/actions/slack-notify
 with:
- message: 'Service *${{ inputs.service }}* was just deployed using docker image `${{ inputs.image }}`'
+ message: "Service *${{ inputs.service }}* was just deployed using docker image `${{ inputs.image }}`"

diff --git a/.github/workflows/lf-production-deploy-new.yaml b/.github/workflows/lf-production-deploy-new.yaml
index 4ce5052b2c..2cb8fa2c5c 100644
--- a/.github/workflows/lf-production-deploy-new.yaml
+++ b/.github/workflows/lf-production-deploy-new.yaml
@@ -8,9 +8,9 @@ on:
 required: true
 type: boolean
 deploy_search_sync_api:
- description: Deploy search-sync-api service?
- required: true
- type: boolean
+ description: Deploy search-sync-api service?
+ required: true
+ type: boolean
 deploy_integration_sync_worker:
 description: Deploy integration-sync-worker service?
required: true @@ -64,7 +64,7 @@ jobs: - name: Set docker image output id: image run: echo "IMAGE=${{ steps.image-builder.outputs.image }}" >> $GITHUB_OUTPUT - + build-and-push-search-sync-api: runs-on: ubuntu-latest if: ${{ inputs.deploy_search_sync_api }} @@ -208,6 +208,8 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push-search-sync-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true deploy-search-sync-api: needs: build-and-push-search-sync-api @@ -244,6 +246,8 @@ jobs: service: integration-sync-worker image: ${{ needs.build-and-push-integration-sync-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true deploy-webhook-api: needs: build-and-push-webhook-api diff --git a/.github/workflows/lf-production-deploy-original.yaml b/.github/workflows/lf-production-deploy-original.yaml index 3e5fd30a4a..f17493efce 100644 --- a/.github/workflows/lf-production-deploy-original.yaml +++ b/.github/workflows/lf-production-deploy-original.yaml @@ -237,6 +237,8 @@ jobs: service: nodejs-worker image: ${{ needs.build-and-push-backend.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true deploy-discord-ws: needs: build-and-push-backend @@ -291,6 +293,8 @@ jobs: service: integration-run-worker image: ${{ needs.build-and-push-integration-run-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true deploy-integration-stream-worker: needs: build-and-push-integration-stream-worker @@ -309,6 +313,8 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push-integration-stream-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true deploy-integration-data-worker: needs: build-and-push-integration-data-worker @@ -327,6 +333,8 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push-integration-data-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true deploy-data-sink-worker: needs: build-and-push-data-sink-worker @@ -345,6 +353,8 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push-data-sink-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true deploy-frontend: needs: build-and-push-frontend diff --git a/.github/workflows/lf-staging-deploy-backend.yaml b/.github/workflows/lf-staging-deploy-backend.yaml index 7cee20ac98..b857f1bf60 100644 --- a/.github/workflows/lf-staging-deploy-backend.yaml +++ b/.github/workflows/lf-staging-deploy-backend.yaml @@ -3,12 +3,12 @@ name: LF Staging Deploy Backend services on: push: branches: - - 'lf-staging/**' - - 'lf-staging-**' + - "lf-staging/**" + - "lf-staging-**" paths: - - 'backend/**' - - 'services/libs/**' - - '!backend/src/serverless/microservices/python/**' + - "backend/**" + - "services/libs/**" + - "!backend/src/serverless/microservices/python/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -76,6 +76,8 @@ jobs: service: nodejs-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true deploy-job-generator: needs: build-and-push diff --git a/.github/workflows/lf-staging-deploy-data-sink-worker.yaml b/.github/workflows/lf-staging-deploy-data-sink-worker.yaml index 3aa92424ba..bce35c6aba 100644 --- 
a/.github/workflows/lf-staging-deploy-data-sink-worker.yaml +++ b/.github/workflows/lf-staging-deploy-data-sink-worker.yaml @@ -3,11 +3,11 @@ name: LF Staging Deploy Data Sink Worker on: push: branches: - - 'lf-staging/**' - - 'lf-staging-**' + - "lf-staging/**" + - "lf-staging-**" paths: - - 'services/libs/**' - - 'services/apps/data_sink_worker/**' + - "services/libs/**" + - "services/apps/data_sink_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.github/workflows/lf-staging-deploy-integration-data-worker.yaml b/.github/workflows/lf-staging-deploy-integration-data-worker.yaml index a059404c0c..5342817544 100644 --- a/.github/workflows/lf-staging-deploy-integration-data-worker.yaml +++ b/.github/workflows/lf-staging-deploy-integration-data-worker.yaml @@ -3,11 +3,11 @@ name: LF Staging Deploy Integration Data Worker on: push: branches: - - 'lf-staging/**' - - 'lf-staging-**' + - "lf-staging/**" + - "lf-staging-**" paths: - - 'services/libs/**' - - 'services/apps/integration_data_worker/**' + - "services/libs/**" + - "services/apps/integration_data_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.github/workflows/lf-staging-deploy-integration-run-worker.yaml b/.github/workflows/lf-staging-deploy-integration-run-worker.yaml index c26285a4e8..64cf768a78 100644 --- a/.github/workflows/lf-staging-deploy-integration-run-worker.yaml +++ b/.github/workflows/lf-staging-deploy-integration-run-worker.yaml @@ -3,11 +3,11 @@ name: LF Staging Deploy Integration Run Worker on: push: branches: - - 'lf-staging/**' - - 'lf-staging-**' + - "lf-staging/**" + - "lf-staging-**" paths: - - 'services/libs/**' - - 'services/apps/integration_data_worker/**' + - "services/libs/**" + - "services/apps/integration_data_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: integration-run-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml b/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml index e69c76ced3..db9bfb61ee 100644 --- a/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml +++ b/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml @@ -3,11 +3,11 @@ name: LF Staging Deploy Integration Stream Worker on: push: branches: - - 'lf-staging/**' - - 'lf-staging-**' + - "lf-staging/**" + - "lf-staging-**" paths: - - 'services/libs/**' - - 'services/apps/integration_stream_worker/**' + - "services/libs/**" + - "services/apps/integration_stream_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.github/workflows/lf-staging-deploy-search-sync-worker.yaml b/.github/workflows/lf-staging-deploy-search-sync-worker.yaml index 022c152bd6..cb76c2839a 100644 --- 
a/.github/workflows/lf-staging-deploy-search-sync-worker.yaml +++ b/.github/workflows/lf-staging-deploy-search-sync-worker.yaml @@ -3,11 +3,11 @@ name: LF Staging Deploy Search Sync Worker on: push: branches: - - 'lf-staging/**' - - 'lf-staging-**' + - "lf-staging/**" + - "lf-staging-**" paths: - - 'services/libs/**' - - 'services/apps/search_sync_worker/**' + - "services/libs/**" + - "services/apps/search_sync_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.github/workflows/production-deploy-new.yaml b/.github/workflows/production-deploy-new.yaml index 25baf153c2..421b769bfc 100644 --- a/.github/workflows/production-deploy-new.yaml +++ b/.github/workflows/production-deploy-new.yaml @@ -64,7 +64,7 @@ jobs: - name: Set docker image output id: image run: echo "IMAGE=${{ steps.image-builder.outputs.image }}" >> $GITHUB_OUTPUT - + build-and-push-search-sync-api: runs-on: ubuntu-latest if: ${{ inputs.deploy_search_sync_api }} @@ -208,6 +208,7 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push-search-sync-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + prioritized: true deploy-search-sync-api: needs: build-and-push-search-sync-api @@ -244,6 +245,7 @@ jobs: service: integration-sync-worker image: ${{ needs.build-and-push-integration-sync-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + prioritized: true deploy-webhook-api: needs: build-and-push-webhook-api diff --git a/.github/workflows/production-deploy-original.yaml b/.github/workflows/production-deploy-original.yaml index 195cf84c20..be5f892bf4 100644 --- a/.github/workflows/production-deploy-original.yaml +++ b/.github/workflows/production-deploy-original.yaml @@ -237,6 +237,7 @@ jobs: service: nodejs-worker image: ${{ needs.build-and-push-backend.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + prioritized: true deploy-discord-ws: needs: build-and-push-backend @@ -291,6 +292,7 @@ jobs: service: integration-run-worker image: ${{ needs.build-and-push-integration-run-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + prioritized: true deploy-integration-stream-worker: needs: build-and-push-integration-stream-worker @@ -309,6 +311,7 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push-integration-stream-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + prioritized: true deploy-integration-data-worker: needs: build-and-push-integration-data-worker @@ -327,6 +330,7 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push-integration-data-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + prioritized: true deploy-data-sink-worker: needs: build-and-push-data-sink-worker @@ -345,6 +349,7 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push-data-sink-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + prioritized: true deploy-frontend: needs: build-and-push-frontend diff --git a/.github/workflows/staging-deploy-backend.yaml b/.github/workflows/staging-deploy-backend.yaml index b8c6f73d6d..833ba4ce83 100644 --- a/.github/workflows/staging-deploy-backend.yaml +++ b/.github/workflows/staging-deploy-backend.yaml @@ -3,12 +3,12 @@ name: Staging Deploy Backend services on: push: branches: - - 'staging/**' - - 'staging-**' + - "staging/**" + - "staging-**" paths: - - 'backend/**' - - 'services/libs/**' - - 
'!backend/src/serverless/microservices/python/**' + - "backend/**" + - "services/libs/**" + - "!backend/src/serverless/microservices/python/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -76,6 +76,8 @@ jobs: service: nodejs-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + prioritized: true + only_normal_queue_level: true deploy-job-generator: needs: build-and-push diff --git a/.github/workflows/staging-deploy-data-sink-worker.yaml b/.github/workflows/staging-deploy-data-sink-worker.yaml index 3f9e32557e..1e6e1fda67 100644 --- a/.github/workflows/staging-deploy-data-sink-worker.yaml +++ b/.github/workflows/staging-deploy-data-sink-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Data Sink Worker on: push: branches: - - 'staging/**' - - 'staging-**' + - "staging/**" + - "staging-**" paths: - - 'services/libs/**' - - 'services/apps/data_sink_worker/**' + - "services/libs/**" + - "services/apps/data_sink_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.github/workflows/staging-deploy-integration-data-worker.yaml b/.github/workflows/staging-deploy-integration-data-worker.yaml index 74b9383984..bb1c26b2d6 100644 --- a/.github/workflows/staging-deploy-integration-data-worker.yaml +++ b/.github/workflows/staging-deploy-integration-data-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Integration Data Worker on: push: branches: - - 'staging/**' - - 'staging-**' + - "staging/**" + - "staging-**" paths: - - 'services/libs/**' - - 'services/apps/integration_data_worker/**' + - "services/libs/**" + - "services/apps/integration_data_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.github/workflows/staging-deploy-integration-run-worker.yaml b/.github/workflows/staging-deploy-integration-run-worker.yaml index 2f733da9bd..35fac4eb7a 100644 --- a/.github/workflows/staging-deploy-integration-run-worker.yaml +++ b/.github/workflows/staging-deploy-integration-run-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Integration Run Worker on: push: branches: - - 'staging/**' - - 'staging-**' + - "staging/**" + - "staging-**" paths: - - 'services/libs/**' - - 'services/apps/integration_data_worker/**' + - "services/libs/**" + - "services/apps/integration_data_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: integration-run-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.github/workflows/staging-deploy-integration-stream-worker.yaml b/.github/workflows/staging-deploy-integration-stream-worker.yaml index d7500f1d3a..8176728bfb 100644 --- a/.github/workflows/staging-deploy-integration-stream-worker.yaml +++ b/.github/workflows/staging-deploy-integration-stream-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Integration Stream Worker on: push: branches: - - 'staging/**' - - 'staging-**' + - "staging/**" + - "staging-**" paths: - - 'services/libs/**' - - 'services/apps/integration_stream_worker/**' + - "services/libs/**" + - 
"services/apps/integration_stream_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.github/workflows/staging-deploy-integration-sync-worker.yaml b/.github/workflows/staging-deploy-integration-sync-worker.yaml index e8674313c6..55fcad8c5a 100644 --- a/.github/workflows/staging-deploy-integration-sync-worker.yaml +++ b/.github/workflows/staging-deploy-integration-sync-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Integration Sync Worker on: push: branches: - - 'staging/**' - - 'staging-**' + - "staging/**" + - "staging-**" paths: - - 'services/libs/**' - - 'services/apps/integration_sync_worker/**' + - "services/libs/**" + - "services/apps/integration_sync_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: integration-sync-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.github/workflows/staging-deploy-search-sync-worker.yaml b/.github/workflows/staging-deploy-search-sync-worker.yaml index 21e59a4da7..3607b0c0ca 100644 --- a/.github/workflows/staging-deploy-search-sync-worker.yaml +++ b/.github/workflows/staging-deploy-search-sync-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Search Sync Worker on: push: branches: - - 'staging/**' - - 'staging-**' + - "staging/**" + - "staging-**" paths: - - 'services/libs/**' - - 'services/apps/search_sync_worker/**' + - "services/libs/**" + - "services/apps/search_sync_worker/**" env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,3 +58,5 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + only_normal_queue_level: true + prioritized: true diff --git a/.gitignore b/.gitignore index fc6c23bc76..c45d95311e 100644 --- a/.gitignore +++ b/.gitignore @@ -51,4 +51,4 @@ docker/volume services/libs/*/dist -**/.cubestore \ No newline at end of file +**/.cubestore diff --git a/backend/.env.dist.composed b/backend/.env.dist.composed index 3f548385e6..75bcfb7a47 100644 --- a/backend/.env.dist.composed +++ b/backend/.env.dist.composed @@ -1,8 +1,7 @@ # SQS settings CROWD_SQS_HOST="sqs" CROWD_SQS_ENDPOINT=http://sqs:9324 -CROWD_SQS_NODEJS_WORKER_QUEUE="http://sqs:9324/000000000000/nodejs-worker.fifo" -CROWD_SQS_NODEJS_WORKER_DELAYABLE_QUEUE=http://sqs:9324/000000000000/nodejs-worker +CROWD_SQS_NODEJS_WORKER_QUEUE="http://sqs:9324/000000000000/nodejs-worker-normal.fifo" CROWD_SQS_PYTHON_WORKER_QUEUE="http://sqs:9324/000000000000/python-worker.fifo" # Redis settings diff --git a/backend/.env.dist.local b/backend/.env.dist.local index 7d9a3815a2..246da4d990 100755 --- a/backend/.env.dist.local +++ b/backend/.env.dist.local @@ -2,6 +2,7 @@ KUBE_MODE=1 CROWD_EDITION=community TENANT_MODE=multi +QUEUE_PRIORITY_LEVEL=normal # API settings CROWD_API_URL=https://localhost/api @@ -14,8 +15,7 @@ CROWD_API_JWT_EXPIRES_IN='100 years' CROWD_SQS_HOST=localhost CROWD_SQS_PORT=9324 CROWD_SQS_ENDPOINT=http://localhost:9324 -CROWD_SQS_NODEJS_WORKER_QUEUE=http://localhost:9324/000000000000/nodejs-worker.fifo -CROWD_SQS_NODEJS_WORKER_DELAYABLE_QUEUE=http://localhost:9324/000000000000/nodejs-worker +CROWD_SQS_NODEJS_WORKER_QUEUE=http://localhost:9324/000000000000/nodejs-worker-normal.fifo 
CROWD_SQS_PYTHON_WORKER_QUEUE=http://localhost:9324/000000000000/python-worker.fifo CROWD_SQS_AWS_ACCOUNT_ID=000000000000 CROWD_SQS_AWS_ACCESS_KEY_ID=x diff --git a/backend/config/custom-environment-variables.json b/backend/config/custom-environment-variables.json index 185844c2f3..11381c299a 100644 --- a/backend/config/custom-environment-variables.json +++ b/backend/config/custom-environment-variables.json @@ -17,7 +17,6 @@ "host": "CROWD_SQS_HOST", "port": "CROWD_SQS_PORT", "nodejsWorkerQueue": "CROWD_SQS_NODEJS_WORKER_QUEUE", - "nodejsWorkerDelayableQueue": "CROWD_SQS_NODEJS_WORKER_DELAYABLE_QUEUE", "integrationRunWorkerQueue": "CROWD_SQS_INTEGRATION_RUN_WORKER_QUEUE", "pythonWorkerQueue": "CROWD_SQS_PYTHON_WORKER_QUEUE", "aws": { diff --git a/backend/package.json b/backend/package.json index 9194823c55..49bb3280f5 100644 --- a/backend/package.json +++ b/backend/package.json @@ -31,11 +31,8 @@ "format": "prettier --write .", "format-check": "prettier --check .", "tsc-check": "tsc --noEmit", - "script:process-integration": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/process-integration.ts", - "script:process-stream": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/process-stream.ts", "script:continue-run": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/continue-run.ts", "script:change-tenant-plan": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/change-tenant-plan.ts", - "script:process-webhook": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/process-webhook.ts", "script:trigger-webhook": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/trigger-webhook.ts", "script:send-weekly-analytics-email": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/send-weekly-analytics-email.ts", "script:unleash-init": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/unleash-init.ts", @@ -68,6 +65,7 @@ "@crowd/temporal": "file:../services/libs/temporal", "@crowd/tracing": "file:../services/libs/tracing", "@crowd/types": "file:../services/libs/types", + "@crowd/common_services": "file:../services/libs/common_services", "@cubejs-client/core": "^0.30.4", "@google-cloud/storage": "5.3.0", "@octokit/auth-app": "^3.6.1", diff --git a/backend/src/api/premium/enrichment/memberEnrichBulk.ts b/backend/src/api/premium/enrichment/memberEnrichBulk.ts index c385fa6204..6485d93ccb 100644 --- a/backend/src/api/premium/enrichment/memberEnrichBulk.ts +++ b/backend/src/api/premium/enrichment/memberEnrichBulk.ts @@ -5,11 +5,11 @@ import { FeatureFlag, FeatureFlagRedisKey } from '@crowd/types' import { getSecondsTillEndOfMonth } from '../../../utils/timing' import Permissions from '../../../security/permissions' import identifyTenant from '../../../segment/identifyTenant' -import { sendBulkEnrichMessage } from '../../../serverless/utils/nodeWorkerSQS' import PermissionChecker from '../../../services/user/permissionChecker' import track from '../../../segment/track' import { PLAN_LIMITS } from '../../../feature-flags/isFeatureEnabled' import SequelizeRepository from '../../../database/repositories/sequelizeRepository' +import 
{ getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS' const log = getServiceLogger() @@ -54,7 +54,8 @@ export default async (req, res) => { ) // send the message - await sendBulkEnrichMessage(tenant, membersToEnrich, segmentIds) + const emitter = await getNodejsWorkerEmitter() + await emitter.bulkEnrich(tenant, membersToEnrich, segmentIds) // update enrichment count, we'll also check failed enrichments and deduct these from grand total in bulkEnrichmentWorker const secondsRemainingUntilEndOfMonth = getSecondsTillEndOfMonth() diff --git a/backend/src/bin/jobs/checkStuckIntegrationRuns.ts b/backend/src/bin/jobs/checkStuckIntegrationRuns.ts index 3c859c29ec..9b26846fcc 100644 --- a/backend/src/bin/jobs/checkStuckIntegrationRuns.ts +++ b/backend/src/bin/jobs/checkStuckIntegrationRuns.ts @@ -1,20 +1,15 @@ -import { processPaginated } from '@crowd/common' import { Logger, getChildLogger, getServiceChildLogger } from '@crowd/logging' +import { IntegrationRunState } from '@crowd/types' import cronGenerator from 'cron-time-generator' import moment from 'moment' -import { IntegrationRunState } from '@crowd/types' import { INTEGRATION_PROCESSING_CONFIG } from '../../conf' -import IncomingWebhookRepository from '../../database/repositories/incomingWebhookRepository' import IntegrationRepository from '../../database/repositories/integrationRepository' import IntegrationRunRepository from '../../database/repositories/integrationRunRepository' import IntegrationStreamRepository from '../../database/repositories/integrationStreamRepository' import SequelizeRepository from '../../database/repositories/sequelizeRepository' -import { sendNodeWorkerMessage } from '../../serverless/utils/nodeWorkerSQS' import { IntegrationRun } from '../../types/integrationRunTypes' import { IntegrationStreamState } from '../../types/integrationStreamTypes' import { CrowdJob } from '../../types/jobTypes' -import { NodeWorkerProcessWebhookMessage } from '../../types/mq/nodeWorkerProcessWebhookMessage' -import { WebhookProcessor } from '../../serverless/integrations/services/webhookProcessor' const log = getServiceChildLogger('checkStuckIntegrationRuns') @@ -221,32 +216,6 @@ export const checkRuns = async (): Promise => { } } -export const checkStuckWebhooks = async (): Promise => { - const dbOptions = await SequelizeRepository.getDefaultIRepositoryOptions() - const repo = new IncomingWebhookRepository(dbOptions) - - // update retryable error state webhooks to pending state - let errorWebhooks = await repo.findError(1, 20, WebhookProcessor.MAX_RETRY_LIMIT) - - while (errorWebhooks.length > 0) { - await repo.markAllPending(errorWebhooks.map((w) => w.id)) - errorWebhooks = await repo.findError(1, 20, WebhookProcessor.MAX_RETRY_LIMIT) - } - - await processPaginated( - async (page) => repo.findPending(page, 20), - async (webhooks) => { - for (const webhook of webhooks) { - log.warn({ id: webhook.id }, 'Found stuck webhook! 
Restarting it!') - await sendNodeWorkerMessage( - webhook.tenantId, - new NodeWorkerProcessWebhookMessage(webhook.tenantId, webhook.id), - ) - } - }, - ) -} - const job: CrowdJob = { name: 'Detect & Fix Stuck Integration Runs', cronTime: cronGenerator.every(90).minutes(), @@ -254,7 +223,7 @@ const job: CrowdJob = { if (!running) { running = true try { - await Promise.all([checkRuns(), checkStuckIntegrations(), checkStuckWebhooks()]) + await Promise.all([checkRuns(), checkStuckIntegrations()]) } finally { running = false } diff --git a/backend/src/bin/jobs/integrationDataChecker.ts b/backend/src/bin/jobs/integrationDataChecker.ts index 8568b2e1fe..32a1600db9 100644 --- a/backend/src/bin/jobs/integrationDataChecker.ts +++ b/backend/src/bin/jobs/integrationDataChecker.ts @@ -1,8 +1,6 @@ +import { getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS' import SequelizeRepository from '../../database/repositories/sequelizeRepository' import { CrowdJob } from '../../types/jobTypes' -import { sendNodeWorkerMessage } from '../../serverless/utils/nodeWorkerSQS' -import { NodeWorkerMessageType } from '../../serverless/types/workerTypes' -import { NodeWorkerMessageBase } from '../../types/mq/nodeWorkerMessageBase' const job: CrowdJob = { name: 'Integration Data Checker', @@ -17,13 +15,9 @@ const job: CrowdJob = { }, }) + const emitter = await getNodejsWorkerEmitter() for (const integration of integrations) { - await sendNodeWorkerMessage(integration.id, { - tenantId: integration.tenantId, - type: NodeWorkerMessageType.NODE_MICROSERVICE, - integrationId: integration.id, - service: 'integration-data-checker', - } as NodeWorkerMessageBase) + await emitter.integrationDataChecker(integration.tenantId, integration.id) } }, } diff --git a/backend/src/bin/jobs/mergeSuggestions.ts b/backend/src/bin/jobs/mergeSuggestions.ts index 8cbc69b1cf..e646cb10fa 100644 --- a/backend/src/bin/jobs/mergeSuggestions.ts +++ b/backend/src/bin/jobs/mergeSuggestions.ts @@ -1,10 +1,8 @@ -import cronGenerator from 'cron-time-generator' import { timeout } from '@crowd/common' +import cronGenerator from 'cron-time-generator' +import { getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS' import TenantService from '../../services/tenantService' import { CrowdJob } from '../../types/jobTypes' -import { sendNodeWorkerMessage } from '../../serverless/utils/nodeWorkerSQS' -import { NodeWorkerMessageType } from '../../serverless/types/workerTypes' -import { NodeWorkerMessageBase } from '../../types/mq/nodeWorkerMessageBase' const job: CrowdJob = { name: 'Merge suggestions', @@ -12,12 +10,10 @@ const job: CrowdJob = { cronTime: cronGenerator.every(12).hours(), onTrigger: async () => { const tenants = await TenantService._findAndCountAllForEveryUser({}) + const emitter = await getNodejsWorkerEmitter() + for (const tenant of tenants.rows) { - await sendNodeWorkerMessage(tenant.id, { - type: NodeWorkerMessageType.NODE_MICROSERVICE, - tenant: tenant.id, - service: 'merge-suggestions', - } as NodeWorkerMessageBase) + await emitter.mergeSuggestions(tenant.id) await timeout(300) } diff --git a/backend/src/bin/jobs/organizationEnricher.ts b/backend/src/bin/jobs/organizationEnricher.ts index 4d0eb48ffa..a9a4b8b357 100644 --- a/backend/src/bin/jobs/organizationEnricher.ts +++ b/backend/src/bin/jobs/organizationEnricher.ts @@ -1,11 +1,9 @@ -import cronGenerator from 'cron-time-generator' import { getServiceLogger } from '@crowd/logging' +import cronGenerator from 'cron-time-generator' +import { getNodejsWorkerEmitter } from 
'@/serverless/utils/serviceSQS' import SequelizeRepository from '../../database/repositories/sequelizeRepository' -import { CrowdJob } from '../../types/jobTypes' -import { sendNodeWorkerMessage } from '../../serverless/utils/nodeWorkerSQS' -import { NodeWorkerMessageBase } from '../../types/mq/nodeWorkerMessageBase' -import { NodeWorkerMessageType } from '../../serverless/types/workerTypes' import TenantRepository from '../../database/repositories/tenantRepository' +import { CrowdJob } from '../../types/jobTypes' const job: CrowdJob = { name: 'organization enricher', @@ -18,14 +16,10 @@ async function sendWorkerMessage() { const log = getServiceLogger() const tenants = await TenantRepository.getPayingTenantIds(options) log.info(tenants) + + const emitter = await getNodejsWorkerEmitter() for (const { id } of tenants) { - const payload = { - type: NodeWorkerMessageType.NODE_MICROSERVICE, - service: 'enrich-organizations', - tenantId: id, - } as NodeWorkerMessageBase - log.info({ payload }, 'enricher worker payload') - await sendNodeWorkerMessage(id, payload) + await emitter.enrichOrganizations(id) } } diff --git a/backend/src/bin/jobs/refreshSampleData.ts b/backend/src/bin/jobs/refreshSampleData.ts index efed37c975..62e16bad3e 100644 --- a/backend/src/bin/jobs/refreshSampleData.ts +++ b/backend/src/bin/jobs/refreshSampleData.ts @@ -1,17 +1,13 @@ +import { getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS' import { CrowdJob } from '../../types/jobTypes' -import { sendNodeWorkerMessage } from '../../serverless/utils/nodeWorkerSQS' -import { NodeWorkerMessageType } from '../../serverless/types/workerTypes' -import { NodeWorkerMessageBase } from '../../types/mq/nodeWorkerMessageBase' const job: CrowdJob = { name: 'Refresh sample data', // every day cronTime: '0 0 * * *', onTrigger: async () => { - await sendNodeWorkerMessage('refresh-sample-data', { - type: NodeWorkerMessageType.NODE_MICROSERVICE, - service: 'refresh-sample-data', - } as NodeWorkerMessageBase) + const emitter = await getNodejsWorkerEmitter() + await emitter.refreshSampleData() }, } diff --git a/backend/src/bin/nodejs-worker.ts b/backend/src/bin/nodejs-worker.ts index ce29a53699..fe1c82591f 100644 --- a/backend/src/bin/nodejs-worker.ts +++ b/backend/src/bin/nodejs-worker.ts @@ -1,33 +1,27 @@ import { timeout } from '@crowd/common' import { Logger, getChildLogger, getServiceLogger } from '@crowd/logging' +import { RedisClient, getRedisClient } from '@crowd/redis' import { SqsDeleteMessageRequest, SqsMessage, SqsReceiveMessageRequest, deleteMessage, receiveMessage, - sendMessage, } from '@crowd/sqs' -import { SpanStatusCode, getServiceTracer } from '@crowd/tracing' -import moment from 'moment' -import { getRedisClient, RedisClient } from '@crowd/redis' -import { Sequelize, QueryTypes } from 'sequelize' import fs from 'fs' import path from 'path' +import { QueryTypes, Sequelize } from 'sequelize' import telemetry from '@crowd/telemetry' +import { SQS_CLIENT, getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS' +import { databaseInit } from '@/database/databaseConnection' import { REDIS_CONFIG, SQS_CONFIG } from '../conf' import { processDbOperationsMessage } from '../serverless/dbOperations/workDispatcher' import { processNodeMicroserviceMessage } from '../serverless/microservices/nodejs/workDispatcher' import { NodeWorkerMessageType } from '../serverless/types/workerTypes' -import { sendNodeWorkerMessage } from '../serverless/utils/nodeWorkerSQS' import { NodeWorkerMessageBase } from 
'../types/mq/nodeWorkerMessageBase'
-import { processIntegration, processWebhook } from './worker/integrations'
-import { SQS_CLIENT } from '@/serverless/utils/serviceSQS'
-import { databaseInit } from '@/database/databaseConnection'
 
 /* eslint-disable no-constant-condition */
 
-const tracer = getServiceTracer()
 const serviceLogger = getServiceLogger()
 
 let exiting = false
@@ -39,12 +33,9 @@ process.on('SIGTERM', async () => {
   exiting = true
 })
 
-const receive = async (delayed?: boolean): Promise<SqsMessage | undefined> => {
+const receive = async (): Promise<SqsMessage | undefined> => {
   const params: SqsReceiveMessageRequest = {
-    QueueUrl: delayed ? SQS_CONFIG.nodejsWorkerDelayableQueue : SQS_CONFIG.nodejsWorkerQueue,
-    MessageAttributeNames: !delayed
-      ? undefined
-      : ['remainingDelaySeconds', 'tenantId', 'targetQueueUrl'],
+    QueueUrl: SQS_CONFIG.nodejsWorkerQueue,
   }
 
   const messages = await receiveMessage(SQS_CLIENT(), params)
@@ -56,83 +47,15 @@ const receive = async (delayed?: boolean): Promise<SqsMessage | undefined> => {
   return undefined
 }
 
-const removeFromQueue = (receiptHandle: string, delayed?: boolean): Promise<void> => {
+const removeFromQueue = (receiptHandle: string): Promise<void> => {
   const params: SqsDeleteMessageRequest = {
-    QueueUrl: delayed ? SQS_CONFIG.nodejsWorkerDelayableQueue : SQS_CONFIG.nodejsWorkerQueue,
+    QueueUrl: SQS_CONFIG.nodejsWorkerQueue,
     ReceiptHandle: receiptHandle,
   }
 
   return deleteMessage(SQS_CLIENT(), params)
 }
 
-async function handleDelayedMessages() {
-  const delayedHandlerLogger = getChildLogger('delayedMessages', serviceLogger, {
-    queue: SQS_CONFIG.nodejsWorkerDelayableQueue,
-  })
-  delayedHandlerLogger.info('Listening for delayed messages!')
-
-  // noinspection InfiniteLoopJS
-  while (!exiting) {
-    const message = await receive(true)
-
-    if (message) {
-      await tracer.startActiveSpan('ProcessDelayedMessage', async (span) => {
-        try {
-          const msg: NodeWorkerMessageBase = JSON.parse(message.Body)
-          const messageLogger = getChildLogger('messageHandler', serviceLogger, {
-            messageId: message.MessageId,
-            type: msg.type,
-          })
-
-          if (message.MessageAttributes && message.MessageAttributes.remainingDelaySeconds) {
-            // re-delay
-            const newDelay = parseInt(
-              message.MessageAttributes.remainingDelaySeconds.StringValue,
-              10,
-            )
-            const tenantId = message.MessageAttributes.tenantId.StringValue
-            messageLogger.debug({ newDelay, tenantId }, 'Re-delaying message!')
-            await sendNodeWorkerMessage(tenantId, msg, newDelay)
-          } else {
-            // just emit to the normal queue for processing
-            const tenantId = message.MessageAttributes.tenantId.StringValue
-
-            if (message.MessageAttributes.targetQueueUrl) {
-              const targetQueueUrl = message.MessageAttributes.targetQueueUrl.StringValue
-              messageLogger.debug({ tenantId, targetQueueUrl }, 'Successfully delayed a message!')
-              await sendMessage(SQS_CLIENT(), {
-                QueueUrl: targetQueueUrl,
-                MessageGroupId: tenantId,
-                MessageDeduplicationId: `${tenantId}-${moment().valueOf()}`,
-                MessageBody: JSON.stringify(msg),
-              })
-            } else {
-              messageLogger.debug({ tenantId }, 'Successfully delayed a message!')
-              await sendNodeWorkerMessage(tenantId, msg)
-            }
-          }
-
-          await removeFromQueue(message.ReceiptHandle, true)
-          span.setStatus({
-            code: SpanStatusCode.OK,
-          })
-        } catch (err) {
-          span.setStatus({
-            code: SpanStatusCode.ERROR,
-            message: err,
-          })
-        } finally {
-          span.end()
-        }
-      })
-    } else {
-      delayedHandlerLogger.trace('No message received!')
-    }
-  }
-
-  delayedHandlerLogger.warn('Exiting!')
-}
-
 let processingMessages = 0
 const isWorkerAvailable = (): boolean => processingMessages <= 3
 const addWorkerJob = (): void => {
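Note: together, the hunks above and below complete the queue simplification. Producers no longer build NodeWorkerMessageBase payloads by hand or route them through the delayable queue; they call typed methods on the nodejs-worker emitter instead. A minimal sketch of the producer-side pattern, using the emitter methods this patch already calls elsewhere (the cron wiring and tenant id are illustrative only, not part of this change):

import cronGenerator from 'cron-time-generator'
import { getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS'
import { CrowdJob } from '../../types/jobTypes'

// hypothetical job showing the new publish pattern
const exampleJob: CrowdJob = {
  name: 'Example emitter-based job',
  cronTime: cronGenerator.every(12).hours(),
  onTrigger: async () => {
    // the emitter hides queue URLs and message shapes; callers invoke one
    // typed method per message kind (bulkEnrich, mergeSuggestions, ...)
    const emitter = await getNodejsWorkerEmitter()
    await emitter.mergeSuggestions('some-tenant-id')
  },
}

export default exampleJob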
@@ -176,18 +99,12 @@ async function handleMessages() {
     let processFunction: (msg: NodeWorkerMessageBase, logger?: Logger) => Promise<void>
 
     switch (msg.type) {
-      case NodeWorkerMessageType.INTEGRATION_PROCESS:
-        processFunction = processIntegration
-        break
       case NodeWorkerMessageType.NODE_MICROSERVICE:
         processFunction = processNodeMicroserviceMessage
         break
       case NodeWorkerMessageType.DB_OPERATIONS:
         processFunction = processDbOperationsMessage
         break
-      case NodeWorkerMessageType.PROCESS_WEBHOOK:
-        processFunction = processWebhook
-        break
 
       default:
         messageLogger.error('Error while parsing queue message! Invalid type.')
@@ -262,8 +179,9 @@ const initRedisSeq = async () => {
 
 setImmediate(async () => {
   await initRedisSeq()
-  const promises = [handleMessages(), handleDelayedMessages()]
-  await Promise.all(promises)
+
+  await getNodejsWorkerEmitter()
+  await handleMessages()
 })
 
 const liveFilePath = path.join(__dirname, 'tmp/nodejs-worker-live.tmp')
diff --git a/backend/src/bin/scripts/continue-run.ts b/backend/src/bin/scripts/continue-run.ts
deleted file mode 100644
index af5dd5be49..0000000000
--- a/backend/src/bin/scripts/continue-run.ts
+++ /dev/null
@@ -1,103 +0,0 @@
-import commandLineArgs from 'command-line-args'
-import commandLineUsage from 'command-line-usage'
-import * as fs from 'fs'
-import path from 'path'
-import { getServiceLogger } from '@crowd/logging'
-import { IntegrationRunState } from '@crowd/types'
-import SequelizeRepository from '../../database/repositories/sequelizeRepository'
-import { sendNodeWorkerMessage } from '../../serverless/utils/nodeWorkerSQS'
-import { NodeWorkerIntegrationProcessMessage } from '../../types/mq/nodeWorkerIntegrationProcessMessage'
-import IntegrationRunRepository from '../../database/repositories/integrationRunRepository'
-
-/* eslint-disable no-console */
-
-const banner = fs.readFileSync(path.join(__dirname, 'banner.txt'), 'utf8')
-
-const log = getServiceLogger()
-
-const options = [
-  {
-    name: 'run',
-    alias: 'r',
-    typeLabel: '{underline runId}',
-    type: String,
-    description:
-      'The unique ID of integration run that you would like to continue processing. 
Use comma delimiter when sending multiple integration runs.', - }, - { - name: 'disableFiringCrowdWebhooks', - alias: 'd', - typeLabel: '{underline disableFiringCrowdWebhooks}', - type: Boolean, - defaultOption: false, - description: 'Should it disable firing outgoing crowd webhooks?', - }, - { - name: 'help', - alias: 'h', - type: Boolean, - description: 'Print this usage guide.', - }, -] -const sections = [ - { - content: banner, - raw: true, - }, - { - header: 'Continue Processing Integration Run', - content: 'Trigger processing of integration run.', - }, - { - header: 'Options', - optionList: options, - }, -] - -const usage = commandLineUsage(sections) -const parameters = commandLineArgs(options) - -if (parameters.help && !parameters.run) { - console.log(usage) -} else { - setImmediate(async () => { - const options = await SequelizeRepository.getDefaultIRepositoryOptions() - - const fireCrowdWebhooks = !parameters.disableFiringCrowdWebhooks - - const runRepo = new IntegrationRunRepository(options) - - const runIds = parameters.run.split(',') - for (const runId of runIds) { - const run = await runRepo.findById(runId) - - if (!run) { - log.error({ runId }, 'Integration run not found!') - process.exit(1) - } else { - await log.info({ runId }, 'Integration run found - triggering SQS message!') - - if (run.state !== IntegrationRunState.PENDING) { - log.warn( - { currentState: run.state }, - `Setting integration state to ${IntegrationRunState.PENDING}!`, - ) - await runRepo.restart(run.id) - } - - if (!fireCrowdWebhooks) { - log.info( - 'fireCrowdWebhooks is false - This continue-run will not trigger outgoing crowd webhooks!', - ) - } - - await sendNodeWorkerMessage( - run.tenantId, - new NodeWorkerIntegrationProcessMessage(run.id, null, fireCrowdWebhooks), - ) - } - } - - process.exit(0) - }) -} diff --git a/backend/src/bin/scripts/enrich-members-and-organizations.ts b/backend/src/bin/scripts/enrich-members-and-organizations.ts index 0943cc3045..ea89bbf40e 100644 --- a/backend/src/bin/scripts/enrich-members-and-organizations.ts +++ b/backend/src/bin/scripts/enrich-members-and-organizations.ts @@ -1,16 +1,14 @@ +import { getServiceLogger } from '@crowd/logging' import commandLineArgs from 'command-line-args' import commandLineUsage from 'command-line-usage' import * as fs from 'fs' import path from 'path' -import { getServiceLogger } from '@crowd/logging' -import SequelizeRepository from '@/database/repositories/sequelizeRepository' +import { IRepositoryOptions } from '@/database/repositories/IRepositoryOptions' import MemberRepository from '@/database/repositories/memberRepository' -import { sendBulkEnrichMessage, sendNodeWorkerMessage } from '@/serverless/utils/nodeWorkerSQS' import OrganizationRepository from '@/database/repositories/organizationRepository' -import { NodeWorkerMessageType } from '@/serverless/types/workerTypes' -import { NodeWorkerMessageBase } from '@/types/mq/nodeWorkerMessageBase' +import SequelizeRepository from '@/database/repositories/sequelizeRepository' import getUserContext from '@/database/utils/getUserContext' -import { IRepositoryOptions } from '@/database/repositories/IRepositoryOptions' +import { getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS' import SegmentService from '@/services/segmentService' /* eslint-disable no-console */ @@ -80,6 +78,7 @@ if (parameters.help || (!parameters.tenant && (!parameters.organization || !para const enrichMembers = parameters.member const enrichOrganizations = parameters.organization const limit = 1000 + const 
emitter = await getNodejsWorkerEmitter() for (const tenantId of tenantIds) { const options = await SequelizeRepository.getDefaultIRepositoryOptions() @@ -112,7 +111,7 @@ if (parameters.help || (!parameters.tenant && (!parameters.organization || !para if (enrichMembers) { if (parameters.memberIds) { const memberIds = parameters.memberIds.split(',') - await sendBulkEnrichMessage(tenantId, memberIds, segmentIds, false, true) + await emitter.bulkEnrich(tenantId, memberIds, segmentIds, false, true) log.info( { tenantId }, `Enrichment message for ${memberIds.length} sent to nodejs-worker!`, @@ -134,7 +133,7 @@ if (parameters.help || (!parameters.tenant && (!parameters.organization || !para optionsWithTenant, ) - await sendBulkEnrichMessage(tenantId, memberIds, segmentIds, false, true) + await emitter.bulkEnrich(tenantId, memberIds, segmentIds, false, true) offset += limit } while (totalMembers > offset) @@ -150,16 +149,7 @@ if (parameters.help || (!parameters.tenant && (!parameters.organization || !para log.info({ tenantId }, `Total organizations found in the tenant: ${totalOrganizations}`) - const payload = { - type: NodeWorkerMessageType.NODE_MICROSERVICE, - service: 'enrich-organizations', - tenantId, - // Since there is no pagination implemented for the organizations enrichment, - // we set a limit of 10,000 to ensure all organizations are included when enriched in bulk. - maxEnrichLimit: 10000, - } as NodeWorkerMessageBase - - await sendNodeWorkerMessage(tenantId, payload) + await emitter.enrichOrganizations(tenantId, 10000) log.info( { tenantId }, `Organizations enrichment operation finished for tenant ${tenantId}`, diff --git a/backend/src/bin/scripts/generate-merge-suggestions.ts b/backend/src/bin/scripts/generate-merge-suggestions.ts index 911f5fc184..4d96f1a3e1 100644 --- a/backend/src/bin/scripts/generate-merge-suggestions.ts +++ b/backend/src/bin/scripts/generate-merge-suggestions.ts @@ -2,9 +2,7 @@ import commandLineArgs from 'command-line-args' import commandLineUsage from 'command-line-usage' import * as fs from 'fs' import path from 'path' -import { sendNodeWorkerMessage } from '../../serverless/utils/nodeWorkerSQS' -import { NodeWorkerMessageType } from '../../serverless/types/workerTypes' -import { NodeWorkerMessageBase } from '@/types/mq/nodeWorkerMessageBase' +import { getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS' /* eslint-disable no-console */ @@ -48,13 +46,9 @@ if (parameters.help || !parameters.tenant) { } else { setImmediate(async () => { const tenantIds = parameters.tenant.split(',') - + const emitter = await getNodejsWorkerEmitter() for (const tenantId of tenantIds) { - await sendNodeWorkerMessage(tenantId, { - type: NodeWorkerMessageType.NODE_MICROSERVICE, - tenant: tenantId, - service: 'merge-suggestions', - } as NodeWorkerMessageBase) + await emitter.mergeSuggestions(tenantId) } process.exit(0) diff --git a/backend/src/bin/scripts/process-integration.ts b/backend/src/bin/scripts/process-integration.ts deleted file mode 100644 index 499571d6bf..0000000000 --- a/backend/src/bin/scripts/process-integration.ts +++ /dev/null @@ -1,210 +0,0 @@ -import { processPaginated, singleOrDefault } from '@crowd/common' -import { INTEGRATION_SERVICES } from '@crowd/integrations' -import { getServiceLogger } from '@crowd/logging' -import commandLineArgs from 'command-line-args' -import commandLineUsage from 'command-line-usage' -import * as fs from 'fs' -import path from 'path' -import { IntegrationRunState } from '@crowd/types' -import IntegrationRepository from 
'../../database/repositories/integrationRepository' -import IntegrationRunRepository from '../../database/repositories/integrationRunRepository' -import SequelizeRepository from '../../database/repositories/sequelizeRepository' -import { getIntegrationRunWorkerEmitter } from '../../serverless/utils/serviceSQS' -import { sendNodeWorkerMessage } from '../../serverless/utils/nodeWorkerSQS' -import { NodeWorkerIntegrationProcessMessage } from '../../types/mq/nodeWorkerIntegrationProcessMessage' - -/* eslint-disable no-console */ - -const banner = fs.readFileSync(path.join(__dirname, 'banner.txt'), 'utf8') - -const log = getServiceLogger() - -const options = [ - { - name: 'integration', - alias: 'i', - typeLabel: '{underline integrationId}', - type: String, - description: - 'The unique ID of integration that you would like to process. Use comma delimiter when sending multiple integrations.', - }, - { - name: 'onboarding', - alias: 'o', - description: 'Process integration as if it was onboarding.', - type: Boolean, - defaultValue: false, - }, - { - name: 'disableFiringCrowdWebhooks', - alias: 'd', - typeLabel: '{underline disableFiringCrowdWebhooks}', - type: Boolean, - defaultOption: false, - description: 'Should it disable firing outgoing crowd webhooks?', - }, - { - name: 'platform', - alias: 'p', - description: 'The platform for which we should run all integrations.', - }, - { - name: 'help', - alias: 'h', - type: Boolean, - description: 'Print this usage guide.', - }, -] -const sections = [ - { - content: banner, - raw: true, - }, - { - header: 'Process Integration', - content: 'Trigger processing of integrations.', - }, - { - header: 'Options', - optionList: options, - }, -] - -const usage = commandLineUsage(sections) -const parameters = commandLineArgs(options) - -const triggerIntegrationRun = async ( - runRepo: IntegrationRunRepository, - tenantId: string, - integrationId: string, - onboarding: boolean, - fireCrowdWebhooks: boolean, -) => { - const existingRun = await runRepo.findLastProcessingRun(integrationId) - - if (existingRun && existingRun.onboarding) { - log.error('Integration is already processing, skipping!') - return - } - - log.info( - { integrationId, onboarding }, - 'Integration found - creating a new run in the old framework!', - ) - const run = await runRepo.create({ - integrationId, - tenantId, - onboarding, - state: IntegrationRunState.PENDING, - }) - - log.info( - { integrationId, onboarding }, - 'Triggering SQS message for the old framework integration!', - ) - await sendNodeWorkerMessage( - tenantId, - new NodeWorkerIntegrationProcessMessage(run.id, null, fireCrowdWebhooks), - ) -} - -const triggerNewIntegrationRun = async ( - tenantId: string, - integrationId: string, - platform: string, - onboarding: boolean, -) => { - log.info( - { integrationId, onboarding }, - 'Triggering SQS message for the new framework integration!', - ) - - const emitter = await getIntegrationRunWorkerEmitter() - await emitter.triggerIntegrationRun(tenantId, platform, integrationId, onboarding) -} - -if (parameters.help || (!parameters.integration && !parameters.platform)) { - console.log(usage) -} else { - setImmediate(async () => { - const onboarding = parameters.onboarding - const options = await SequelizeRepository.getDefaultIRepositoryOptions() - - const fireCrowdWebhooks = !parameters.disableFiringCrowdWebhooks - - const runRepo = new IntegrationRunRepository(options) - - if (parameters.platform) { - let inNewFramework = false - - if (singleOrDefault(INTEGRATION_SERVICES, (s) => s.type 
=== parameters.platform)) { - inNewFramework = true - } - - await processPaginated( - async (page) => IntegrationRepository.findAllActive(parameters.platform, page, 10), - async (integrations) => { - for (const i of integrations) { - const integration = i as any - - if (inNewFramework) { - await triggerNewIntegrationRun( - integration.tenantId, - integration.id, - integration.platform, - onboarding, - ) - } else { - await triggerIntegrationRun( - runRepo, - integration.tenantId, - integration.id, - onboarding, - fireCrowdWebhooks, - ) - } - } - }, - ) - } else { - const integrationIds = parameters.integration.split(',') - for (const integrationId of integrationIds) { - const integration = await options.database.integration.findOne({ - where: { id: integrationId }, - }) - - if (!integration) { - log.error({ integrationId }, 'Integration not found!') - process.exit(1) - } else { - log.info({ integrationId, onboarding }, 'Integration found - triggering SQS message!') - - let inNewFramework = false - - if (singleOrDefault(INTEGRATION_SERVICES, (s) => s.type === integration.platform)) { - inNewFramework = true - } - - if (inNewFramework) { - await triggerNewIntegrationRun( - integration.tenantId, - integration.id, - integration.platform, - onboarding, - ) - } else { - await triggerIntegrationRun( - runRepo, - integration.tenantId, - integration.id, - onboarding, - fireCrowdWebhooks, - ) - } - } - } - } - - process.exit(0) - }) -} diff --git a/backend/src/bin/scripts/process-stream.ts b/backend/src/bin/scripts/process-stream.ts deleted file mode 100644 index a207c14e6f..0000000000 --- a/backend/src/bin/scripts/process-stream.ts +++ /dev/null @@ -1,82 +0,0 @@ -import commandLineArgs from 'command-line-args' -import commandLineUsage from 'command-line-usage' -import * as fs from 'fs' -import path from 'path' -import { getServiceLogger } from '@crowd/logging' -import SequelizeRepository from '../../database/repositories/sequelizeRepository' -import { sendNodeWorkerMessage } from '../../serverless/utils/nodeWorkerSQS' -import { NodeWorkerIntegrationProcessMessage } from '../../types/mq/nodeWorkerIntegrationProcessMessage' -import IntegrationRunRepository from '../../database/repositories/integrationRunRepository' -import IntegrationStreamRepository from '../../database/repositories/integrationStreamRepository' - -/* eslint-disable no-console */ - -const banner = fs.readFileSync(path.join(__dirname, 'banner.txt'), 'utf8') - -const log = getServiceLogger() - -const options = [ - { - name: 'stream', - alias: 's', - typeLabel: '{underline streamId}', - type: String, - description: - 'The unique ID of integration stream that you would like to process. 
Use comma delimiter when sending multiple integration streams.', - }, - { - name: 'help', - alias: 'h', - type: Boolean, - description: 'Print this usage guide.', - }, -] -const sections = [ - { - content: banner, - raw: true, - }, - { - header: 'Process integration stream', - content: 'Trigger processing of integration stream.', - }, - { - header: 'Options', - optionList: options, - }, -] - -const usage = commandLineUsage(sections) -const parameters = commandLineArgs(options) - -if (parameters.help && !parameters.stream) { - console.log(usage) -} else { - setImmediate(async () => { - const options = await SequelizeRepository.getDefaultIRepositoryOptions() - - const streamRepo = new IntegrationStreamRepository(options) - const runRepo = new IntegrationRunRepository(options) - - const streamIds = parameters.stream.split(',') - for (const streamId of streamIds) { - const stream = await streamRepo.findById(streamId) - - if (!stream) { - log.error({ streamId }, 'Integration stream not found!') - process.exit(1) - } else { - log.info({ streamId }, 'Integration stream found! Triggering SQS message!') - - const run = await runRepo.findById(stream.runId) - - await sendNodeWorkerMessage( - run.tenantId, - new NodeWorkerIntegrationProcessMessage(run.id, stream.id), - ) - } - } - - process.exit(0) - }) -} diff --git a/backend/src/bin/scripts/process-webhook.ts b/backend/src/bin/scripts/process-webhook.ts deleted file mode 100644 index 8026b15b9a..0000000000 --- a/backend/src/bin/scripts/process-webhook.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { getServiceLogger } from '@crowd/logging' -import { getRedisClient } from '@crowd/redis' -import commandLineArgs from 'command-line-args' -import commandLineUsage from 'command-line-usage' -import * as fs from 'fs' -import path from 'path' -import { QueryTypes } from 'sequelize' -import { IntegrationProcessor } from '@/serverless/integrations/services/integrationProcessor' -import { REDIS_CONFIG } from '../../conf' -import SequelizeRepository from '../../database/repositories/sequelizeRepository' - -/* eslint-disable no-console */ - -const banner = fs.readFileSync(path.join(__dirname, 'banner.txt'), 'utf8') - -const log = getServiceLogger() - -const options = [ - { - name: 'webhook', - alias: 'w', - typeLabel: '{underline webhookId}', - type: String, - description: - 'The unique ID of webhook that you would like to process. Use comma delimiter when sending multiple webhooks.', - }, - { - name: 'tenant', - alias: 't', - typeLabel: '{underline tenantId}', - type: String, - description: - 'The unique ID of tenant that you would like to process. Use in combination with type.', - }, - { - name: 'type', - alias: 'p', - typeLabel: '{underline type}', - type: String, - description: 'The webhook type to process. 
Use in combination with tenant.', - }, - { - name: 'help', - alias: 'h', - type: Boolean, - description: 'Print this usage guide.', - }, -] -const sections = [ - { - content: banner, - raw: true, - }, - { - header: 'Process Webhook', - content: 'Trigger processing of webhooks.', - }, - { - header: 'Options', - optionList: options, - }, -] - -const usage = commandLineUsage(sections) -const parameters = commandLineArgs(options) - -if (parameters.help || (!parameters.webhook && (!parameters.tenant || !parameters.type))) { - console.log(usage) -} else { - setImmediate(async () => { - const options = await SequelizeRepository.getDefaultIRepositoryOptions() - const redisEmitter = await getRedisClient(REDIS_CONFIG) - const integrationProcessorInstance = new IntegrationProcessor(options, redisEmitter) - - if (parameters.webhook) { - const webhookIds = parameters.webhook.split(',') - - for (const webhookId of webhookIds) { - log.info({ webhookId }, 'Webhook found - processing!') - await integrationProcessorInstance.processWebhook(webhookId, true, true) - } - } else if (parameters.tenant && parameters.type) { - const seq = SequelizeRepository.getSequelize(options) - - let ids = ( - await seq.query( - ` - select id from "incomingWebhooks" - where state in ('PENDING', 'ERROR') - and "tenantId" = :tenantId and type = :type - order by id - limit 100 - `, - { - type: QueryTypes.SELECT, - replacements: { - tenantId: parameters.tenant, - type: parameters.type, - }, - }, - ) - ).map((r) => (r as any).id) - - while (ids.length > 0) { - for (const webhookId of ids) { - log.info({ webhookId }, 'Webhook found - processing!') - await integrationProcessorInstance.processWebhook(webhookId, true, true) - } - - ids = ( - await seq.query( - ` - select id from "incomingWebhooks" - where state in ('PENDING', 'ERROR') - and "tenantId" = :tenantId and type = :type - and id > :id - order by id - limit 100 - `, - { - type: QueryTypes.SELECT, - replacements: { - tenantId: parameters.tenant, - type: parameters.type, - id: ids[ids.length - 1], - }, - }, - ) - ).map((r) => (r as any).id) - } - } - - process.exit(0) - }) -} diff --git a/backend/src/bin/worker/integrations.ts b/backend/src/bin/worker/integrations.ts deleted file mode 100644 index 387de96fa8..0000000000 --- a/backend/src/bin/worker/integrations.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { getRedisClient } from '@crowd/redis' -import { Logger } from '@crowd/logging' -import { REDIS_CONFIG } from '../../conf' -import SequelizeRepository from '../../database/repositories/sequelizeRepository' -import { IntegrationProcessor } from '../../serverless/integrations/services/integrationProcessor' -import { IServiceOptions } from '../../services/IServiceOptions' -import { NodeWorkerIntegrationProcessMessage } from '../../types/mq/nodeWorkerIntegrationProcessMessage' -import { NodeWorkerProcessWebhookMessage } from '../../types/mq/nodeWorkerProcessWebhookMessage' - -let integrationProcessorInstance: IntegrationProcessor - -async function getIntegrationProcessor(logger: Logger): Promise { - if (integrationProcessorInstance) return integrationProcessorInstance - - const options: IServiceOptions = { - ...(await SequelizeRepository.getDefaultIRepositoryOptions()), - log: logger, - } - - const redisEmitter = await getRedisClient(REDIS_CONFIG) - - integrationProcessorInstance = new IntegrationProcessor(options, redisEmitter) - - return integrationProcessorInstance -} - -export const processIntegration = async ( - msg: NodeWorkerIntegrationProcessMessage, - messageLogger: Logger, 
-): Promise => { - const processor = await getIntegrationProcessor(messageLogger) - await processor.process(msg) -} - -export const processWebhook = async ( - msg: NodeWorkerProcessWebhookMessage, - messageLogger: Logger, -): Promise => { - const processor = await getIntegrationProcessor(messageLogger) - await processor.processWebhook(msg.webhookId, msg.force, msg.fireCrowdWebhooks) -} diff --git a/backend/src/conf/configTypes.ts b/backend/src/conf/configTypes.ts index 87e91e4886..830c4d774c 100644 --- a/backend/src/conf/configTypes.ts +++ b/backend/src/conf/configTypes.ts @@ -23,7 +23,6 @@ export interface SQSConfiguration { host?: string port?: number nodejsWorkerQueue: string - nodejsWorkerDelayableQueue: string integrationRunWorkerQueue: string pythonWorkerQueue: string aws: AwsCredentials diff --git a/backend/src/database/initializers/twitterSourceIdsFixedTimestamps.ts b/backend/src/database/initializers/twitterSourceIdsFixedTimestamps.ts deleted file mode 100644 index dfe74294f0..0000000000 --- a/backend/src/database/initializers/twitterSourceIdsFixedTimestamps.ts +++ /dev/null @@ -1,63 +0,0 @@ -/** - * This script is responsible for regenerating - * sourceIds for twitter follow activities that have timestamp > 1970-01-01 - */ - -import dotenv from 'dotenv' -import dotenvExpand from 'dotenv-expand' -import { getServiceLogger } from '@crowd/logging' -import { PlatformType } from '@crowd/types' -import ActivityService from '../../services/activityService' -import IntegrationService from '../../services/integrationService' -import TenantService from '../../services/tenantService' -import getUserContext from '../utils/getUserContext' -import { IntegrationServiceBase } from '../../serverless/integrations/services/integrationServiceBase' - -const path = require('path') - -const env = dotenv.config({ - path: path.resolve(__dirname, `../../../.env.staging`), -}) - -dotenvExpand.expand(env) - -const log = getServiceLogger() - -async function twitterFollowsFixSourceIdsWithTimestamp() { - const tenants = await TenantService._findAndCountAllForEveryUser({}) - - // for each tenant - for (const t of tenants.rows) { - const tenantId = t.id - // get user context - const userContext = await getUserContext(tenantId) - const integrationService = new IntegrationService(userContext) - - const twitterIntegration = ( - await integrationService.findAndCountAll({ filter: { platform: PlatformType.TWITTER } }) - ).rows[0] - - if (twitterIntegration) { - const actService = new ActivityService(userContext) - - // get activities where timestamp != 1970-01-01, we can query by > 2000-01-01 - const activities = await actService.findAndCountAll({ - filter: { type: 'follow', timestampRange: ['2000-01-01'] }, - }) - - for (const activity of activities.rows) { - log.info({ activity }, 'Activity') - // calculate sourceId with fixed timestamps - const sourceIdRegenerated = IntegrationServiceBase.generateSourceIdHash( - activity.communityMember.username.twitter, - 'follow', - '1970-01-01T00:00:00+00:00', - 'twitter', - ) - await actService.update(activity.id, { sourceId: sourceIdRegenerated }) - } - } - } -} - -twitterFollowsFixSourceIdsWithTimestamp() diff --git a/backend/src/database/migrations/U1701080323__tenant-priority-level.sql b/backend/src/database/migrations/U1701080323__tenant-priority-level.sql new file mode 100644 index 0000000000..595398663d --- /dev/null +++ b/backend/src/database/migrations/U1701080323__tenant-priority-level.sql @@ -0,0 +1,2 @@ +alter table tenants + drop column "priorityLevel"; \ No 
newline at end of file
diff --git a/backend/src/database/migrations/V1701080323__tenant-priority-level.sql b/backend/src/database/migrations/V1701080323__tenant-priority-level.sql
new file mode 100644
index 0000000000..5b7461bdbe
--- /dev/null
+++ b/backend/src/database/migrations/V1701080323__tenant-priority-level.sql
@@ -0,0 +1,2 @@
+alter table tenants
+    add column "priorityLevel" varchar(255);
\ No newline at end of file
diff --git a/backend/src/database/repositories/priorityLevelContextRepository.ts b/backend/src/database/repositories/priorityLevelContextRepository.ts
new file mode 100644
index 0000000000..8466726600
--- /dev/null
+++ b/backend/src/database/repositories/priorityLevelContextRepository.ts
@@ -0,0 +1,30 @@
+import { IQueuePriorityCalculationContext } from '@crowd/types'
+import { QueryTypes } from 'sequelize'
+import { IRepositoryOptions } from './IRepositoryOptions'
+import SequelizeRepository from './sequelizeRepository'
+
+export class PriorityLevelContextRepository {
+  public constructor(private readonly options: IRepositoryOptions) {}
+
+  public async loadPriorityLevelContext(
+    tenantId: string,
+  ): Promise<IQueuePriorityCalculationContext> {
+    const seq = SequelizeRepository.getSequelize(this.options)
+
+    const results = await seq.query(
+      `select plan, "priorityLevel" as "dbPriority" from tenants where id = :tenantId`,
+      {
+        replacements: {
+          tenantId,
+        },
+        type: QueryTypes.SELECT,
+      },
+    )
+
+    if (results.length === 1) {
+      return results[0] as IQueuePriorityCalculationContext
+    }
+
+    throw new Error(`Tenant not found: ${tenantId}!`)
+  }
+}
diff --git a/backend/src/serverless/integrations/services/integrationProcessor.ts b/backend/src/serverless/integrations/services/integrationProcessor.ts
index f74f190ab5..4fafe5b3c5 100644
--- a/backend/src/serverless/integrations/services/integrationProcessor.ts
+++ b/backend/src/serverless/integrations/services/integrationProcessor.ts
@@ -1,73 +1,20 @@
 import { LoggerBase } from '@crowd/logging'
-import { ApiPubSubEmitter, RedisClient } from '@crowd/redis'
 import IntegrationRunRepository from '../../../database/repositories/integrationRunRepository'
-import IntegrationStreamRepository from '../../../database/repositories/integrationStreamRepository'
 import { IServiceOptions } from '../../../services/IServiceOptions'
-import { NodeWorkerIntegrationProcessMessage } from '../../../types/mq/nodeWorkerIntegrationProcessMessage'
-import { IntegrationRunProcessor } from './integrationRunProcessor'
 import { IntegrationTickProcessor } from './integrationTickProcessor'
-import { WebhookProcessor } from './webhookProcessor'
 
 export class IntegrationProcessor extends LoggerBase {
   private readonly tickProcessor: IntegrationTickProcessor
 
-  private readonly webhookProcessor: WebhookProcessor
-
-  private readonly runProcessor: IntegrationRunProcessor | undefined
-
-  constructor(options: IServiceOptions, redisEmitterClient?: RedisClient) {
+  constructor(options: IServiceOptions) {
     super(options.log)
 
-    const integrationServices = []
-
-    this.log.debug(
-      { supportedIntegrations: integrationServices.map((i) => i.type) },
-      'Successfully detected supported integrations!',
-    )
-
-    let apiPubSubEmitter: ApiPubSubEmitter | undefined
-
-    if (redisEmitterClient) {
-      apiPubSubEmitter = new ApiPubSubEmitter(redisEmitterClient, this.log)
-    }
-
     const integrationRunRepository = new IntegrationRunRepository(options)
-    const integrationStreamRepository = new IntegrationStreamRepository(options)
 
-    this.tickProcessor = new IntegrationTickProcessor(
-      options,
-      integrationServices,
-      
integrationRunRepository, - ) - - this.webhookProcessor = new WebhookProcessor(options, integrationServices) - - if (apiPubSubEmitter) { - this.runProcessor = new IntegrationRunProcessor( - options, - integrationServices, - integrationRunRepository, - integrationStreamRepository, - apiPubSubEmitter, - ) - } else { - this.log.warn('No apiPubSubEmitter provided, runProcessor will not be initialized!') - } + this.tickProcessor = new IntegrationTickProcessor(options, integrationRunRepository) } async processTick() { await this.tickProcessor.processTick() } - - async processWebhook(webhookId: string, force?: boolean, fireCrowdWebhooks?: boolean) { - await this.webhookProcessor.processWebhook(webhookId, force, fireCrowdWebhooks) - } - - async process(req: NodeWorkerIntegrationProcessMessage) { - if (this.runProcessor) { - await this.runProcessor.process(req) - } else { - throw new Error('runProcessor is not initialized!') - } - } } diff --git a/backend/src/serverless/integrations/services/integrationRunProcessor.ts b/backend/src/serverless/integrations/services/integrationRunProcessor.ts deleted file mode 100644 index 3575b726e4..0000000000 --- a/backend/src/serverless/integrations/services/integrationRunProcessor.ts +++ /dev/null @@ -1,573 +0,0 @@ -import moment from 'moment' -import { ApiPubSubEmitter } from '@crowd/redis' -import { Logger, getChildLogger, LoggerBase } from '@crowd/logging' -import { i18n, singleOrDefault } from '@crowd/common' -import { IntegrationRunState, PlatformType } from '@crowd/types' -import { sendSlackAlert, SlackAlertTypes } from '@crowd/alerting' -import IntegrationRepository from '../../../database/repositories/integrationRepository' -import IntegrationRunRepository from '../../../database/repositories/integrationRunRepository' -import IntegrationStreamRepository from '../../../database/repositories/integrationStreamRepository' -import MicroserviceRepository from '../../../database/repositories/microserviceRepository' -import getUserContext from '../../../database/utils/getUserContext' -import { twitterFollowers } from '../../../database/utils/keys/microserviceTypes' -import { IServiceOptions } from '../../../services/IServiceOptions' -import { - IIntegrationStream, - IProcessStreamResults, - IStepContext, -} from '../../../types/integration/stepResult' -import { IntegrationRun } from '../../../types/integrationRunTypes' -import { NodeWorkerIntegrationProcessMessage } from '../../../types/mq/nodeWorkerIntegrationProcessMessage' -import { IntegrationServiceBase } from './integrationServiceBase' -import SampleDataService from '../../../services/sampleDataService' -import { - DbIntegrationStreamCreateData, - IntegrationStream, - IntegrationStreamState, -} from '../../../types/integrationStreamTypes' -import bulkOperations from '../../dbOperations/operationsWorker' -import UserRepository from '../../../database/repositories/userRepository' -import EmailSender from '../../../services/emailSender' -import { API_CONFIG, SLACK_ALERTING_CONFIG } from '../../../conf' -import SegmentRepository from '../../../database/repositories/segmentRepository' - -export class IntegrationRunProcessor extends LoggerBase { - constructor( - options: IServiceOptions, - private readonly integrationServices: IntegrationServiceBase[], - private readonly integrationRunRepository: IntegrationRunRepository, - private readonly integrationStreamRepository: IntegrationStreamRepository, - private readonly apiPubSubEmitter?: ApiPubSubEmitter, - ) { - super(options.log) - } - - async process(req: 
NodeWorkerIntegrationProcessMessage) { - if (!req.runId) { - this.log.warn("No runId provided! Skipping because it's an old message.") - return - } - - this.log.info({ runId: req.runId }, 'Detected integration run!') - - const run = await this.integrationRunRepository.findById(req.runId) - - const userContext = await getUserContext(run.tenantId) - - let integration - - if (run.integrationId) { - integration = await IntegrationRepository.findById(run.integrationId, userContext) - } else if (run.microserviceId) { - const microservice = await MicroserviceRepository.findById(run.microserviceId, userContext) - - switch (microservice.type) { - case twitterFollowers: - integration = await IntegrationRepository.findByPlatform( - PlatformType.TWITTER, - userContext, - ) - break - default: - throw new Error(`Microservice type '${microservice.type}' is not supported!`) - } - } else { - this.log.error({ runId: req.runId }, 'Integration run has no integration or microservice!') - throw new Error(`Integration run '${req.runId}' has no integration or microservice!`) - } - - const segmentRepository = new SegmentRepository(userContext) - userContext.currentSegments = [await segmentRepository.findById(integration.segmentId)] - - const logger = getChildLogger('process', this.log, { - runId: req.runId, - type: integration.platform, - tenantId: integration.tenantId, - integrationId: run.integrationId, - onboarding: run.onboarding, - microserviceId: run.microserviceId, - }) - - logger.info('Processing integration!') - - userContext.log = logger - - // get the relevant integration service that is supposed to be configured already - const intService = singleOrDefault( - this.integrationServices, - (s) => s.type === integration.platform, - ) - if (intService === undefined) { - logger.error('No integration service configured!') - throw new Error(`No integration service configured for type '${integration.platform}'!`) - } - - const stepContext: IStepContext = { - startTimestamp: moment().utc().unix(), - limitCount: integration.limitCount || 0, - onboarding: run.onboarding, - pipelineData: {}, - runId: req.runId, - integration, - serviceContext: userContext, - repoContext: userContext, - logger, - } - - if (!req.streamId) { - const existingRun = await this.integrationRunRepository.findLastProcessingRun( - run.integrationId, - run.microserviceId, - req.runId, - ) - - if (existingRun) { - logger.info('Integration is already being processed!') - await this.integrationRunRepository.markError(req.runId, { - errorPoint: 'check_existing_run', - message: 'Integration is already being processed!', - existingRunId: existingRun.id, - }) - return - } - - if (run.state === IntegrationRunState.PROCESSED) { - logger.warn('Integration is already processed!') - return - } - - if (run.state === IntegrationRunState.PENDING) { - logger.info('Started processing integration!') - } else if (run.state === IntegrationRunState.DELAYED) { - logger.info('Continued processing delayed integration!') - } else if (run.state === IntegrationRunState.ERROR) { - logger.info('Restarted processing errored integration!') - } else if (run.state === IntegrationRunState.PROCESSING) { - throw new Error(`Invalid state '${run.state}' for integration run!`) - } - - await this.integrationRunRepository.markProcessing(req.runId) - run.state = IntegrationRunState.PROCESSING - - if (integration.settings.updateMemberAttributes) { - logger.trace('Updating member attributes!') - - await intService.createMemberAttributes(stepContext) - - 
integration.settings.updateMemberAttributes = false - await IntegrationRepository.update( - integration.id, - { settings: integration.settings }, - userContext, - ) - } - - // delete sample data on onboarding - if (run.onboarding) { - try { - await new SampleDataService(userContext).deleteSampleData() - } catch (err) { - logger.error(err, { tenantId: integration.tenantId }, 'Error deleting sample data!') - await this.integrationRunRepository.markError(req.runId, { - errorPoint: 'delete_sample_data', - message: err.message, - stack: err.stack, - errorString: JSON.stringify(err), - }) - return - } - } - } - - try { - // check global limit reset - if (intService.limitResetFrequencySeconds > 0 && integration.limitLastResetAt) { - const secondsSinceLastReset = moment() - .utc() - .diff(moment(integration.limitLastResetAt).utc(), 'seconds') - - if (secondsSinceLastReset >= intService.limitResetFrequencySeconds) { - integration.limitCount = 0 - integration.limitLastResetAt = moment().utc().toISOString() - - await IntegrationRepository.update( - integration.id, - { - limitCount: integration.limitCount, - limitLastResetAt: integration.limitLastResetAt, - }, - userContext, - ) - } - } - - // preprocess if needed - logger.trace('Preprocessing integration!') - try { - await intService.preprocess(stepContext) - } catch (err) { - if (err.rateLimitResetSeconds) { - // need to delay integration processing - logger.warn(err, 'Rate limit reached while preprocessing integration! Delaying...') - await this.handleRateLimitError(logger, run, err.rateLimitResetSeconds, stepContext) - return - } - - logger.error(err, 'Error preprocessing integration!') - await this.integrationRunRepository.markError(req.runId, { - errorPoint: 'preprocessing', - message: err.message, - stack: err.stack, - errorString: JSON.stringify(err), - }) - return - } - - // detect streams to process for this integration - - let forcedStream: IntegrationStream | undefined - if (req.streamId) { - forcedStream = await this.integrationStreamRepository.findById(req.streamId) - - if (!forcedStream) { - logger.error({ streamId: req.streamId }, 'Stream not found!') - throw new Error(`Stream '${req.streamId}' not found!`) - } - } else { - const dbStreams = await this.integrationStreamRepository.findByRunId(req.runId, 1, 1) - if (dbStreams.length > 0) { - logger.trace('Streams already detected and saved to the database!') - } else { - // need to optimize this as well since it may happen that we have a lot of streams - logger.trace('Detecting streams!') - try { - const pendingStreams = await intService.getStreams(stepContext) - const createStreams: DbIntegrationStreamCreateData[] = pendingStreams.map((s) => ({ - runId: req.runId, - tenantId: run.tenantId, - integrationId: run.integrationId, - microserviceId: run.microserviceId, - name: s.value, - metadata: s.metadata, - })) - await this.integrationStreamRepository.bulkCreate(createStreams) - await this.integrationRunRepository.touch(run.id) - } catch (err) { - if (err.rateLimitResetSeconds) { - // need to delay integration processing - logger.warn(err, 'Rate limit reached while getting integration streams! 
Delaying...') - await this.handleRateLimitError(logger, run, err.rateLimitResetSeconds, stepContext) - return - } - - throw err - } - } - } - - // process streams - let processedCount = 0 - let notifyCount = 0 - - let nextStream: IntegrationStream | undefined - if (forcedStream) { - nextStream = forcedStream - } else { - nextStream = await this.integrationStreamRepository.getNextStreamToProcess(req.runId) - } - - while (nextStream) { - if ((req as any).exiting) { - if (!run.onboarding) { - logger.warn('Stopped processing integration (not onboarding)!') - break - } else { - logger.warn('Stopped processing integration (onboarding)!') - const delayUntil = moment() - .add(3 * 60, 'seconds') - .toDate() - await this.integrationRunRepository.delay(req.runId, delayUntil) - break - } - } - - const stream: IIntegrationStream = { - id: nextStream.id, - value: nextStream.name, - metadata: nextStream.metadata, - } - - processedCount++ - notifyCount++ - - let processStreamResult: IProcessStreamResults - - logger.trace({ streamId: stream.id }, 'Processing stream!') - await this.integrationStreamRepository.markProcessing(stream.id) - await this.integrationRunRepository.touch(run.id) - try { - processStreamResult = await intService.processStream(stream, stepContext) - } catch (err) { - if (err.rateLimitResetSeconds) { - logger.warn( - { streamId: stream.id, message: err.message }, - 'Rate limit reached while processing stream! Delaying...', - ) - await this.handleRateLimitError( - logger, - run, - err.rateLimitResetSeconds, - stepContext, - stream, - ) - return - } - - const retries = await this.integrationStreamRepository.markError(stream.id, { - errorPoint: 'process_stream', - message: err.message, - stack: err.stack, - errorString: JSON.stringify(err), - }) - await this.integrationRunRepository.touch(run.id) - - logger.error(err, { retries, streamId: stream.id }, 'Error while processing stream!') - } - - if (processStreamResult) { - // surround with try catch so if one stream fails we try all of them as well just in case - try { - logger.trace({ stream: JSON.stringify(stream) }, `Processing stream results!`) - - if (processStreamResult.newStreams && processStreamResult.newStreams.length > 0) { - const dbCreateStreams: DbIntegrationStreamCreateData[] = - processStreamResult.newStreams.map((s) => ({ - runId: req.runId, - tenantId: run.tenantId, - integrationId: run.integrationId, - microserviceId: run.microserviceId, - name: s.value, - metadata: s.metadata, - })) - - await this.integrationStreamRepository.bulkCreate(dbCreateStreams) - await this.integrationRunRepository.touch(run.id) - - logger.info( - `Detected ${processStreamResult.newStreams.length} new streams to process!`, - ) - } - - for (const operation of processStreamResult.operations) { - if (operation.records.length > 0) { - logger.trace( - { operationType: operation.type }, - `Processing bulk operation with ${operation.records.length} records!`, - ) - stepContext.limitCount += operation.records.length - await bulkOperations( - operation.type, - operation.records, - userContext, - req.fireCrowdWebhooks ?? 
true, - ) - } - } - - if (processStreamResult.nextPageStream !== undefined) { - if ( - !run.onboarding && - (await intService.isProcessingFinished( - stepContext, - stream, - processStreamResult.operations, - processStreamResult.lastRecordTimestamp, - )) - ) { - logger.warn('Integration processing finished because of service implementation!') - } else { - logger.trace( - { currentStream: JSON.stringify(stream) }, - `Detected next page stream!`, - ) - await this.integrationStreamRepository.create({ - runId: req.runId, - tenantId: run.tenantId, - integrationId: run.integrationId, - microserviceId: run.microserviceId, - name: processStreamResult.nextPageStream.value, - metadata: processStreamResult.nextPageStream.metadata, - }) - await this.integrationRunRepository.touch(run.id) - } - } - - if (processStreamResult.sleep !== undefined && processStreamResult.sleep > 0) { - logger.warn( - `Stream processing resulted in a requested delay of ${processStreamResult.sleep}! Will delay remaining streams!`, - ) - - const delayUntil = moment().add(processStreamResult.sleep, 'seconds').toDate() - await this.integrationRunRepository.delay(req.runId, delayUntil) - break - } - - if (intService.globalLimit > 0 && stepContext.limitCount >= intService.globalLimit) { - // if limit reset frequency is 0 we don't need to care about limits - if (intService.limitResetFrequencySeconds > 0) { - logger.warn( - { - limitCount: stepContext.limitCount, - globalLimit: intService.globalLimit, - }, - 'We reached a global limit - stopping processing!', - ) - - integration.limitCount = stepContext.limitCount - - const secondsSinceLastReset = moment() - .utc() - .diff(moment(integration.limitLastResetAt).utc(), 'seconds') - - if (secondsSinceLastReset < intService.limitResetFrequencySeconds) { - const delayUntil = moment() - .add(intService.limitResetFrequencySeconds - secondsSinceLastReset, 'seconds') - .toDate() - await this.integrationRunRepository.delay(req.runId, delayUntil) - } - - break - } - } - - if (notifyCount === 50) { - logger.info(`Processed ${processedCount} streams!`) - notifyCount = 0 - } - - await this.integrationStreamRepository.markProcessed(stream.id) - await this.integrationRunRepository.touch(run.id) - } catch (err) { - logger.error( - err, - { stream: JSON.stringify(stream) }, - 'Error processing stream results!', - ) - await this.integrationStreamRepository.markError(stream.id, { - errorPoint: 'process_stream_results', - message: err.message, - stack: err.stack, - errorString: JSON.stringify(err), - }) - await this.integrationRunRepository.touch(run.id) - } - } - - if (forcedStream) { - break - } - - nextStream = await this.integrationStreamRepository.getNextStreamToProcess(req.runId) - } - - // postprocess integration settings - await intService.postprocess(stepContext) - - logger.info('Done processing integration!') - } catch (err) { - logger.error(err, 'Error while processing integration!') - } finally { - const newState = await this.integrationRunRepository.touchState(req.runId) - - let emailSentAt - if (newState === IntegrationRunState.PROCESSED) { - if (!integration.emailSentAt) { - const tenantUsers = await UserRepository.findAllUsersOfTenant(integration.tenantId) - emailSentAt = new Date() - for (const user of tenantUsers) { - await new EmailSender(EmailSender.TEMPLATES.INTEGRATION_DONE, { - integrationName: i18n('en', `entities.integration.name.${integration.platform}`), - link: API_CONFIG.frontendUrl, - }).sendTo(user.email) - } - } - } - - let status - switch (newState) { - case 
IntegrationRunState.PROCESSED: - status = 'done' - break - case IntegrationRunState.ERROR: - status = 'error' - break - default: - status = integration.status - } - - await IntegrationRepository.update( - integration.id, - { - status, - emailSentAt, - settings: stepContext.integration.settings, - refreshToken: stepContext.integration.refreshToken, - token: stepContext.integration.token, - }, - userContext, - ) - - if (newState === IntegrationRunState.PROCESSING && !req.streamId) { - const failedStreams = await this.integrationStreamRepository.findByRunId(req.runId, 1, 1, [ - IntegrationStreamState.ERROR, - ]) - if (failedStreams.length > 0) { - logger.warn('Integration ended but we are still processing - delaying for a minute!') - const delayUntil = moment().add(60, 'seconds') - await this.integrationRunRepository.delay(run.id, delayUntil.toDate()) - } else { - logger.error('Integration ended but we are still processing!') - } - } else if (newState === IntegrationRunState.ERROR) { - await sendSlackAlert({ - slackURL: SLACK_ALERTING_CONFIG.url, - alertType: SlackAlertTypes.INTEGRATION_ERROR, - integration, - userContext, - log: logger, - frameworkVersion: 'old', - }) - } - - if (run.onboarding && this.apiPubSubEmitter) { - this.apiPubSubEmitter.emitIntegrationCompleted(integration.tenantId, integration.id, status) - } - } - } - - private async handleRateLimitError( - logger: Logger, - run: IntegrationRun, - rateLimitResetSeconds: number, - context: IStepContext, - stream?: IIntegrationStream, - ): Promise { - await IntegrationRepository.update( - context.integration.id, - { - settings: context.integration.settings, - refreshToken: context.integration.refreshToken, - token: context.integration.token, - }, - context.repoContext, - ) - - logger.warn('Rate limit reached, delaying integration processing!') - const delayUntil = moment().add(rateLimitResetSeconds + 30, 'seconds') - await this.integrationRunRepository.delay(run.id, delayUntil.toDate()) - - if (stream) { - await this.integrationStreamRepository.reset(stream.id) - } - } -} diff --git a/backend/src/serverless/integrations/services/integrationServiceBase.ts b/backend/src/serverless/integrations/services/integrationServiceBase.ts deleted file mode 100644 index 6e459dbe0a..0000000000 --- a/backend/src/serverless/integrations/services/integrationServiceBase.ts +++ /dev/null @@ -1,181 +0,0 @@ -import { SuperfaceClient } from '@superfaceai/one-sdk' -import moment from 'moment' -import crypto from 'crypto' -import { getServiceChildLogger } from '@crowd/logging' -import { IntegrationRunState, IntegrationType } from '@crowd/types' -import { IRepositoryOptions } from '../../../database/repositories/IRepositoryOptions' -import { - IIntegrationStream, - IPendingStream, - IProcessStreamResults, - IProcessWebhookResults, - IStepContext, - IStreamResultOperation, -} from '../../../types/integration/stepResult' -import { IS_TEST_ENV } from '../../../conf' -import { sendNodeWorkerMessage } from '../../utils/nodeWorkerSQS' -import { NodeWorkerIntegrationProcessMessage } from '../../../types/mq/nodeWorkerIntegrationProcessMessage' -import IntegrationRunRepository from '../../../database/repositories/integrationRunRepository' - -const logger = getServiceChildLogger('integrationService') - -/* eslint class-methods-use-this: 0 */ - -/* eslint-disable @typescript-eslint/no-unused-vars */ - -export abstract class IntegrationServiceBase { - /** - * How many records to process before we stop - */ - public globalLimit: number - - /** - * If onboarding 
globalLimit will be multiplied by this factor for that run
-   */
-  public onboardingLimitModifierFactor: number
-
-  /**
-   * How many seconds between global limit resets (0 for auto reset)
-   */
-  public limitResetFrequencySeconds: number
-
-  /**
-   * Every new integration should extend this class and implement its methods.
-   *
-   * @param type What integration is this?
-   * @param ticksBetweenChecks How many ticks to skip between integration checks (each tick is 1 minute). If 0, it will be triggered every tick, the same as if it were 1. If negative, it will never be triggered.
-   */
-  protected constructor(
-    public readonly type: IntegrationType,
-    public readonly ticksBetweenChecks: number,
-  ) {
-    this.globalLimit = 0
-    this.onboardingLimitModifierFactor = 1.0
-    this.limitResetFrequencySeconds = 0
-  }
-
-  async triggerIntegrationCheck(integrations: any[], options: IRepositoryOptions): Promise<void> {
-    const repository = new IntegrationRunRepository(options)
-
-    for (const integration of integrations) {
-      const run = await repository.create({
-        integrationId: integration.id,
-        tenantId: integration.tenantId,
-        onboarding: false,
-        state: IntegrationRunState.PENDING,
-      })
-
-      logger.info(
-        { integrationId: integration.id, runId: run.id },
-        'Triggering integration processing!',
-      )
-      await sendNodeWorkerMessage(
-        integration.tenantId,
-        new NodeWorkerIntegrationProcessMessage(run.id),
-      )
-    }
-  }
-
-  async preprocess(context: IStepContext): Promise<void> {
-    // do nothing - override if something is needed
-  }
-
-  async createMemberAttributes(context: IStepContext): Promise<void> {
-    // do nothing - override if something is needed
-  }
-
-  abstract getStreams(context: IStepContext): Promise<IPendingStream[]>
-
-  abstract processStream(
-    stream: IIntegrationStream,
-    context: IStepContext,
-  ): Promise<IProcessStreamResults>
-
-  async isProcessingFinished(
-    context: IStepContext,
-    currentStream: IIntegrationStream,
-    lastOperations: IStreamResultOperation[],
-    lastRecord?: any,
-    lastRecordTimestamp?: number,
-  ): Promise<boolean> {
-    return false
-  }
-
-  async postprocess(context: IStepContext): Promise<void> {
-    // do nothing - override if something is needed
-  }
-
-  async processWebhook(webhook: any, context: IStepContext): Promise<IProcessWebhookResults> {
-    throw new Error('Not implemented')
-  }
-
-  static superfaceClient(): SuperfaceClient {
-    if (IS_TEST_ENV) {
-      return undefined
-    }
-
-    return new SuperfaceClient()
-  }
-
-  /**
-   * Check whether the last record is over the retrospect that we are interested in
-   * @param lastRecordTimestamp The last activity timestamp we got
-   * @param startTimestamp The timestamp when we started
-   * @param maxRetrospect The maximum time we want to crawl
-   * @returns Whether we are over the retrospect already
-   */
-  static isRetrospectOver(
-    lastRecordTimestamp: number,
-    startTimestamp: number,
-    maxRetrospect: number,
-  ): boolean {
-    return startTimestamp - moment(lastRecordTimestamp).unix() > maxRetrospect
-  }
-
-  /**
-   * Some activities will not have remote (API) counterparts, so they will miss sourceIds.
-   * Since we're using sourceIds to find out if an activity already exists in our DB,
-   * sourceIds are required when creating an activity.
-   * This function generates an md5 hash that can be used as the sourceId of an activity.
-   * Prepends the string `gen-` so generated and remote sourceIds
-   * can be distinguished.
-   *
-   * @param {string} uniqueRemoteId remote member id from an integration. This id needs to be unique in a platform
-   * @param {string} type type of the activity
-   * @param {string} timestamp unix timestamp of the activity
-   * @param {string} platform platform of the activity
-   * @returns 32-character md5 hex digest generated from the given data, prepended with the string `gen-`
-   */
-  static generateSourceIdHash(
-    uniqueRemoteId: string,
-    type: string,
-    timestamp: string,
-    platform: string,
-  ) {
-    if (!uniqueRemoteId || !type || !timestamp || !platform) {
-      throw new Error('Bad hash input')
-    }
-
-    const data = `${uniqueRemoteId}-${type}-${timestamp}-${platform}`
-    return `gen-${crypto.createHash('md5').update(data).digest('hex')}`
-  }
-
-  /**
-   * Get the number of seconds from a date to a unix timestamp.
-   * If the unix timestamp is before the date, returns 3 minutes as a safety margin.
-   * @param unixTimestamp The unix timestamp to count towards
-   * @param date The date to count from (defaults to now, in UTC)
-   * @returns The number of seconds from the date to the unix timestamp
-   */
-  static secondsUntilTimestamp(
-    unixTimestamp: number,
-    date: Date = moment().utc().toDate(),
-  ): number {
-    const timestampedDate: number = moment.utc(date).unix()
-    if (timestampedDate > unixTimestamp) {
-      return 60 * 3
-    }
-    return Math.floor(unixTimestamp - timestampedDate)
-  }
-}
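The deleted base class above is the core of the old integration framework. Its generateSourceIdHash helper is worth a second look, since activities created through it may still carry `gen-`-prefixed sourceIds in the database. A minimal sketch of what it computes; the argument values here are made up for illustration:

import crypto from 'crypto'

// Mirrors IntegrationServiceBase.generateSourceIdHash from the deleted file above:
// a deterministic md5 over the activity's identifying fields, prefixed with 'gen-'
// so generated sourceIds can be told apart from remote ones.
const data = ['member-123', 'comment', '1692748800', 'youtube'].join('-')
const sourceId = `gen-${crypto.createHash('md5').update(data).digest('hex')}`
// -> 'gen-' plus 32 hex characters, stable across runs for identical inputs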
diff --git a/backend/src/serverless/integrations/services/integrationTickProcessor.ts b/backend/src/serverless/integrations/services/integrationTickProcessor.ts
index 183e59d508..e418573eb7 100644
--- a/backend/src/serverless/integrations/services/integrationTickProcessor.ts
+++ b/backend/src/serverless/integrations/services/integrationTickProcessor.ts
@@ -1,26 +1,20 @@
 import { processPaginated, singleOrDefault } from '@crowd/common'
-import { INTEGRATION_SERVICES } from '@crowd/integrations'
-import { LoggerBase, getChildLogger } from '@crowd/logging'
 import {
+  DataSinkWorkerEmitter,
   IntegrationRunWorkerEmitter,
   IntegrationStreamWorkerEmitter,
-  DataSinkWorkerEmitter,
-} from '@crowd/sqs'
-import { IntegrationRunState, IntegrationType } from '@crowd/types'
-import SequelizeRepository from '@/database/repositories/sequelizeRepository'
-import MicroserviceRepository from '@/database/repositories/microserviceRepository'
+} from '@crowd/common_services'
+import { INTEGRATION_SERVICES } from '@crowd/integrations'
+import { LoggerBase, getChildLogger } from '@crowd/logging'
+import { IntegrationType } from '@crowd/types'
 import IntegrationRepository from '@/database/repositories/integrationRepository'
-import { IRepositoryOptions } from '@/database/repositories/IRepositoryOptions'
 import IntegrationRunRepository from '../../../database/repositories/integrationRunRepository'
 import { IServiceOptions } from '../../../services/IServiceOptions'
-import { NodeWorkerIntegrationProcessMessage } from '../../../types/mq/nodeWorkerIntegrationProcessMessage'
-import { sendNodeWorkerMessage } from '../../utils/nodeWorkerSQS'
 import {
+  getDataSinkWorkerEmitter,
   getIntegrationRunWorkerEmitter,
   getIntegrationStreamWorkerEmitter,
-  getDataSinkWorkerEmitter,
 } from '../../utils/serviceSQS'
-import { IntegrationServiceBase } from './integrationServiceBase'

 export class IntegrationTickProcessor extends LoggerBase {
   private tickTrackingMap: Map<IntegrationType, number> = new Map()
@@ -35,15 +29,10 @@ export class IntegrationTickProcessor extends LoggerBase {

   constructor(
     options: IServiceOptions,
-    private readonly integrationServices: IntegrationServiceBase[],
     private readonly integrationRunRepository: IntegrationRunRepository,
   ) {
     super(options.log)

-    for (const intService of this.integrationServices) {
-      this.tickTrackingMap[intService.type] = 0
-    }
-
     for (const intService of INTEGRATION_SERVICES) {
       this.tickTrackingMap[intService.type] = 0
     }
@@ -67,18 +56,11 @@ export class IntegrationTickProcessor extends LoggerBase {
   private async processCheckTick() {
     this.log.trace('Processing integration processor tick!')

-    const tickers: IIntTicker[] = this.integrationServices.map((i) => ({
+    const tickers: IIntTicker[] = INTEGRATION_SERVICES.map((i) => ({
       type: i.type,
-      ticksBetweenChecks: i.ticksBetweenChecks,
+      ticksBetweenChecks: i.checkEvery || -1,
     }))

-    for (const service of INTEGRATION_SERVICES) {
-      tickers.push({
-        type: service.type,
-        ticksBetweenChecks: service.checkEvery || -1,
-      })
-    }
-
     const promises: Promise<void>[] = []

     for (const intService of tickers) {
@@ -121,134 +103,62 @@ export class IntegrationTickProcessor extends LoggerBase {
     const logger = getChildLogger('processCheck', this.log, { IntegrationType: type })
     logger.trace('Processing integration check!')

-    if (type === IntegrationType.TWITTER_REACH) {
-      await processPaginated(
-        async (page) => MicroserviceRepository.findAllByType('twitter_followers', page, 10),
-        async (microservices) => {
-          this.log.debug({ type, count: microservices.length }, 'Found microservices to check!')
-          for (const micro of microservices) {
-            const existingRun = await this.integrationRunRepository.findLastProcessingRun(
-              undefined,
-              micro.id,
-            )
-            if (!existingRun) {
-              const microservice = micro as any
+    const newIntService = singleOrDefault(INTEGRATION_SERVICES, (i) =>
i.type === type) - - if (!newIntService) { - throw new Error(`No integration service found for type ${type}!`) - } - - const emitter = await getIntegrationRunWorkerEmitter() - - await processPaginated( - async (page) => IntegrationRepository.findAllActive(type, page, 10), - async (integrations) => { - logger.debug( - { integrationIds: integrations.map((i) => i.id) }, - 'Found new integrations to check!', - ) - for (const integration of integrations as any[]) { - const existingRun = - await this.integrationRunRepository.findLastProcessingRunInNewFramework( + for (const integration of integrations as any[]) { + const existingRun = + await this.integrationRunRepository.findLastProcessingRunInNewFramework(integration.id) + if (!existingRun) { + const CHUNKS = 3 // Define the number of chunks + const DELAY_BETWEEN_CHUNKS = 30 * 60 * 1000 // Define the delay between chunks in milliseconds + const rand = Math.random() * CHUNKS + const chunkIndex = Math.min(Math.floor(rand), CHUNKS - 1) + const delay = chunkIndex * DELAY_BETWEEN_CHUNKS + + // Divide integrations into chunks for Discord + if (newIntService.type === IntegrationType.DISCORD) { + setTimeout(async () => { + logger.info( + { integrationId: integration.id }, + `Triggering new delayed integration check for Discord in ${ + delay / 60 / 1000 + } minutes!`, + ) + await emitter.triggerIntegrationRun( + integration.tenantId, + integration.platform, integration.id, + false, ) - if (!existingRun) { - const CHUNKS = 3 // Define the number of chunks - const DELAY_BETWEEN_CHUNKS = 30 * 60 * 1000 // Define the delay between chunks in milliseconds - const rand = Math.random() * CHUNKS - const chunkIndex = Math.min(Math.floor(rand), CHUNKS - 1) - const delay = chunkIndex * DELAY_BETWEEN_CHUNKS - - // Divide integrations into chunks for Discord - if (newIntService.type === IntegrationType.DISCORD) { - setTimeout(async () => { - logger.info( - { integrationId: integration.id }, - `Triggering new delayed integration check for Discord in ${ - delay / 60 / 1000 - } minutes!`, - ) - await emitter.triggerIntegrationRun( - integration.tenantId, - integration.platform, - integration.id, - false, - ) - }, delay) - } else { - logger.info( - { integrationId: integration.id }, - 'Triggering new integration check!', - ) - await emitter.triggerIntegrationRun( - integration.tenantId, - integration.platform, - integration.id, - false, - ) - } - } else { - logger.info({ integrationId: integration.id }, 'Existing run found, skipping!') - } + }, delay) + } else { + logger.info({ integrationId: integration.id }, 'Triggering new integration check!') + await emitter.triggerIntegrationRun( + integration.tenantId, + integration.platform, + integration.id, + false, + ) } - }, - ) - } - } + } else { + logger.info({ integrationId: integration.id }, 'Existing run found, skipping!') + } + } + }, + ) } private async processDelayedTick() { @@ -256,23 +166,6 @@ export class IntegrationTickProcessor extends LoggerBase { await this.intRunWorkerEmitter.checkRuns() await this.intStreamWorkerEmitter.checkStreams() await this.dataSinkWorkerEmitter.checkResults() - - // TODO check streams as well - this.log.trace('Checking for delayed integration runs!') - - await processPaginated( - async (page) => this.integrationRunRepository.findDelayedRuns(page, 10), - async (delayedRuns) => { - for (const run of delayedRuns) { - this.log.info({ runId: run.id }, 'Triggering delayed integration run processing!') - - await sendNodeWorkerMessage( - new Date().toISOString(), - new 
NodeWorkerIntegrationProcessMessage(run.id), - ) - } - }, - ) } } diff --git a/backend/src/serverless/integrations/services/webhookProcessor.ts b/backend/src/serverless/integrations/services/webhookProcessor.ts deleted file mode 100644 index af4ad4d1d1..0000000000 --- a/backend/src/serverless/integrations/services/webhookProcessor.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { LoggerBase, getChildLogger } from '@crowd/logging' -import moment from 'moment' -import { singleOrDefault } from '@crowd/common' -import { IRepositoryOptions } from '../../../database/repositories/IRepositoryOptions' -import IncomingWebhookRepository from '../../../database/repositories/incomingWebhookRepository' -import IntegrationRepository from '../../../database/repositories/integrationRepository' -import SequelizeRepository from '../../../database/repositories/sequelizeRepository' -import getUserContext from '../../../database/utils/getUserContext' -import { IServiceOptions } from '../../../services/IServiceOptions' -import { IStepContext } from '../../../types/integration/stepResult' -import { NodeWorkerProcessWebhookMessage } from '../../../types/mq/nodeWorkerProcessWebhookMessage' -import { WebhookState } from '../../../types/webhooks' -import bulkOperations from '../../dbOperations/operationsWorker' -import { sendNodeWorkerMessage } from '../../utils/nodeWorkerSQS' -import { IntegrationServiceBase } from './integrationServiceBase' -import SegmentRepository from '../../../database/repositories/segmentRepository' - -export class WebhookProcessor extends LoggerBase { - constructor( - options: IServiceOptions, - private readonly integrationServices: IntegrationServiceBase[], - ) { - super(options.log) - } - - static readonly MAX_RETRY_LIMIT = 5 - - async processWebhook(webhookId: string, force?: boolean, fireCrowdWebhooks?: boolean) { - const options = (await SequelizeRepository.getDefaultIRepositoryOptions()) as IRepositoryOptions - const repo = new IncomingWebhookRepository(options) - const webhook = await repo.findById(webhookId) - let logger = getChildLogger('processWebhook', this.log, { webhookId }) - - if (webhook === null || webhook === undefined) { - logger.error('Webhook not found!') - return - } - - logger.debug('Processing webhook!') - - logger = getChildLogger('processWebhook', this.log, { - type: webhook.type, - tenantId: webhook.tenantId, - integrationId: webhook.integrationId, - }) - - logger.debug('Webhook found!') - - if (!(force === true) && webhook.state !== WebhookState.PENDING) { - logger.error({ state: webhook.state }, 'Webhook is not in pending state!') - return - } - - const userContext = await getUserContext(webhook.tenantId) - userContext.log = logger - - const integration = await IntegrationRepository.findById(webhook.integrationId, userContext) - if (integration.platform === 'github' || integration.platform === 'discord') { - return - } - const segment = await new SegmentRepository(userContext).findById(integration.segmentId) - userContext.currentSegments = [segment] - - const intService = singleOrDefault( - this.integrationServices, - (s) => s.type === integration.platform, - ) - if (intService === undefined) { - logger.error('No integration service configured!') - throw new Error(`No integration service configured for type '${integration.platform}'!`) - } - - const stepContext: IStepContext = { - startTimestamp: moment().utc().unix(), - limitCount: integration.limitCount || 0, - onboarding: false, - pipelineData: {}, - webhook, - integration, - serviceContext: userContext, - 
repoContext: userContext, - logger, - } - - if (integration.settings.updateMemberAttributes) { - logger.trace('Updating member attributes!') - - await intService.createMemberAttributes(stepContext) - - integration.settings.updateMemberAttributes = false - await IntegrationRepository.update( - integration.id, - { settings: integration.settings }, - userContext, - ) - } - - const whContext = { ...userContext } - whContext.transaction = await SequelizeRepository.createTransaction(whContext) - - try { - const result = await intService.processWebhook(webhook, stepContext) - for (const operation of result.operations) { - if (operation.records.length > 0) { - logger.trace( - { operationType: operation.type }, - `Processing bulk operation with ${operation.records.length} records!`, - ) - await bulkOperations(operation.type, operation.records, userContext, fireCrowdWebhooks) - } - } - await repo.markCompleted(webhook.id) - logger.debug('Webhook processed!') - } catch (err) { - if (err.rateLimitResetSeconds) { - logger.warn(err, 'Rate limit reached while processing webhook! Delaying...') - await sendNodeWorkerMessage( - integration.tenantId, - new NodeWorkerProcessWebhookMessage(integration.tenantId, webhookId), - err.rateLimitResetSeconds + 5, - ) - } else { - logger.error(err, 'Error processing webhook!') - await repo.markError(webhook.id, err) - } - } finally { - await SequelizeRepository.commitTransaction(whContext.transaction) - } - } -} diff --git a/backend/src/serverless/integrations/workers/sendgridWebhookWorker.ts b/backend/src/serverless/integrations/workers/sendgridWebhookWorker.ts index f66833339c..645407dd04 100644 --- a/backend/src/serverless/integrations/workers/sendgridWebhookWorker.ts +++ b/backend/src/serverless/integrations/workers/sendgridWebhookWorker.ts @@ -1,15 +1,13 @@ import { getServiceChildLogger } from '@crowd/logging' -import { EventWebhook, EventWebhookHeader } from '@sendgrid/eventwebhook' import { PlatformType } from '@crowd/types' +import { EventWebhook, EventWebhookHeader } from '@sendgrid/eventwebhook' +import { getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS' import { IS_PROD_ENV, SENDGRID_CONFIG } from '../../../conf' import SequelizeRepository from '../../../database/repositories/sequelizeRepository' import UserRepository from '../../../database/repositories/userRepository' import getUserContext from '../../../database/utils/getUserContext' import EagleEyeContentService from '../../../services/eagleEyeContentService' -import { NodeWorkerMessageBase } from '../../../types/mq/nodeWorkerMessageBase' import { SendgridWebhookEvent, SendgridWebhookEventType } from '../../../types/webhooks' -import { NodeWorkerMessageType } from '../../types/workerTypes' -import { sendNodeWorkerMessage } from '../../utils/nodeWorkerSQS' const log = getServiceChildLogger('sendgridWebhookWorker') @@ -46,13 +44,10 @@ export default async function sendgridWebhookWorker(req) { } } + const emitter = await getNodejsWorkerEmitter() for (const event of events) { if (event.sg_template_id === SENDGRID_CONFIG.templateEagleEyeDigest) { - await sendNodeWorkerMessage(event.sg_event_id, { - type: NodeWorkerMessageType.NODE_MICROSERVICE, - event, - service: 'sendgrid-webhooks', - } as NodeWorkerMessageBase) + await emitter.sendgridWebhook(event) } } diff --git a/backend/src/serverless/integrations/workers/stripeWebhookWorker.ts b/backend/src/serverless/integrations/workers/stripeWebhookWorker.ts index 813d1c2f3a..b100860c50 100644 --- 
a/backend/src/serverless/integrations/workers/stripeWebhookWorker.ts +++ b/backend/src/serverless/integrations/workers/stripeWebhookWorker.ts @@ -1,15 +1,13 @@ +import { timeout } from '@crowd/common' import { getServiceChildLogger } from '@crowd/logging' import { getRedisClient, RedisPubSubEmitter } from '@crowd/redis' +import { ApiWebsocketMessage } from '@crowd/types' import moment from 'moment' import { Stripe } from 'stripe' -import { timeout } from '@crowd/common' -import { ApiWebsocketMessage } from '@crowd/types' +import { getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS' import { PLANS_CONFIG, REDIS_CONFIG } from '../../../conf' import SequelizeRepository from '../../../database/repositories/sequelizeRepository' import Plans from '../../../security/plans' -import { NodeWorkerMessageBase } from '../../../types/mq/nodeWorkerMessageBase' -import { NodeWorkerMessageType } from '../../types/workerTypes' -import { sendNodeWorkerMessage } from '../../utils/nodeWorkerSQS' const log = getServiceChildLogger('stripeWebhookWorker') @@ -24,11 +22,8 @@ export default async function stripeWebhookWorker(req) { try { event = stripe.webhooks.constructEvent(req.rawBody, sig, PLANS_CONFIG.stripWebhookSigningSecret) - await sendNodeWorkerMessage(event.id, { - type: NodeWorkerMessageType.NODE_MICROSERVICE, - event, - service: 'stripe-webhooks', - } as NodeWorkerMessageBase) + const emitter = await getNodejsWorkerEmitter() + await emitter.stripeWebhook(event) } catch (err) { log.error(`Webhook Error: ${err.message}`) return { diff --git a/backend/src/serverless/microservices/nodejs/csv-export/csvExportWorker.ts b/backend/src/serverless/microservices/nodejs/csv-export/csvExportWorker.ts index 0d1e77eee2..768ea45f1d 100644 --- a/backend/src/serverless/microservices/nodejs/csv-export/csvExportWorker.ts +++ b/backend/src/serverless/microservices/nodejs/csv-export/csvExportWorker.ts @@ -6,10 +6,11 @@ import { Hash } from '@aws-sdk/hash-node' import { parseUrl } from '@aws-sdk/url-parser' import { formatUrl } from '@aws-sdk/util-format-url' import { getServiceChildLogger } from '@crowd/logging' +import { ExportableEntity } from '@crowd/types' import getUserContext from '../../../../database/utils/getUserContext' import EmailSender from '../../../../services/emailSender' import { S3_CONFIG } from '../../../../conf' -import { BaseOutput, ExportableEntity } from '../messageTypes' +import { BaseOutput } from '../messageTypes' import getStage from '../../../../services/helpers/getStage' import { s3 } from '../../../../services/aws' import MemberService from '../../../../services/memberService' diff --git a/backend/src/serverless/microservices/nodejs/messageTypes.ts b/backend/src/serverless/microservices/nodejs/messageTypes.ts index 30b9aa44dc..acf34ad08b 100644 --- a/backend/src/serverless/microservices/nodejs/messageTypes.ts +++ b/backend/src/serverless/microservices/nodejs/messageTypes.ts @@ -1,4 +1,4 @@ -import { AutomationTrigger, AutomationType } from '@crowd/types' +import { AutomationTrigger, AutomationType, ExportableEntity } from '@crowd/types' export type BaseNodeMicroserviceMessage = { service: string @@ -68,10 +68,6 @@ export interface AnalyticsEmailsOutput extends BaseOutput { emailSent: boolean } -export enum ExportableEntity { - MEMBERS = 'members', -} - export type BulkEnrichMessage = { service: string tenant: string diff --git a/backend/src/serverless/types/workerTypes.ts b/backend/src/serverless/types/workerTypes.ts index 9ab75a0917..91c9bc3dbd 100644 --- 
a/backend/src/serverless/types/workerTypes.ts
+++ b/backend/src/serverless/types/workerTypes.ts
@@ -1,9 +1,7 @@
 export enum NodeWorkerMessageType {
   INTEGRATION_CHECK = 'integration_check',
-  INTEGRATION_PROCESS = 'integration_process',
   NODE_MICROSERVICE = 'node_microservice',
   DB_OPERATIONS = 'db_operations',
-  PROCESS_WEBHOOK = 'process_webhook',
 }

 export enum PythonWorkerMessageType {
diff --git a/backend/src/serverless/utils/nodeWorkerSQS.ts b/backend/src/serverless/utils/nodeWorkerSQS.ts
deleted file mode 100644
index 0bbe22ddd7..0000000000
--- a/backend/src/serverless/utils/nodeWorkerSQS.ts
+++ /dev/null
@@ -1,171 +0,0 @@
-import { getServiceChildLogger } from '@crowd/logging'
-import { SqsMessageAttributes, sendMessage } from '@crowd/sqs'
-import { AutomationTrigger } from '@crowd/types'
-import moment from 'moment'
-import { IS_TEST_ENV, SQS_CONFIG } from '../../conf'
-import { NodeWorkerMessageBase } from '../../types/mq/nodeWorkerMessageBase'
-import { ExportableEntity } from '../microservices/nodejs/messageTypes'
-import { NodeWorkerMessageType } from '../types/workerTypes'
-import { SQS_CLIENT } from './serviceSQS'
-
-const log = getServiceChildLogger('nodeWorkerSQS')
-
-// 15 minutes is the maximum delay SQS allows
-const limitSeconds = 15 * 60
-
-export const sendNodeWorkerMessage = async (
-  tenantId: string,
-  body: NodeWorkerMessageBase,
-  delaySeconds?: number,
-  targetQueueUrl?: string,
-): Promise<void> => {
-  if (IS_TEST_ENV) {
-    return
-  }
-
-  // we can only delay for 15 minutes, then we have to re-delay the message
-  let attributes: SqsMessageAttributes
-  let delay: number
-  let delayed = false
-  if (delaySeconds) {
-    if (delaySeconds > limitSeconds) {
-      // delay for 15 minutes and add the remainder to the attributes
-      const remainedSeconds = delaySeconds - limitSeconds
-      attributes = {
-        tenantId: {
-          DataType: 'String',
-          StringValue: tenantId,
-        },
-        remainingDelaySeconds: {
-          DataType: 'Number',
-          StringValue: `${remainedSeconds}`,
-        },
-      }
-
-      if (targetQueueUrl) {
-        attributes.targetQueueUrl = { DataType: 'String', StringValue: targetQueueUrl }
-      }
-      delay = limitSeconds
-    } else {
-      attributes = {
-        tenantId: {
-          DataType: 'String',
-          StringValue: tenantId,
-        },
-      }
-      if (targetQueueUrl) {
-        attributes.targetQueueUrl = { DataType: 'String', StringValue: targetQueueUrl }
-      }
-      delay = delaySeconds
-    }
-
-    delayed = true
-  }
-  const now = moment().valueOf()
-
-  const params = {
-    QueueUrl: delayed ? SQS_CONFIG.nodejsWorkerDelayableQueue : SQS_CONFIG.nodejsWorkerQueue,
-    MessageGroupId: delayed ? undefined : `${now}`,
-    MessageDeduplicationId: delayed ? undefined : `${tenantId}-${now}`,
-    MessageBody: JSON.stringify(body),
-    MessageAttributes: attributes,
-    DelaySeconds: delay,
-  }
-
-  log.debug(
-    {
-      messageType: body.type,
-      body,
-    },
-    'Sending nodejs-worker sqs message!',
-  )
-  await sendMessage(SQS_CLIENT(), params)
-}
-
-export const sendNewActivityNodeSQSMessage = async (
-  tenant: string,
-  activityId: string,
-  segmentId: string,
-): Promise<void> => {
-  const payload = {
-    type: NodeWorkerMessageType.NODE_MICROSERVICE,
-    tenant,
-    activityId,
-    segmentId,
-    trigger: AutomationTrigger.NEW_ACTIVITY,
-    service: 'automation',
-  }
-  await sendNodeWorkerMessage(tenant, payload as NodeWorkerMessageBase)
-}
-
-export const sendNewMemberNodeSQSMessage = async (
-  tenant: string,
-  memberId: string,
-  segmentId: string,
-): Promise<void> => {
-  const payload = {
-    type: NodeWorkerMessageType.NODE_MICROSERVICE,
-    tenant,
-    memberId,
-    segmentId,
-    trigger: AutomationTrigger.NEW_MEMBER,
-    service: 'automation',
-  }
-  await sendNodeWorkerMessage(tenant, payload as NodeWorkerMessageBase)
-}
-
-export const sendExportCSVNodeSQSMessage = async (
-  tenant: string,
-  user: string,
-  entity: ExportableEntity,
-  segmentIds: string[],
-  criteria: any,
-): Promise<void> => {
-  const payload = {
-    type: NodeWorkerMessageType.NODE_MICROSERVICE,
-    service: 'csv-export',
-    user,
-    tenant,
-    entity,
-    criteria,
-    segmentIds,
-  }
-  await sendNodeWorkerMessage(tenant, payload as NodeWorkerMessageBase)
-}
-
-export const sendBulkEnrichMessage = async (
-  tenant: string,
-  memberIds: string[],
-  segmentIds: string[],
-  notifyFrontend: boolean = true,
-  skipCredits: boolean = false,
-): Promise<void> => {
-  const payload = {
-    type: NodeWorkerMessageType.NODE_MICROSERVICE,
-    service: 'bulk-enrich',
-    memberIds,
-    tenant,
-    segmentIds,
-    notifyFrontend,
-    skipCredits,
-  }
-  await sendNodeWorkerMessage(tenant, payload as NodeWorkerMessageBase)
-}
-
-export const sendOrgMergeMessage = async (
-  tenantId: string,
-  primaryOrgId: string,
-  secondaryOrgId: string,
-  notifyFrontend: boolean = true,
-): Promise<void> => {
-  const payload = {
-    type: NodeWorkerMessageType.NODE_MICROSERVICE,
-    service: 'org-merge',
-    tenantId,
-    primaryOrgId,
-    secondaryOrgId,
-    notifyFrontend,
-  }
-  await sendNodeWorkerMessage(tenantId, payload as NodeWorkerMessageBase)
-}
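Note the delay handling in the deleted sendNodeWorkerMessage: SQS caps DelaySeconds at 15 minutes, so longer delays were sent with the maximum native delay plus a remainingDelaySeconds message attribute that let the delayable-queue consumer re-delay the message on receive. A small self-contained sketch of that split, using the same constant as the deleted code:

// SQS only allows up to 15 minutes of native delay per send.
const limitSeconds = 15 * 60

function splitDelay(delaySeconds: number): { delay: number; remainingDelaySeconds?: number } {
  if (delaySeconds <= limitSeconds) {
    // fits in a single native SQS delay
    return { delay: delaySeconds }
  }
  // send with the max delay and carry the rest for a re-delay on receive
  return { delay: limitSeconds, remainingDelaySeconds: delaySeconds - limitSeconds }
}

// splitDelay(20 * 60) -> { delay: 900, remainingDelaySeconds: 300 }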
diff --git a/backend/src/serverless/utils/serviceSQS.ts b/backend/src/serverless/utils/serviceSQS.ts
index 2f25f478b2..cfdf20d01c 100644
--- a/backend/src/serverless/utils/serviceSQS.ts
+++ b/backend/src/serverless/utils/serviceSQS.ts
@@ -1,15 +1,20 @@
+import { SqsClient, getSqsClient } from '@crowd/sqs'
+import { getServiceChildLogger } from '@crowd/logging'
+import { getServiceTracer } from '@crowd/tracing'
 import {
   IntegrationRunWorkerEmitter,
   IntegrationStreamWorkerEmitter,
-  IntegrationSyncWorkerEmitter,
   SearchSyncWorkerEmitter,
   DataSinkWorkerEmitter,
-  SqsClient,
-  getSqsClient,
-} from '@crowd/sqs'
-import { getServiceChildLogger } from '@crowd/logging'
-import { getServiceTracer } from '@crowd/tracing'
-import { SQS_CONFIG } from '../../conf'
+  IntegrationSyncWorkerEmitter,
+  QueuePriorityContextLoader,
+  NodejsWorkerEmitter,
+} from '@crowd/common_services'
+import { UnleashClient, getUnleashClient } from '@crowd/feature-flags'
+import { RedisClient, getRedisClient } from '@crowd/redis'
+import { REDIS_CONFIG, SERVICE, SQS_CONFIG, UNLEASH_CONFIG } from '../../conf'
+import SequelizeRepository from '@/database/repositories/sequelizeRepository'
+import { PriorityLevelContextRepository } from '@/database/repositories/priorityLevelContextRepository'

 const tracer = getServiceTracer()
 const log = getServiceChildLogger('service.sqs')
@@ -29,11 +34,58 @@ export const SQS_CLIENT = (): SqsClient => {
   return sqsClient
 }

+let unleashClient: UnleashClient | undefined
+let unleashClientInitialized = false
+const UNLEASH_CLIENT = async (): Promise<UnleashClient | undefined> => {
+  if (unleashClientInitialized) {
+    return unleashClient
+  }
+
+  unleashClient = await getUnleashClient({
+    url: UNLEASH_CONFIG.url,
+    apiKey: UNLEASH_CONFIG.backendApiKey,
+    appName: SERVICE,
+  })
+  unleashClientInitialized = true
+  return unleashClient
+}
+
+let redisClient: RedisClient
+const REDIS_CLIENT = async (): Promise<RedisClient> => {
+  if (redisClient) {
+    return redisClient
+  }
+
+  redisClient = await getRedisClient(REDIS_CONFIG, true)
+
+  return redisClient
+}
+
+let loader: QueuePriorityContextLoader
+export const QUEUE_PRIORITY_LOADER = async (): Promise<QueuePriorityContextLoader> => {
+  if (loader) {
+    return loader
+  }
+
+  const options = await SequelizeRepository.getDefaultIRepositoryOptions()
+  const repo = new PriorityLevelContextRepository(options)
+
+  loader = (tenantId: string) => repo.loadPriorityLevelContext(tenantId)
+  return loader
+}
+
 let runWorkerEmitter: IntegrationRunWorkerEmitter
 export const getIntegrationRunWorkerEmitter = async (): Promise<IntegrationRunWorkerEmitter> => {
   if (runWorkerEmitter) return runWorkerEmitter

-  runWorkerEmitter = new IntegrationRunWorkerEmitter(SQS_CLIENT(), tracer, log)
+  runWorkerEmitter = new IntegrationRunWorkerEmitter(
+    SQS_CLIENT(),
+    await REDIS_CLIENT(),
+    tracer,
+    await UNLEASH_CLIENT(),
+    await QUEUE_PRIORITY_LOADER(),
+    log,
+  )
   await runWorkerEmitter.init()
   return runWorkerEmitter
 }
@@ -43,7 +95,14 @@
 export const getIntegrationStreamWorkerEmitter = async (): Promise<IntegrationStreamWorkerEmitter> => {
   if (streamWorkerEmitter) return streamWorkerEmitter

-  streamWorkerEmitter = new IntegrationStreamWorkerEmitter(SQS_CLIENT(), tracer, log)
+  streamWorkerEmitter = new IntegrationStreamWorkerEmitter(
+    SQS_CLIENT(),
+    await REDIS_CLIENT(),
+    tracer,
+    await UNLEASH_CLIENT(),
+    await QUEUE_PRIORITY_LOADER(),
+    log,
+  )
   await streamWorkerEmitter.init()
   return streamWorkerEmitter
 }
@@ -52,7 +111,14 @@
 let searchSyncWorkerEmitter: SearchSyncWorkerEmitter
 export const getSearchSyncWorkerEmitter = async (): Promise<SearchSyncWorkerEmitter> => {
   if (searchSyncWorkerEmitter) return searchSyncWorkerEmitter

-  searchSyncWorkerEmitter = new SearchSyncWorkerEmitter(SQS_CLIENT(), tracer, log)
+  searchSyncWorkerEmitter = new SearchSyncWorkerEmitter(
+    SQS_CLIENT(),
+    await REDIS_CLIENT(),
+    tracer,
+    await UNLEASH_CLIENT(),
+    await QUEUE_PRIORITY_LOADER(),
+    log,
+  )
   await searchSyncWorkerEmitter.init()
   return searchSyncWorkerEmitter
 }
@@ -61,7 +127,14 @@
 let integrationSyncWorkerEmitter: IntegrationSyncWorkerEmitter
 export const getIntegrationSyncWorkerEmitter = async (): Promise<IntegrationSyncWorkerEmitter> => {
   if (integrationSyncWorkerEmitter) return integrationSyncWorkerEmitter

-  integrationSyncWorkerEmitter = new IntegrationSyncWorkerEmitter(SQS_CLIENT(), tracer, log)
+  integrationSyncWorkerEmitter = new IntegrationSyncWorkerEmitter(
+    SQS_CLIENT(),
+    await REDIS_CLIENT(),
+    tracer,
+    await UNLEASH_CLIENT(),
+    await QUEUE_PRIORITY_LOADER(),
+    log,
+  )
   await integrationSyncWorkerEmitter.init()
   return integrationSyncWorkerEmitter
 }
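Every getter in this file now shares the same shape: a module-level singleton, a constructor that takes the SQS client, Redis, the tracer, the Unleash client, the queue-priority loader, and the logger, followed by an init() call. If the list keeps growing, the boilerplate could be factored into a generic helper. A sketch of one possible refactor, not part of this patch; the six-argument EmitterCtor shape is assumed from the constructors above, and tracer/log are typed loosely on purpose:

// Hypothetical helper: builds a lazy, memoized getter for any emitter
// whose constructor matches the six-argument shape used in this file.
type EmitterCtor<T extends { init(): Promise<void> }> = new (
  sqs: SqsClient,
  redis: RedisClient,
  tracer: unknown,
  unleash: UnleashClient | undefined,
  loader: QueuePriorityContextLoader,
  log: unknown,
) => T

function lazyEmitter<T extends { init(): Promise<void> }>(Ctor: EmitterCtor<T>): () => Promise<T> {
  let instance: T | undefined
  return async (): Promise<T> => {
    if (instance) return instance
    instance = new Ctor(
      SQS_CLIENT(),
      await REDIS_CLIENT(),
      tracer,
      await UNLEASH_CLIENT(),
      await QUEUE_PRIORITY_LOADER(),
      log,
    )
    await instance.init()
    return instance
  }
}

// usage: export const getDataSinkWorkerEmitter = lazyEmitter(DataSinkWorkerEmitter)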
@@ -70,7 +143,30 @@
 let dataSinkWorkerEmitter: DataSinkWorkerEmitter
 export const getDataSinkWorkerEmitter = async (): Promise<DataSinkWorkerEmitter> => {
   if (dataSinkWorkerEmitter) return dataSinkWorkerEmitter

-  dataSinkWorkerEmitter = new DataSinkWorkerEmitter(SQS_CLIENT(), tracer, log)
+  dataSinkWorkerEmitter = new DataSinkWorkerEmitter(
+    SQS_CLIENT(),
+    await REDIS_CLIENT(),
+    tracer,
+    await UNLEASH_CLIENT(),
+    await QUEUE_PRIORITY_LOADER(),
+    log,
+  )
   await dataSinkWorkerEmitter.init()
   return dataSinkWorkerEmitter
 }
+
+let nodejsWorkerEmitter: NodejsWorkerEmitter
+export const getNodejsWorkerEmitter = async (): Promise<NodejsWorkerEmitter> => {
+  if (nodejsWorkerEmitter) return nodejsWorkerEmitter
+
+  nodejsWorkerEmitter = new NodejsWorkerEmitter(
+    SQS_CLIENT(),
+    await REDIS_CLIENT(),
+    tracer,
+    await UNLEASH_CLIENT(),
+    await QUEUE_PRIORITY_LOADER(),
+    log,
+  )
+  await nodejsWorkerEmitter.init()
+  return nodejsWorkerEmitter
+}
diff --git a/backend/src/services/memberService.ts b/backend/src/services/memberService.ts
index f4ea614c10..203dcb723c 100644
--- a/backend/src/services/memberService.ts
+++ b/backend/src/services/memberService.ts
@@ -4,6 +4,7 @@ import { SERVICE, Error400, isDomainExcluded } from '@crowd/common'
 import { LoggerBase } from '@crowd/logging'
 import { WorkflowIdReusePolicy } from '@crowd/temporal'
 import {
+  ExportableEntity,
   FeatureFlag,
   IOrganization,
   ISearchSyncOptions,
@@ -30,8 +31,6 @@ import {
 } from '../database/repositories/types/memberTypes'
 import isFeatureEnabled from '../feature-flags/isFeatureEnabled'
 import telemetryTrack from '../segment/telemetryTrack'
-import { ExportableEntity } from '../serverless/microservices/nodejs/messageTypes'
-import { sendExportCSVNodeSQSMessage } from '../serverless/utils/nodeWorkerSQS'
 import { IServiceOptions } from './IServiceOptions'
 import merge from './helpers/merge'
 import MemberAttributeSettingsService from './memberAttributeSettingsService'
@@ -40,6 +39,7 @@ import SearchSyncService from './searchSyncService'
 import SettingsService from './settingsService'
 import { GITHUB_TOKEN_CONFIG } from '../conf'
 import { ServiceType } from '@/conf/configTypes'
+import { getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS'
 import MemberOrganizationService from './memberOrganizationService'

@@ -1203,14 +1203,15 @@ export default class MemberService extends LoggerBase {
   }

   async export(data) {
-    const result = await sendExportCSVNodeSQSMessage(
+    const emitter = await getNodejsWorkerEmitter()
+    await emitter.exportCSV(
       this.options.currentTenant.id,
       this.options.currentUser.id,
       ExportableEntity.MEMBERS,
       SequelizeRepository.getSegmentIds(this.options),
       data,
     )
-    return result
+    return {}
   }

   async findMembersWithMergeSuggestions(args) {
diff --git a/backend/src/services/organizationService.ts b/backend/src/services/organizationService.ts
index bd681c65fc..01871d70d1 100644
--- a/backend/src/services/organizationService.ts
+++ b/backend/src/services/organizationService.ts
@@ -20,7 +20,6 @@ import organizationCacheRepository from '../database/repositories/organizationCa
 import OrganizationRepository from '../database/repositories/organizationRepository'
 import SequelizeRepository from '../database/repositories/sequelizeRepository'
 import telemetryTrack from '../segment/telemetryTrack'
-import { sendOrgMergeMessage } from '../serverless/utils/nodeWorkerSQS'
 import { IServiceOptions } from './IServiceOptions'
 import merge from './helpers/merge'
 import {
@@ -29,6 +28,7 @@ import {
   mergeUniqueStringArrayItems,
 } from './helpers/mergeFunctions'
 import SearchSyncService from './searchSyncService'
+import { getNodejsWorkerEmitter } from '@/serverless/utils/serviceSQS'
 import MemberOrganizationService from './memberOrganizationService'

 export default class OrganizationService extends LoggerBase {
@@
-44,7 +44,8 @@ export default class OrganizationService extends LoggerBase { await MergeActionsRepository.add(MergeActionType.ORG, originalId, toMergeId, this.options) - await sendOrgMergeMessage(tenantId, originalId, toMergeId) + const emitter = await getNodejsWorkerEmitter() + await emitter.mergeOrg(tenantId, originalId, toMergeId) } async mergeSync(originalId, toMergeId) { @@ -193,10 +194,16 @@ export default class OrganizationService extends LoggerBase { await searchSyncService.triggerRemoveOrganization(this.options.currentTenant.id, toMergeId) // sync organization members - await searchSyncService.triggerOrganizationMembersSync(originalId) + await searchSyncService.triggerOrganizationMembersSync( + this.options.currentTenant.id, + originalId, + ) // sync organization activities - await searchSyncService.triggerOrganizationActivitiesSync(originalId) + await searchSyncService.triggerOrganizationActivitiesSync( + this.options.currentTenant.id, + originalId, + ) this.options.log.info({ originalId, toMergeId }, 'Organizations merged!') return { status: 200, mergedId: originalId } diff --git a/backend/src/services/searchSyncService.ts b/backend/src/services/searchSyncService.ts index 8ce3c5469b..aaac0d5828 100644 --- a/backend/src/services/searchSyncService.ts +++ b/backend/src/services/searchSyncService.ts @@ -1,7 +1,7 @@ import { LoggerBase } from '@crowd/logging' import { SearchSyncApiClient } from '@crowd/opensearch' -import { SearchSyncWorkerEmitter } from '@crowd/sqs' import { FeatureFlag, SyncMode } from '@crowd/types' +import { SearchSyncWorkerEmitter } from '@crowd/common_services' import { getSearchSyncApiClient } from '../utils/apiClients' import { getSearchSyncWorkerEmitter } from '@/serverless/utils/serviceSQS' import isFeatureEnabled from '@/feature-flags/isFeatureEnabled' @@ -52,7 +52,7 @@ export default class SearchSyncService extends LoggerBase { if (client instanceof SearchSyncApiClient) { await client.triggerMemberSync(memberId) } else if (client instanceof SearchSyncWorkerEmitter) { - await client.triggerMemberSync(tenantId, memberId) + await client.triggerMemberSync(tenantId, memberId, false) } else { throw new Error('Unexpected search client type!') } @@ -68,11 +68,11 @@ export default class SearchSyncService extends LoggerBase { } } - async triggerOrganizationMembersSync(organizationId: string) { + async triggerOrganizationMembersSync(tenantId: string, organizationId: string) { const client = await this.getSearchSyncClient() if (client instanceof SearchSyncApiClient || client instanceof SearchSyncWorkerEmitter) { - await client.triggerOrganizationMembersSync(organizationId) + await client.triggerOrganizationMembersSync(tenantId, organizationId, false) } else { throw new Error('Unexpected search client type!') } @@ -84,7 +84,7 @@ export default class SearchSyncService extends LoggerBase { if (client instanceof SearchSyncApiClient) { await client.triggerRemoveMember(memberId) } else if (client instanceof SearchSyncWorkerEmitter) { - await client.triggerRemoveMember(tenantId, memberId) + await client.triggerRemoveMember(tenantId, memberId, false) } else { throw new Error('Unexpected search client type!') } @@ -106,7 +106,7 @@ export default class SearchSyncService extends LoggerBase { if (client instanceof SearchSyncApiClient) { await client.triggerActivitySync(activityId) } else if (client instanceof SearchSyncWorkerEmitter) { - await client.triggerActivitySync(tenantId, activityId) + await client.triggerActivitySync(tenantId, activityId, false) } else { throw new 
Error('Unexpected search client type!') } @@ -122,11 +122,11 @@ export default class SearchSyncService extends LoggerBase { } } - async triggerOrganizationActivitiesSync(organizationId: string) { + async triggerOrganizationActivitiesSync(tenantId: string, organizationId: string) { const client = await this.getSearchSyncClient() if (client instanceof SearchSyncApiClient || client instanceof SearchSyncWorkerEmitter) { - await client.triggerOrganizationActivitiesSync(organizationId) + await client.triggerOrganizationActivitiesSync(tenantId, organizationId, false) } else { throw new Error('Unexpected search client type!') } @@ -138,7 +138,7 @@ export default class SearchSyncService extends LoggerBase { if (client instanceof SearchSyncApiClient) { await client.triggerRemoveActivity(activityId) } else if (client instanceof SearchSyncWorkerEmitter) { - await client.triggerRemoveActivity(tenantId, activityId) + await client.triggerRemoveActivity(tenantId, activityId, false) } else { throw new Error('Unexpected search client type!') } @@ -160,7 +160,7 @@ export default class SearchSyncService extends LoggerBase { if (client instanceof SearchSyncApiClient) { await client.triggerOrganizationSync(organizationId) } else if (client instanceof SearchSyncWorkerEmitter) { - await client.triggerOrganizationSync(tenantId, organizationId) + await client.triggerOrganizationSync(tenantId, organizationId, false) } else { throw new Error('Unexpected search client type!') } @@ -182,7 +182,7 @@ export default class SearchSyncService extends LoggerBase { if (client instanceof SearchSyncApiClient) { await client.triggerRemoveOrganization(organizationId) } else if (client instanceof SearchSyncWorkerEmitter) { - await client.triggerRemoveOrganization(tenantId, organizationId) + await client.triggerRemoveOrganization(tenantId, organizationId, false) } else { throw new Error('Unexpected search client type!') } diff --git a/backend/src/types/mq/nodeWorkerIntegrationProcessMessage.ts b/backend/src/types/mq/nodeWorkerIntegrationProcessMessage.ts deleted file mode 100644 index ee1481010c..0000000000 --- a/backend/src/types/mq/nodeWorkerIntegrationProcessMessage.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { NodeWorkerMessageType } from '../../serverless/types/workerTypes' -import { NodeWorkerMessageBase } from './nodeWorkerMessageBase' -import { IIntegrationStream } from '../integration/stepResult' - -export interface IIntegrationStreamRetry { - id: string - stream: IIntegrationStream - retryCount: number -} - -export class NodeWorkerIntegrationProcessMessage extends NodeWorkerMessageBase { - constructor( - public readonly runId: string, - public readonly streamId?: string, - public readonly fireCrowdWebhooks?: boolean, - ) { - super(NodeWorkerMessageType.INTEGRATION_PROCESS) - } -} diff --git a/backend/src/types/mq/nodeWorkerProcessWebhookMessage.ts b/backend/src/types/mq/nodeWorkerProcessWebhookMessage.ts deleted file mode 100644 index 1c08543ae7..0000000000 --- a/backend/src/types/mq/nodeWorkerProcessWebhookMessage.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { NodeWorkerMessageType } from '../../serverless/types/workerTypes' -import { NodeWorkerMessageBase } from './nodeWorkerMessageBase' - -export class NodeWorkerProcessWebhookMessage extends NodeWorkerMessageBase { - constructor( - public readonly tenantId: string, - public readonly webhookId: string, - public readonly force?: boolean, - public readonly fireCrowdWebhooks?: boolean, - ) { - super(NodeWorkerMessageType.PROCESS_WEBHOOK) - } -} diff --git a/pnpm-lock.yaml 
b/pnpm-lock.yaml index fe2e44b2e3..8c654133e8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -32,6 +32,9 @@ importers: '@crowd/common': specifier: file:../services/libs/common version: file:services/libs/common + '@crowd/common_services': + specifier: file:../services/libs/common_services + version: file:services/libs/common_services '@crowd/cubejs': specifier: file:../services/libs/cubejs version: file:services/libs/cubejs(bufferutil@4.0.8)(utf-8-validate@5.0.10) @@ -478,6 +481,9 @@ importers: '@crowd/common': specifier: file:../../libs/common version: file:services/libs/common + '@crowd/common_services': + specifier: file:../../libs/common_services + version: file:services/libs/common_services '@crowd/conversations': specifier: file:../../libs/conversations version: file:services/libs/conversations @@ -657,9 +663,15 @@ importers: '@crowd/common': specifier: file:../../libs/common version: file:services/libs/common + '@crowd/common_services': + specifier: file:../../libs/common_services + version: file:services/libs/common_services '@crowd/database': specifier: file:../../libs/database version: file:services/libs/database + '@crowd/feature-flags': + specifier: file:../../libs/feature-flags + version: file:services/libs/feature-flags '@crowd/integrations': specifier: file:../../libs/integrations version: file:services/libs/integrations @@ -730,9 +742,15 @@ importers: '@crowd/common': specifier: file:../../libs/common version: file:services/libs/common + '@crowd/common_services': + specifier: file:../../libs/common_services + version: file:services/libs/common_services '@crowd/database': specifier: file:../../libs/database version: file:services/libs/database + '@crowd/feature-flags': + specifier: file:../../libs/feature-flags + version: file:services/libs/feature-flags '@crowd/integrations': specifier: file:../../libs/integrations version: file:services/libs/integrations @@ -803,9 +821,15 @@ importers: '@crowd/common': specifier: file:../../libs/common version: file:services/libs/common + '@crowd/common_services': + specifier: file:../../libs/common_services + version: file:services/libs/common_services '@crowd/database': specifier: file:../../libs/database version: file:services/libs/database + '@crowd/feature-flags': + specifier: file:../../libs/feature-flags + version: file:services/libs/feature-flags '@crowd/integrations': specifier: file:../../libs/integrations version: file:services/libs/integrations @@ -885,9 +909,6 @@ importers: '@crowd/opensearch': specifier: file:../../libs/opensearch version: file:services/libs/opensearch - '@crowd/redis': - specifier: file:../../libs/redis - version: file:services/libs/redis '@crowd/sqs': specifier: file:../../libs/sqs version: file:services/libs/sqs @@ -1239,12 +1260,21 @@ importers: '@crowd/common': specifier: file:../../libs/common version: file:services/libs/common + '@crowd/common_services': + specifier: file:../../libs/common_services + version: file:services/libs/common_services '@crowd/database': specifier: file:../../libs/database version: file:services/libs/database + '@crowd/feature-flags': + specifier: file:../../libs/feature-flags + version: file:services/libs/feature-flags '@crowd/logging': specifier: file:../../libs/logging version: file:services/libs/logging + '@crowd/redis': + specifier: file:../../libs/redis + version: file:services/libs/redis '@crowd/sqs': specifier: file:../../libs/sqs version: file:services/libs/sqs @@ -1557,6 +1587,58 @@ importers: specifier: ^5.0.4 version: 5.3.2 + services/libs/common_services: + 
dependencies: + '@crowd/common': + specifier: file:../common + version: file:services/libs/common + '@crowd/database': + specifier: file:../database + version: file:services/libs/database + '@crowd/feature-flags': + specifier: file:../feature-flags + version: file:services/libs/feature-flags + '@crowd/logging': + specifier: file:../logging + version: file:services/libs/logging + '@crowd/redis': + specifier: file:../redis + version: file:services/libs/redis + '@crowd/sqs': + specifier: file:../sqs + version: file:services/libs/sqs + '@crowd/tracing': + specifier: file:../tracing + version: file:services/libs/tracing + '@crowd/types': + specifier: file:../types + version: file:services/libs/types + devDependencies: + '@types/node': + specifier: ^18.16.3 + version: 18.18.14 + '@typescript-eslint/eslint-plugin': + specifier: ^5.59.2 + version: 5.62.0(@typescript-eslint/parser@5.62.0)(eslint@8.54.0)(typescript@5.3.2) + '@typescript-eslint/parser': + specifier: ^5.59.2 + version: 5.62.0(eslint@8.54.0)(typescript@5.3.2) + eslint: + specifier: ^8.39.0 + version: 8.54.0 + eslint-config-prettier: + specifier: ^8.8.0 + version: 8.10.0(eslint@8.54.0) + eslint-plugin-prettier: + specifier: ^4.2.1 + version: 4.2.1(eslint-config-prettier@8.10.0)(eslint@8.54.0)(prettier@2.8.8) + prettier: + specifier: ^2.8.8 + version: 2.8.8 + typescript: + specifier: ^5.0.4 + version: 5.3.2 + services/libs/conversations: dependencies: '@crowd/common': @@ -17946,6 +18028,25 @@ packages: - supports-color dev: false + file:services/libs/common_services: + resolution: {directory: services/libs/common_services, type: directory} + name: '@crowd/common_services' + dependencies: + '@crowd/common': file:services/libs/common + '@crowd/database': file:services/libs/database + '@crowd/feature-flags': file:services/libs/feature-flags + '@crowd/logging': file:services/libs/logging + '@crowd/redis': file:services/libs/redis + '@crowd/sqs': file:services/libs/sqs + '@crowd/tracing': file:services/libs/tracing + '@crowd/types': file:services/libs/types + transitivePeerDependencies: + - aws-crt + - bluebird + - pg-native + - supports-color + dev: false + file:services/libs/conversations: resolution: {directory: services/libs/conversations, type: directory} name: '@crowd/conversations' diff --git a/scripts/scaffold.yaml b/scripts/scaffold.yaml index bb1c29877b..c425794824 100644 --- a/scripts/scaffold.yaml +++ b/scripts/scaffold.yaml @@ -134,20 +134,20 @@ services: networks: - crowd-bridge - kafka: - image: bitnami/kafka:latest - restart: always - environment: - - KAFKA_CFG_NODE_ID=0 - - KAFKA_CFG_PROCESS_ROLES=controller,broker - - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093 - - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT - - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@kafka:9093 - - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER - ports: - - '9092:9092' - networks: - - crowd-bridge + # kafka: + # image: bitnami/kafka:latest + # restart: always + # environment: + # - KAFKA_CFG_NODE_ID=0 + # - KAFKA_CFG_PROCESS_ROLES=controller,broker + # - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093 + # - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT + # - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@kafka:9093 + # - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER + # ports: + # - '9092:9092' + # networks: + # - crowd-bridge temporal: build: diff --git a/scripts/scaffold/sqs/queue.conf b/scripts/scaffold/sqs/queue.conf index 6610fcbe94..fae65a7ed8 100644 --- 
a/scripts/scaffold/sqs/queue.conf +++ b/scripts/scaffold/sqs/queue.conf @@ -5,19 +5,6 @@ node-address { } queues { - "nodejs-worker.fifo" { - fifo = true - contentBasedDeduplication = false - receiveMessageWait = 30 seconds - delay = 0 seconds - defaultVisibilityTimeout = 0 seconds - } - "nodejs-worker" { - contentBasedDeduplication = false - receiveMessageWait = 30 seconds - delay = 0 seconds - defaultVisibilityTimeout = 0 seconds - } "python-worker.fifo" { fifo = true contentBasedDeduplication = false @@ -25,41 +12,6 @@ queues { delay = 0 seconds defaultVisibilityTimeout = 0 seconds } - "premium-python-worker.fifo" { - fifo = true - contentBasedDeduplication = false - receiveMessageWait = 30 seconds - delay = 0 seconds - defaultVisibilityTimeout = 0 seconds - } - "integration-run-worker.fifo" { - fifo = true - contentBasedDeduplication = false - receiveMessageWait = 30 seconds - delay = 0 seconds - defaultVisibilityTimeout = 0 seconds - } - "integration-data-worker.fifo" { - fifo = true - contentBasedDeduplication = false - receiveMessageWait = 30 seconds - delay = 0 seconds - defaultVisibilityTimeout = 0 seconds - } - "integration-stream-worker.fifo" { - fifo = true - contentBasedDeduplication = false - receiveMessageWait = 30 seconds - delay = 0 seconds - defaultVisibilityTimeout = 0 seconds - } - "data-sink-worker.fifo" { - fifo = true - contentBasedDeduplication = false - receiveMessageWait = 30 seconds - delay = 0 seconds - defaultVisibilityTimeout = 0 seconds - } } aws { diff --git a/scripts/services/api.yaml b/scripts/services/api.yaml index 85851fb30d..9c85811fd6 100644 --- a/scripts/services/api.yaml +++ b/scripts/services/api.yaml @@ -48,6 +48,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/automations-worker.yaml b/scripts/services/automations-worker.yaml index e3bd0a0634..4e028392b8 100644 --- a/scripts/services/automations-worker.yaml +++ b/scripts/services/automations-worker.yaml @@ -45,6 +45,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/data-sink-worker.yaml b/scripts/services/data-sink-worker.yaml index 4e70372f5e..ff8f4087e1 100644 --- a/scripts/services/data-sink-worker.yaml +++ b/scripts/services/data-sink-worker.yaml @@ -5,6 +5,7 @@ x-env-args: &env-args NODE_ENV: docker SERVICE: data-sink-worker SHELL: /bin/sh + QUEUE_PRIORITY_LEVEL: normal services: data-sink-worker: @@ -44,6 +45,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - 
../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/discord-ws.yaml b/scripts/services/discord-ws.yaml index 949e682a14..d75fd5f120 100644 --- a/scripts/services/discord-ws.yaml +++ b/scripts/services/discord-ws.yaml @@ -43,6 +43,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/emails-worker.yaml b/scripts/services/emails-worker.yaml index 5cc1b38f96..7fd559b435 100644 --- a/scripts/services/emails-worker.yaml +++ b/scripts/services/emails-worker.yaml @@ -45,6 +45,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/cubejs/src:/usr/crowd/app/services/libs/cubejs/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src diff --git a/scripts/services/integration-data-worker.yaml b/scripts/services/integration-data-worker.yaml index e3651d949b..5a46b23a71 100644 --- a/scripts/services/integration-data-worker.yaml +++ b/scripts/services/integration-data-worker.yaml @@ -5,6 +5,7 @@ x-env-args: &env-args NODE_ENV: docker SERVICE: integration-data-worker SHELL: /bin/sh + QUEUE_PRIORITY_LEVEL: normal services: integration-data-worker: @@ -44,6 +45,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/integration-run-worker.yaml b/scripts/services/integration-run-worker.yaml index e89fb8dafc..6302db487b 100644 --- a/scripts/services/integration-run-worker.yaml +++ b/scripts/services/integration-run-worker.yaml @@ -5,6 +5,7 @@ x-env-args: &env-args NODE_ENV: docker SERVICE: integration-run-worker SHELL: /bin/sh + QUEUE_PRIORITY_LEVEL: normal services: integration-run-worker: @@ -44,6 +45,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - 
../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/integration-stream-worker.yaml b/scripts/services/integration-stream-worker.yaml index f997093998..72992469a1 100644 --- a/scripts/services/integration-stream-worker.yaml +++ b/scripts/services/integration-stream-worker.yaml @@ -5,6 +5,7 @@ x-env-args: &env-args NODE_ENV: docker SERVICE: integration-stream-worker SHELL: /bin/sh + QUEUE_PRIORITY_LEVEL: normal services: integration-stream-worker: @@ -44,6 +45,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/integration-sync-worker.yaml b/scripts/services/integration-sync-worker.yaml index 745d3ef52f..06057202ce 100644 --- a/scripts/services/integration-sync-worker.yaml +++ b/scripts/services/integration-sync-worker.yaml @@ -5,6 +5,7 @@ x-env-args: &env-args NODE_ENV: docker SERVICE: integration-sync-worker SHELL: /bin/sh + QUEUE_PRIORITY_LEVEL: normal services: integration-sync-worker: @@ -44,6 +45,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/job-generator.yaml b/scripts/services/job-generator.yaml index fedaaa33ab..b87aba81cc 100644 --- a/scripts/services/job-generator.yaml +++ b/scripts/services/job-generator.yaml @@ -44,6 +44,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/nodejs-worker.yaml b/scripts/services/nodejs-worker.yaml index d5a9e18804..c35f7aec2f 100644 --- a/scripts/services/nodejs-worker.yaml +++ b/scripts/services/nodejs-worker.yaml @@ -5,6 +5,7 @@ x-env-args: &env-args NODE_ENV: docker SERVICE: nodejs-worker SHELL: /bin/sh + QUEUE_PRIORITY_LEVEL: normal services: nodejs-worker: @@ -44,6 +45,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - 
../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/search-sync-api.yaml b/scripts/services/search-sync-api.yaml index e059f4bee8..f8a371a88f 100644 --- a/scripts/services/search-sync-api.yaml +++ b/scripts/services/search-sync-api.yaml @@ -48,6 +48,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/search-sync-worker.yaml b/scripts/services/search-sync-worker.yaml index f32d4b5d7b..9c9293bb05 100644 --- a/scripts/services/search-sync-worker.yaml +++ b/scripts/services/search-sync-worker.yaml @@ -5,6 +5,7 @@ x-env-args: &env-args NODE_ENV: docker SERVICE: search-sync-worker SHELL: /bin/sh + QUEUE_PRIORITY_LEVEL: normal services: search-sync-worker: @@ -44,6 +45,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/scripts/services/webhook-api.yaml b/scripts/services/webhook-api.yaml index 592c044765..7777aac4bb 100644 --- a/scripts/services/webhook-api.yaml +++ b/scripts/services/webhook-api.yaml @@ -48,6 +48,7 @@ services: volumes: - ../../services/libs/alerting/src:/usr/crowd/app/services/libs/alerting/src - ../../services/libs/common/src:/usr/crowd/app/services/libs/common/src + - ../../services/libs/common_services/src:/usr/crowd/app/services/libs/common_services/src - ../../services/libs/conversations/src:/usr/crowd/app/services/libs/conversations/src - ../../services/libs/database/src:/usr/crowd/app/services/libs/database/src - ../../services/libs/integrations/src:/usr/crowd/app/services/libs/integrations/src diff --git a/services/apps/data_sink_worker/config/custom-environment-variables.json b/services/apps/data_sink_worker/config/custom-environment-variables.json index c5650ac796..02f160ff35 100644 --- a/services/apps/data_sink_worker/config/custom-environment-variables.json +++ b/services/apps/data_sink_worker/config/custom-environment-variables.json @@ -39,5 +39,8 @@ }, "searchSyncApi": { "baseUrl": "CROWD_SEARCH_SYNC_API_URL" + }, + "worker": { + "queuePriorityLevel": "QUEUE_PRIORITY_LEVEL" } } diff --git a/services/apps/data_sink_worker/config/default.json b/services/apps/data_sink_worker/config/default.json index e9a6b31829..37208be81c 100644 --- a/services/apps/data_sink_worker/config/default.json +++ b/services/apps/data_sink_worker/config/default.json @@ -7,6 +7,7 @@ "automationsTaskQueue": "automations" }, "worker": { - "maxStreamRetries": 5 + "maxStreamRetries": 5, + "queuePriorityLevel": "normal" } } diff --git a/services/apps/data_sink_worker/package.json 
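
A note on the configuration plumbing above, as a minimal sketch (assuming standard node-config behaviour; only the 'normal' value is attested in this patch): docker-compose exports QUEUE_PRIORITY_LEVEL, custom-environment-variables.json maps it onto worker.queuePriorityLevel, and default.json keeps 'normal' as the fallback.

import config from 'config'
import { QueuePriorityLevel } from '@crowd/types'

export interface IWorkerConfig {
  maxStreamRetries: number
  queuePriorityLevel: QueuePriorityLevel
}

let workerSettings: IWorkerConfig
export const WORKER_SETTINGS = (): IWorkerConfig => {
  if (workerSettings) return workerSettings
  // node-config resolves default.json first, then the env-var mapping, so
  // QUEUE_PRIORITY_LEVEL=high overrides the 'normal' default at deploy time.
  workerSettings = config.get<IWorkerConfig>('worker')
  return workerSettings
}
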
b/services/apps/data_sink_worker/package.json index 349e1cd4a0..379881bd8c 100644 --- a/services/apps/data_sink_worker/package.json +++ b/services/apps/data_sink_worker/package.json @@ -34,6 +34,7 @@ "@crowd/types": "file:../../libs/types", "@crowd/feature-flags": "file:../../libs/feature-flags", "@crowd/temporal": "file:../../libs/temporal", + "@crowd/common_services": "file:../../libs/common_services", "@crowd/telemetry": "file:../../libs/telemetry", "@types/config": "^3.3.0", "@types/node": "^18.16.3", diff --git a/services/apps/data_sink_worker/src/bin/map-member-to-org.ts b/services/apps/data_sink_worker/src/bin/map-member-to-org.ts index 9b4e9b281f..13d15cb168 100644 --- a/services/apps/data_sink_worker/src/bin/map-member-to-org.ts +++ b/services/apps/data_sink_worker/src/bin/map-member-to-org.ts @@ -2,12 +2,7 @@ import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, TEMPORAL_CONFIG, UNLEASH_CONFIG } import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { - DataSinkWorkerEmitter, - NodejsWorkerEmitter, - SearchSyncWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import MemberRepository from '../repo/member.repo' import MemberService from '../service/member.service' import DataSinkRepository from '../repo/dataSink.repo' @@ -15,6 +10,13 @@ import { OrganizationService } from '../service/organization.service' import { getUnleashClient } from '@crowd/feature-flags' import { Client as TemporalClient, getTemporalClient } from '@crowd/temporal' import { getRedisClient } from '@crowd/redis' +import { + DataSinkWorkerEmitter, + NodejsWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, + SearchSyncWorkerEmitter, +} from '@crowd/common_services' const tracer = getServiceTracer() const log = getServiceLogger() @@ -37,31 +39,49 @@ setImmediate(async () => { temporal = await getTemporalClient(TEMPORAL_CONFIG()) } - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new DataSinkWorkerEmitter(sqsClient, tracer, log) - await emitter.init() + const redis = await getRedisClient(REDIS_CONFIG()) const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const sqsClient = getSqsClient(SQS_CONFIG()) + const emitter = new DataSinkWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) + await emitter.init() + const dataSinkRepo = new DataSinkRepository(store, log) const memberRepo = new MemberRepository(store, log) - const nodejsWorkerEmitter = new NodejsWorkerEmitter(sqsClient, tracer, log) + const nodejsWorkerEmitter = new NodejsWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) await nodejsWorkerEmitter.init() - const searchSyncWorkerEmitter = new SearchSyncWorkerEmitter(sqsClient, tracer, log) + const searchSyncWorkerEmitter = new SearchSyncWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) await searchSyncWorkerEmitter.init() - const redisClient = await getRedisClient(REDIS_CONFIG()) - const memberService = new MemberService( store, nodejsWorkerEmitter, searchSyncWorkerEmitter, unleash, temporal, - redisClient, + redis, log, ) const orgService = new OrganizationService(store, log) @@ -93,10 +113,10 @@ 
setImmediate(async () => { orgService.addToMember(member.tenantId, segmentId, member.id, orgs) for (const org of orgs) { - await searchSyncWorkerEmitter.triggerOrganizationSync(member.tenantId, org.id) + await searchSyncWorkerEmitter.triggerOrganizationSync(member.tenantId, org.id, true) } - await searchSyncWorkerEmitter.triggerMemberSync(member.tenantId, member.id) + await searchSyncWorkerEmitter.triggerMemberSync(member.tenantId, member.id, true) log.info('Done mapping member to organizations!') } else { log.info('No organizations found with matching email domains!') diff --git a/services/apps/data_sink_worker/src/bin/map-tenant-members-to-org.ts b/services/apps/data_sink_worker/src/bin/map-tenant-members-to-org.ts index ff656b2d6c..bb273a5113 100644 --- a/services/apps/data_sink_worker/src/bin/map-tenant-members-to-org.ts +++ b/services/apps/data_sink_worker/src/bin/map-tenant-members-to-org.ts @@ -2,12 +2,7 @@ import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, TEMPORAL_CONFIG, UNLEASH_CONFIG } import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { - DataSinkWorkerEmitter, - NodejsWorkerEmitter, - SearchSyncWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import MemberRepository from '../repo/member.repo' import DataSinkRepository from '../repo/dataSink.repo' import MemberService from '../service/member.service' @@ -15,6 +10,13 @@ import { OrganizationService } from '../service/organization.service' import { getUnleashClient } from '@crowd/feature-flags' import { Client as TemporalClient, getTemporalClient } from '@crowd/temporal' import { getRedisClient } from '@crowd/redis' +import { + DataSinkWorkerEmitter, + NodejsWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, + SearchSyncWorkerEmitter, +} from '@crowd/common_services' const tracer = getServiceTracer() const log = getServiceLogger() @@ -37,34 +39,52 @@ setImmediate(async () => { temporal = await getTemporalClient(TEMPORAL_CONFIG()) } - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new DataSinkWorkerEmitter(sqsClient, tracer, log) - await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const redis = await getRedisClient(REDIS_CONFIG()) + + const sqsClient = getSqsClient(SQS_CONFIG()) + const emitter = new DataSinkWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) + await emitter.init() + const dataSinkRepo = new DataSinkRepository(store, log) const memberRepo = new MemberRepository(store, log) const segmentIds = await dataSinkRepo.getSegmentIds(tenantId) const segmentId = segmentIds[segmentIds.length - 1] // leaf segment id - const nodejsWorkerEmitter = new NodejsWorkerEmitter(sqsClient, tracer, log) + const nodejsWorkerEmitter = new NodejsWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) await nodejsWorkerEmitter.init() - const searchSyncWorkerEmitter = new SearchSyncWorkerEmitter(sqsClient, tracer, log) + const searchSyncWorkerEmitter = new SearchSyncWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) await searchSyncWorkerEmitter.init() - const redisClient = await 
getRedisClient(REDIS_CONFIG()) - const memberService = new MemberService( store, nodejsWorkerEmitter, searchSyncWorkerEmitter, unleash, temporal, - redisClient, + redis, log, ) const orgService = new OrganizationService(store, log) @@ -107,10 +127,10 @@ setImmediate(async () => { orgService.addToMember(tenantId, segmentId, member.id, orgs) for (const org of orgs) { - await searchSyncWorkerEmitter.triggerOrganizationSync(tenantId, org.id) + await searchSyncWorkerEmitter.triggerOrganizationSync(tenantId, org.id, true) } - await searchSyncWorkerEmitter.triggerMemberSync(tenantId, member.id) + await searchSyncWorkerEmitter.triggerMemberSync(tenantId, member.id, true) } } diff --git a/services/apps/data_sink_worker/src/bin/process-results.ts b/services/apps/data_sink_worker/src/bin/process-results.ts index 19f320f150..11056cd036 100644 --- a/services/apps/data_sink_worker/src/bin/process-results.ts +++ b/services/apps/data_sink_worker/src/bin/process-results.ts @@ -12,15 +12,17 @@ import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' import { getRedisClient } from '@crowd/redis' -import { - NodejsWorkerEmitter, - SearchSyncWorkerEmitter, - DataSinkWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import { initializeSentimentAnalysis } from '@crowd/sentiment' import { getUnleashClient } from '@crowd/feature-flags' import { Client as TemporalClient, getTemporalClient } from '@crowd/temporal' +import { + DataSinkWorkerEmitter, + NodejsWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, + SearchSyncWorkerEmitter, +} from '@crowd/common_services' const tracer = getServiceTracer() const log = getServiceLogger() @@ -44,28 +46,52 @@ setImmediate(async () => { } const sqsClient = getSqsClient(SQS_CONFIG()) - const redisClient = await getRedisClient(REDIS_CONFIG()) + const redis = await getRedisClient(REDIS_CONFIG()) + const dbConnection = await getDbConnection(DB_CONFIG()) + const store = new DbStore(log, dbConnection) + + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) initializeSentimentAnalysis(SENTIMENT_CONFIG()) - const nodejsWorkerEmitter = new NodejsWorkerEmitter(sqsClient, tracer, log) + const nodejsWorkerEmitter = new NodejsWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) await nodejsWorkerEmitter.init() - const searchSyncWorkerEmitter = new SearchSyncWorkerEmitter(sqsClient, tracer, log) + const searchSyncWorkerEmitter = new SearchSyncWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) await searchSyncWorkerEmitter.init() - const dataSinkWorkerEmitter = new DataSinkWorkerEmitter(sqsClient, tracer, log) + const dataSinkWorkerEmitter = new DataSinkWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) await dataSinkWorkerEmitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) - const store = new DbStore(log, dbConnection) - const service = new DataSinkService( store, nodejsWorkerEmitter, searchSyncWorkerEmitter, dataSinkWorkerEmitter, - redisClient, + redis, unleash, temporal, log, diff --git a/services/apps/data_sink_worker/src/bin/restart-all-failed-results.ts b/services/apps/data_sink_worker/src/bin/restart-all-failed-results.ts index 
180add16b7..dc2b69efc4 100644 --- a/services/apps/data_sink_worker/src/bin/restart-all-failed-results.ts +++ b/services/apps/data_sink_worker/src/bin/restart-all-failed-results.ts @@ -1,11 +1,18 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import DataSinkRepository from '../repo/dataSink.repo' import { partition } from '@crowd/common' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { DataSinkWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import { ProcessIntegrationResultQueueMessage } from '@crowd/types' +import { + DataSinkWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' const batchSize = 500 @@ -13,12 +20,19 @@ const tracer = getServiceTracer() const log = getServiceLogger() setImmediate(async () => { - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new DataSinkWorkerEmitter(sqsClient, tracer, log) - await emitter.init() - + const unleash = await getUnleashClient(UNLEASH_CONFIG()) const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const redis = await getRedisClient(REDIS_CONFIG()) + + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const sqsClient = getSqsClient(SQS_CONFIG()) + + const emitter = new DataSinkWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) + await emitter.init() const repo = new DataSinkRepository(store, log) let count = 0 @@ -29,6 +43,7 @@ setImmediate(async () => { const messages = results.map((r) => { return { + tenantId: r.tenantId, payload: new ProcessIntegrationResultQueueMessage(r.id), groupId: r.id, deduplicationId: r.id, @@ -37,7 +52,7 @@ setImmediate(async () => { const batches = partition(messages, 10) for (const batch of batches) { - await emitter.sendMessages(batch) + await emitter.sendMessagesBatch(batch) } count += results.length diff --git a/services/apps/data_sink_worker/src/bin/restart-failed-results.ts b/services/apps/data_sink_worker/src/bin/restart-failed-results.ts index 3bb51b7912..945c4ec88a 100644 --- a/services/apps/data_sink_worker/src/bin/restart-failed-results.ts +++ b/services/apps/data_sink_worker/src/bin/restart-failed-results.ts @@ -1,10 +1,16 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import DataSinkRepository from '../repo/dataSink.repo' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { DataSinkWorkerEmitter, getSqsClient } from '@crowd/sqs' -import { ProcessIntegrationResultQueueMessage } from '@crowd/types' +import { getSqsClient } from '@crowd/sqs' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' +import { + DataSinkWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' const tracer = getServiceTracer() const log = getServiceLogger() @@ -19,13 +25,21 @@ if 
(processArguments.length !== 1) { const runId = processArguments[0] setImmediate(async () => { - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new DataSinkWorkerEmitter(sqsClient, tracer, log) - await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + + const redis = await getRedisClient(REDIS_CONFIG()) + + const sqsClient = getSqsClient(SQS_CONFIG()) + const emitter = new DataSinkWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) + await emitter.init() + const repo = new DataSinkRepository(store, log) let results = await repo.getFailedResultsForRun(runId, 1, 20) @@ -33,9 +47,13 @@ setImmediate(async () => { await repo.resetResults(results.map((r) => r.id)) for (const result of results) { - await emitter.sendMessage( - `results-${result.tenantId}-${result.platform}`, - new ProcessIntegrationResultQueueMessage(result.id), + await emitter.triggerResultProcessing( + result.tenantId, + result.platform, + result.id, + result.id, + result.onboarding === null ? true : result.onboarding, + result.id, ) } diff --git a/services/apps/data_sink_worker/src/bin/restart-result.ts b/services/apps/data_sink_worker/src/bin/restart-result.ts index b945973657..b83fbd9051 100644 --- a/services/apps/data_sink_worker/src/bin/restart-result.ts +++ b/services/apps/data_sink_worker/src/bin/restart-result.ts @@ -1,10 +1,16 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import DataSinkRepository from '../repo/dataSink.repo' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { DataSinkWorkerEmitter, getSqsClient } from '@crowd/sqs' -import { ProcessIntegrationResultQueueMessage } from '@crowd/types' +import { getSqsClient } from '@crowd/sqs' +import { + DataSinkWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' const tracer = getServiceTracer() const log = getServiceLogger() @@ -19,13 +25,20 @@ if (processArguments.length !== 1) { const resultIds = processArguments[0].split(',') setImmediate(async () => { - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new DataSinkWorkerEmitter(sqsClient, tracer, log) - await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redis = await getRedisClient(REDIS_CONFIG()) + + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const sqsClient = getSqsClient(SQS_CONFIG()) + const emitter = new DataSinkWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) + await emitter.init() + const repo = new DataSinkRepository(store, log) for (const resultId of resultIds) { const result = await repo.getResultInfo(resultId) @@ -34,9 +47,12 @@ 
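
Worth noting on the retry scripts: `onboarding` is nullable on pre-existing rows, so every call site above defaults null to true, i.e. results of unknown origin are retried with onboarding urgency. A hypothetical one-liner for the repeated ternary (the commented trigger call is copied from restart-failed-results.ts above):

const effectiveOnboarding = (onboarding: boolean | null): boolean =>
  onboarding === null ? true : onboarding

// await emitter.triggerResultProcessing(
//   result.tenantId, result.platform, result.id, result.id,
//   effectiveOnboarding(result.onboarding), result.id,
// )
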
setImmediate(async () => { process.exit(1) } else { await repo.resetResults([resultId]) - await emitter.sendMessage( - `results-${result.tenantId}-${result.platform}`, - new ProcessIntegrationResultQueueMessage(result.id), + await emitter.triggerResultProcessing( + result.tenantId, + result.platform, + result.id, + result.id, + result.onboarding === null ? true : result.onboarding, ) } } diff --git a/services/apps/data_sink_worker/src/bin/restart-x-failed-results.ts b/services/apps/data_sink_worker/src/bin/restart-x-failed-results.ts index f672c08764..1074fea778 100644 --- a/services/apps/data_sink_worker/src/bin/restart-x-failed-results.ts +++ b/services/apps/data_sink_worker/src/bin/restart-x-failed-results.ts @@ -1,11 +1,18 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import DataSinkRepository from '../repo/dataSink.repo' import { partition } from '@crowd/common' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { DataSinkWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import { ProcessIntegrationResultQueueMessage } from '@crowd/types' +import { + DataSinkWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' const MAX_TO_PROCESS = 500 @@ -24,13 +31,20 @@ let numResults = parseInt(processArguments[0], 10) numResults = Math.min(numResults, MAX_TO_PROCESS) setImmediate(async () => { - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new DataSinkWorkerEmitter(sqsClient, tracer, log) - await emitter.init() + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redis = await getRedisClient(REDIS_CONFIG()) const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const sqsClient = getSqsClient(SQS_CONFIG()) + const emitter = new DataSinkWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) + await emitter.init() + const repo = new DataSinkRepository(store, log) const results = await repo.getFailedResults(1, numResults) @@ -44,6 +58,7 @@ setImmediate(async () => { const messages = results.map((r) => { return { + tenantId: r.tenantId, payload: new ProcessIntegrationResultQueueMessage(r.id), groupId: r.id, deduplicationId: r.id, @@ -52,7 +67,7 @@ setImmediate(async () => { const batches = partition(messages, 10) for (const batch of batches) { - await emitter.sendMessages(batch) + await emitter.sendMessagesBatch(batch) } log.info(`Restarted total of ${results.length} failed results.`) diff --git a/services/apps/data_sink_worker/src/conf/index.ts b/services/apps/data_sink_worker/src/conf/index.ts index c993f7559e..45c5af4b43 100644 --- a/services/apps/data_sink_worker/src/conf/index.ts +++ b/services/apps/data_sink_worker/src/conf/index.ts @@ -7,12 +7,14 @@ import { ISqsClientConfig } from '@crowd/sqs' import { ITemporalConfig } from '@crowd/temporal' import config from 'config' import { ISearchSyncApiConfig } from '@crowd/opensearch' +import { QueuePriorityLevel } from '@crowd/types' export interface ISlackAlertingConfig { 
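
The bulk-retry scripts now tag each queue message with tenantId so the prioritized emitter can resolve a level per tenant, and the batch API is sendMessagesBatch. A self-contained restatement of the mapping used in restart-all-failed-results.ts and restart-x-failed-results.ts above (the declare lines stand in for values the real scripts construct):

import { partition } from '@crowd/common'
import { ProcessIntegrationResultQueueMessage } from '@crowd/types'

declare const results: { id: string; tenantId: string }[]
declare const emitter: { sendMessagesBatch(msgs: unknown[]): Promise<void> }

const messages = results.map((r) => ({
  tenantId: r.tenantId, // new: input for per-tenant priority resolution
  payload: new ProcessIntegrationResultQueueMessage(r.id),
  groupId: r.id, // FIFO message group, one per result
  deduplicationId: r.id,
}))

export async function send(): Promise<void> {
  // batches of 10, the SQS per-request maximum
  for (const batch of partition(messages, 10)) {
    await emitter.sendMessagesBatch(batch)
  }
}
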
url: string } export interface IWorkerConfig { maxStreamRetries: number + queuePriorityLevel: QueuePriorityLevel } let workerSettings: IWorkerConfig diff --git a/services/apps/data_sink_worker/src/jobs/processOldResults.ts b/services/apps/data_sink_worker/src/jobs/processOldResults.ts index d5d399edff..065123ed0a 100644 --- a/services/apps/data_sink_worker/src/jobs/processOldResults.ts +++ b/services/apps/data_sink_worker/src/jobs/processOldResults.ts @@ -3,10 +3,14 @@ import { DbConnection, DbStore } from '@crowd/database' import { Unleash } from '@crowd/feature-flags' import { Logger } from '@crowd/logging' import { RedisClient } from '@crowd/redis' -import { NodejsWorkerEmitter, SearchSyncWorkerEmitter, DataSinkWorkerEmitter } from '@crowd/sqs' import { Client as TemporalClient } from '@crowd/temporal' import DataSinkRepository from '../repo/dataSink.repo' import DataSinkService from '../service/dataSink.service' +import { + DataSinkWorkerEmitter, + NodejsWorkerEmitter, + SearchSyncWorkerEmitter, +} from '@crowd/common_services' const MAX_CONCURRENT_PROMISES = 2 const MAX_RESULTS_TO_LOAD = 10 diff --git a/services/apps/data_sink_worker/src/main.ts b/services/apps/data_sink_worker/src/main.ts index a8d3e347ec..21ac0b5bd6 100644 --- a/services/apps/data_sink_worker/src/main.ts +++ b/services/apps/data_sink_worker/src/main.ts @@ -1,12 +1,14 @@ -import { getDbConnection } from '@crowd/database' -import { getServiceTracer } from '@crowd/tracing' -import { getServiceLogger } from '@crowd/logging' import { - NodejsWorkerEmitter, - SearchSyncWorkerEmitter, + PriorityLevelContextRepository, DataSinkWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' + QueuePriorityContextLoader, + SearchSyncWorkerEmitter, + NodejsWorkerEmitter, +} from '@crowd/common_services' +import { DbStore, getDbConnection } from '@crowd/database' +import { getServiceTracer } from '@crowd/tracing' +import { getServiceLogger } from '@crowd/logging' +import { getSqsClient } from '@crowd/sqs' import { DB_CONFIG, SENTIMENT_CONFIG, @@ -14,6 +16,7 @@ import { REDIS_CONFIG, UNLEASH_CONFIG, TEMPORAL_CONFIG, + WORKER_SETTINGS, } from './conf' import { WorkerQueueReceiver } from './queue' import { initializeSentimentAnalysis } from '@crowd/sentiment' @@ -49,13 +52,39 @@ setImmediate(async () => { initializeSentimentAnalysis(SENTIMENT_CONFIG()) } - const nodejsWorkerEmitter = new NodejsWorkerEmitter(sqsClient, tracer, log) + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) - const searchSyncWorkerEmitter = new SearchSyncWorkerEmitter(sqsClient, tracer, log) + const nodejsWorkerEmitter = new NodejsWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) - const dataWorkerEmitter = new DataSinkWorkerEmitter(sqsClient, tracer, log) + const searchSyncWorkerEmitter = new SearchSyncWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + + const dataWorkerEmitter = new DataSinkWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) const queue = new WorkerQueueReceiver( + WORKER_SETTINGS().queuePriorityLevel, sqsClient, dbConnection, nodejsWorkerEmitter, diff --git a/services/apps/data_sink_worker/src/queue/index.ts b/services/apps/data_sink_worker/src/queue/index.ts index f7ccade232..d7a9a50f4a 100644 --- a/services/apps/data_sink_worker/src/queue/index.ts +++ 
b/services/apps/data_sink_worker/src/queue/index.ts @@ -1,27 +1,27 @@ import { Tracer, Span, SpanStatusCode } from '@crowd/tracing' import { Logger } from '@crowd/logging' import { DbConnection, DbStore } from '@crowd/database' -import { - DATA_SINK_WORKER_QUEUE_SETTINGS, - NodejsWorkerEmitter, - SearchSyncWorkerEmitter, - DataSinkWorkerEmitter, - SqsClient, - SqsQueueReceiver, -} from '@crowd/sqs' +import { DATA_SINK_WORKER_QUEUE_SETTINGS, SqsClient, SqsPrioritizedQueueReciever } from '@crowd/sqs' import { CreateAndProcessActivityResultQueueMessage, DataSinkWorkerQueueMessageType, IQueueMessage, ProcessIntegrationResultQueueMessage, + QueuePriorityLevel, } from '@crowd/types' import DataSinkService from '../service/dataSink.service' import { RedisClient } from '@crowd/redis' import { Unleash } from '@crowd/feature-flags' import { Client as TemporalClient } from '@crowd/temporal' +import { + DataSinkWorkerEmitter, + NodejsWorkerEmitter, + SearchSyncWorkerEmitter, +} from '@crowd/common_services' -export class WorkerQueueReceiver extends SqsQueueReceiver { +export class WorkerQueueReceiver extends SqsPrioritizedQueueReciever { constructor( + level: QueuePriorityLevel, client: SqsClient, private readonly dbConn: DbConnection, private readonly nodejsWorkerEmitter: NodejsWorkerEmitter, @@ -34,7 +34,14 @@ export class WorkerQueueReceiver extends SqsQueueReceiver { parentLog: Logger, maxConcurrentProcessing: number, ) { - super(client, DATA_SINK_WORKER_QUEUE_SETTINGS, maxConcurrentProcessing, tracer, parentLog) + super( + level, + client, + DATA_SINK_WORKER_QUEUE_SETTINGS, + maxConcurrentProcessing, + tracer, + parentLog, + ) } override async processMessage(message: IQueueMessage): Promise { diff --git a/services/apps/data_sink_worker/src/repo/dataSink.data.ts b/services/apps/data_sink_worker/src/repo/dataSink.data.ts index fa5470eee4..8b48dd4db8 100644 --- a/services/apps/data_sink_worker/src/repo/dataSink.data.ts +++ b/services/apps/data_sink_worker/src/repo/dataSink.data.ts @@ -1,4 +1,9 @@ -import { IIntegrationResult, IntegrationResultState, PlatformType } from '@crowd/types' +import { + IIntegrationResult, + IQueuePriorityCalculationContext, + IntegrationResultState, + PlatformType, +} from '@crowd/types' export interface IResultData { id: string @@ -6,6 +11,7 @@ export interface IResultData { data: IIntegrationResult runId: string | null + onboarding: boolean | null webhookId: string | null streamId: string apiDataId: string @@ -22,8 +28,16 @@ export interface IResultData { delayedUntil: string | null } -export interface IFailedResultData { +export interface IFailedResultData extends IQueuePriorityCalculationContext { id: string + onboarding: boolean | null tenantId: string platform: string } + +export interface IDelayedResults { + id: string + tenantId: string + platform: PlatformType + onboarding: boolean | null +} diff --git a/services/apps/data_sink_worker/src/repo/dataSink.repo.ts b/services/apps/data_sink_worker/src/repo/dataSink.repo.ts index 6b4bd13c34..510ddcf498 100644 --- a/services/apps/data_sink_worker/src/repo/dataSink.repo.ts +++ b/services/apps/data_sink_worker/src/repo/dataSink.repo.ts @@ -1,7 +1,7 @@ import { DbStore, RepositoryBase } from '@crowd/database' import { Logger } from '@crowd/logging' -import { IIntegrationResult, IntegrationResultState, PlatformType, TenantPlans } from '@crowd/types' -import { IFailedResultData, IResultData } from './dataSink.data' +import { IIntegrationResult, IntegrationResultState, TenantPlans } from '@crowd/types' +import { 
IDelayedResults, IFailedResultData, IResultData } from './dataSink.data' export default class DataSinkRepository extends RepositoryBase { constructor(dbStore: DbStore, parentLog: Logger) { @@ -24,10 +24,12 @@ export default class DataSinkRepository extends RepositoryBase { @@ -162,9 +164,14 @@ export default class DataSinkRepository extends RepositoryBase { + public async getDelayedResults(limit: number): Promise { this.ensureTransactional() try { const results = await this.db().any( ` - select r.id, r."tenantId", i.platform - from integration.results r - join integrations i on r."integrationId" = i.id + select r.id, + r."tenantId", + i.platform, + run.onboarding + from + integration.results r + inner join integrations i on r."integrationId" = i.id + left join integration.runs run on run.id = r."runId" where r.state = $(delayedState) and r."delayedUntil" < now() limit ${limit} @@ -267,7 +282,7 @@ export default class DataSinkRepository extends RepositoryBase ({ id: s.id, tenantId: s.tenantId, platform: s.platform })) + return results } catch (err) { this.log.error(err, 'Failed to get delayed results!') throw err diff --git a/services/apps/data_sink_worker/src/service/activity.service.ts b/services/apps/data_sink_worker/src/service/activity.service.ts index 997a572aec..723f26153c 100644 --- a/services/apps/data_sink_worker/src/service/activity.service.ts +++ b/services/apps/data_sink_worker/src/service/activity.service.ts @@ -10,7 +10,6 @@ import { IActivityCreateData, IActivityUpdateData } from './activity.data' import MemberService from './member.service' import mergeWith from 'lodash.mergewith' import isEqual from 'lodash.isequal' -import { NodejsWorkerEmitter, SearchSyncWorkerEmitter } from '@crowd/sqs' import SettingsRepository from './settings.repo' import { ConversationService } from '@crowd/conversations' import IntegrationRepository from '../repo/integration.repo' @@ -20,6 +19,7 @@ import { RedisClient } from '@crowd/redis' import { Unleash } from '@crowd/feature-flags' import { Client as TemporalClient, WorkflowIdReusePolicy } from '@crowd/temporal' import { TEMPORAL_CONFIG } from '../conf' +import { NodejsWorkerEmitter, SearchSyncWorkerEmitter } from '@crowd/common_services' export default class ActivityService extends LoggerBase { private readonly conversationService: ConversationService @@ -42,6 +42,7 @@ export default class ActivityService extends LoggerBase { tenantId: string, segmentId: string, activity: IActivityCreateData, + onboarding: boolean, fireSync = true, ): Promise { try { @@ -116,13 +117,17 @@ export default class ActivityService extends LoggerBase { const affectedIds = await this.conversationService.processActivity(tenantId, segmentId, id) if (fireSync) { - await this.searchSyncWorkerEmitter.triggerMemberSync(tenantId, activity.memberId) - await this.searchSyncWorkerEmitter.triggerActivitySync(tenantId, id) + await this.searchSyncWorkerEmitter.triggerMemberSync( + tenantId, + activity.memberId, + onboarding, + ) + await this.searchSyncWorkerEmitter.triggerActivitySync(tenantId, id, onboarding) } if (affectedIds.length > 0) { for (const affectedId of affectedIds.filter((i) => i !== id)) { - await this.searchSyncWorkerEmitter.triggerActivitySync(tenantId, affectedId) + await this.searchSyncWorkerEmitter.triggerActivitySync(tenantId, affectedId, onboarding) } } @@ -136,6 +141,7 @@ export default class ActivityService extends LoggerBase { public async update( id: string, tenantId: string, + onboarding: boolean, segmentId: string, activity: IActivityUpdateData, 
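
Both workers' queue receivers now subclass SqsPrioritizedQueueReciever (spelled as in the patch) and take the configured level as the first constructor argument, so a deployment only consumes its own priority band. A skeleton of the pattern; the Promise<void> return type and the concurrency of 2 are assumptions where the excerpt is ambiguous:

import {
  DATA_SINK_WORKER_QUEUE_SETTINGS,
  SqsClient,
  SqsPrioritizedQueueReciever,
} from '@crowd/sqs'
import { Tracer } from '@crowd/tracing'
import { Logger } from '@crowd/logging'
import { IQueueMessage, QueuePriorityLevel } from '@crowd/types'

export class ExampleQueueReceiver extends SqsPrioritizedQueueReciever {
  constructor(level: QueuePriorityLevel, client: SqsClient, tracer: Tracer, parentLog: Logger) {
    super(level, client, DATA_SINK_WORKER_QUEUE_SETTINGS, 2, tracer, parentLog)
  }

  override async processMessage(message: IQueueMessage): Promise<void> {
    // the real WorkerQueueReceiver switches on message.type here
  }
}
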
original: IDbActivity, @@ -195,8 +201,12 @@ export default class ActivityService extends LoggerBase { await this.conversationService.processActivity(tenantId, segmentId, id) if (fireSync) { - await this.searchSyncWorkerEmitter.triggerMemberSync(tenantId, activity.memberId) - await this.searchSyncWorkerEmitter.triggerActivitySync(tenantId, id) + await this.searchSyncWorkerEmitter.triggerMemberSync( + tenantId, + activity.memberId, + onboarding, + ) + await this.searchSyncWorkerEmitter.triggerActivitySync(tenantId, id, onboarding) } } } catch (err) { @@ -322,6 +332,7 @@ export default class ActivityService extends LoggerBase { public async processActivity( tenantId: string, integrationId: string, + onboarding: boolean, platform: PlatformType, activity: IActivityData, providedSegmentId?: string, @@ -481,7 +492,11 @@ export default class ActivityService extends LoggerBase { // delete activity await txRepo.delete(dbActivity.id) - await this.searchSyncWorkerEmitter.triggerRemoveActivity(tenantId, dbActivity.id) + await this.searchSyncWorkerEmitter.triggerRemoveActivity( + tenantId, + dbActivity.id, + onboarding, + ) createActivity = true } @@ -489,6 +504,7 @@ export default class ActivityService extends LoggerBase { await txMemberService.update( dbMember.id, tenantId, + onboarding, segmentId, integrationId, { @@ -527,6 +543,7 @@ export default class ActivityService extends LoggerBase { await txMemberService.update( dbMember.id, tenantId, + onboarding, segmentId, integrationId, { @@ -591,6 +608,7 @@ export default class ActivityService extends LoggerBase { await this.searchSyncWorkerEmitter.triggerRemoveActivity( tenantId, dbActivity.id, + onboarding, ) createActivity = true } @@ -599,6 +617,7 @@ export default class ActivityService extends LoggerBase { await txMemberService.update( dbObjectMember.id, tenantId, + onboarding, segmentId, integrationId, { @@ -637,6 +656,7 @@ export default class ActivityService extends LoggerBase { await txMemberService.update( dbObjectMember.id, tenantId, + onboarding, segmentId, integrationId, { @@ -670,6 +690,7 @@ export default class ActivityService extends LoggerBase { await txActivityService.update( dbActivity.id, tenantId, + onboarding, segmentId, { type: activity.type, @@ -708,6 +729,7 @@ export default class ActivityService extends LoggerBase { await txMemberService.update( dbMember.id, tenantId, + onboarding, segmentId, integrationId, { @@ -731,6 +753,7 @@ export default class ActivityService extends LoggerBase { ) memberId = await txMemberService.create( tenantId, + onboarding, segmentId, integrationId, { @@ -767,6 +790,7 @@ export default class ActivityService extends LoggerBase { await txMemberService.update( dbObjectMember.id, tenantId, + onboarding, segmentId, integrationId, { @@ -790,6 +814,7 @@ export default class ActivityService extends LoggerBase { ) objectMemberId = await txMemberService.create( tenantId, + onboarding, segmentId, integrationId, { @@ -839,6 +864,7 @@ export default class ActivityService extends LoggerBase { url: activity.url, organizationId, }, + onboarding, false, ) } @@ -848,13 +874,13 @@ export default class ActivityService extends LoggerBase { }) if (memberId) { - await this.searchSyncWorkerEmitter.triggerMemberSync(tenantId, memberId) + await this.searchSyncWorkerEmitter.triggerMemberSync(tenantId, memberId, onboarding) } if (objectMemberId) { - await this.searchSyncWorkerEmitter.triggerMemberSync(tenantId, objectMemberId) + await this.searchSyncWorkerEmitter.triggerMemberSync(tenantId, objectMemberId, onboarding) } if 
(activityId) { - await this.searchSyncWorkerEmitter.triggerActivitySync(tenantId, activityId) + await this.searchSyncWorkerEmitter.triggerActivitySync(tenantId, activityId, onboarding) } } catch (err) { this.log.error(err, 'Error while processing an activity!') diff --git a/services/apps/data_sink_worker/src/service/dataSink.service.ts b/services/apps/data_sink_worker/src/service/dataSink.service.ts index c5ed1b4aee..b429217d02 100644 --- a/services/apps/data_sink_worker/src/service/dataSink.service.ts +++ b/services/apps/data_sink_worker/src/service/dataSink.service.ts @@ -1,7 +1,6 @@ import { DbStore } from '@crowd/database' import { Logger, LoggerBase, getChildLogger } from '@crowd/logging' import { RedisClient } from '@crowd/redis' -import { NodejsWorkerEmitter, SearchSyncWorkerEmitter, DataSinkWorkerEmitter } from '@crowd/sqs' import { IActivityData, IMemberData, @@ -19,6 +18,11 @@ import { Client as TemporalClient } from '@crowd/temporal' import { IResultData } from '../repo/dataSink.data' import { addSeconds } from '@crowd/common' import { WORKER_SETTINGS } from '../conf' +import { + DataSinkWorkerEmitter, + NodejsWorkerEmitter, + SearchSyncWorkerEmitter, +} from '@crowd/common_services' import telemetry from '@crowd/telemetry' export default class DataSinkService extends LoggerBase { @@ -81,6 +85,7 @@ export default class DataSinkService extends LoggerBase { result.platform, result.id, result.id, + result.onboarding === null ? true : result.onboarding, `${result.id}-delayed-${Date.now()}`, ) } @@ -165,6 +170,7 @@ export default class DataSinkService extends LoggerBase { await service.processActivity( resultInfo.tenantId, resultInfo.integrationId, + resultInfo.onboarding === null ? true : resultInfo.onboarding, platform, activityData, data.segmentId, diff --git a/services/apps/data_sink_worker/src/service/member.service.ts b/services/apps/data_sink_worker/src/service/member.service.ts index 18c4df8310..f1e781fd1f 100644 --- a/services/apps/data_sink_worker/src/service/member.service.ts +++ b/services/apps/data_sink_worker/src/service/member.service.ts @@ -21,13 +21,13 @@ import mergeWith from 'lodash.mergewith' import isEqual from 'lodash.isequal' import { IMemberCreateData, IMemberUpdateData } from './member.data' import MemberAttributeService from './memberAttribute.service' -import { NodejsWorkerEmitter, SearchSyncWorkerEmitter } from '@crowd/sqs' import IntegrationRepository from '../repo/integration.repo' import { OrganizationService } from './organization.service' import uniqby from 'lodash.uniqby' import { Unleash } from '@crowd/feature-flags' import { TEMPORAL_CONFIG } from '../conf' import { RedisClient } from '@crowd/redis' +import { NodejsWorkerEmitter, SearchSyncWorkerEmitter } from '@crowd/common_services' export default class MemberService extends LoggerBase { constructor( @@ -44,6 +44,7 @@ export default class MemberService extends LoggerBase { public async create( tenantId: string, + onboarding: boolean, segmentId: string, integrationId: string, data: IMemberCreateData, @@ -152,11 +153,11 @@ export default class MemberService extends LoggerBase { ) if (fireSync) { - await this.searchSyncWorkerEmitter.triggerMemberSync(tenantId, id) + await this.searchSyncWorkerEmitter.triggerMemberSync(tenantId, id, onboarding) } for (const org of organizations) { - await this.searchSyncWorkerEmitter.triggerOrganizationSync(tenantId, org.id) + await this.searchSyncWorkerEmitter.triggerOrganizationSync(tenantId, org.id, onboarding) } return id @@ -169,6 +170,7 @@ export default class 
MemberService extends LoggerBase { public async update( id: string, tenantId: string, + onboarding: boolean, segmentId: string, integrationId: string, data: IMemberUpdateData, @@ -275,11 +277,11 @@ export default class MemberService extends LoggerBase { }) if (updated && fireSync) { - await this.searchSyncWorkerEmitter.triggerMemberSync(tenantId, id) + await this.searchSyncWorkerEmitter.triggerMemberSync(tenantId, id, onboarding) } for (const org of organizations) { - await this.searchSyncWorkerEmitter.triggerOrganizationSync(tenantId, org.id) + await this.searchSyncWorkerEmitter.triggerOrganizationSync(tenantId, org.id, onboarding) } } catch (err) { this.log.error(err, { memberId: id }, 'Error while updating a member!') @@ -401,6 +403,7 @@ export default class MemberService extends LoggerBase { await txService.update( dbMember.id, tenantId, + false, segmentId, integrationId, { @@ -471,6 +474,7 @@ export default class MemberService extends LoggerBase { await txService.update( dbMember.id, tenantId, + false, segmentId, integrationId, { diff --git a/services/apps/integration_data_worker/config/custom-environment-variables.json b/services/apps/integration_data_worker/config/custom-environment-variables.json index f1c35bbb41..68fc776a23 100644 --- a/services/apps/integration_data_worker/config/custom-environment-variables.json +++ b/services/apps/integration_data_worker/config/custom-environment-variables.json @@ -19,6 +19,10 @@ "host": "CROWD_REDIS_HOST", "port": "CROWD_REDIS_PORT" }, + "unleash": { + "url": "CROWD_UNLEASH_URL", + "apiKey": "CROWD_UNLEASH_BACKEND_API_KEY" + }, "slackAlerting": { "url": "CROWD_SLACK_ALERTING_URL" }, @@ -30,5 +34,8 @@ "privateKey": "CROWD_GITHUB_PRIVATE_KEY", "webhookSecret": "CROWD_GITHUB_WEBHOOK_SECRET", "isCommitDataEnabled": "CROWD_GITHUB_IS_COMMIT_DATA_ENABLED" + }, + "worker": { + "queuePriorityLevel": "QUEUE_PRIORITY_LEVEL" } } diff --git a/services/apps/integration_data_worker/config/default.json b/services/apps/integration_data_worker/config/default.json index 96f8915d70..3c3201bd30 100644 --- a/services/apps/integration_data_worker/config/default.json +++ b/services/apps/integration_data_worker/config/default.json @@ -2,7 +2,9 @@ "db": {}, "sqs": {}, "redis": {}, + "unleash": {}, "worker": { - "maxDataRetries": 5 + "maxDataRetries": 5, + "queuePriorityLevel": "normal" } } diff --git a/services/apps/integration_data_worker/package.json b/services/apps/integration_data_worker/package.json index 50ab2d2d56..53718adaae 100644 --- a/services/apps/integration_data_worker/package.json +++ b/services/apps/integration_data_worker/package.json @@ -25,6 +25,8 @@ "@crowd/tracing": "file:../../libs/tracing", "@crowd/types": "file:../../libs/types", "@crowd/alerting": "file:../../libs/alerting", + "@crowd/feature-flags": "file:../../libs/feature-flags", + "@crowd/common_services": "file:../../libs/common_services", "@crowd/telemetry": "file:../../libs/telemetry", "@types/config": "^3.3.0", "@types/node": "^18.16.3", diff --git a/services/apps/integration_data_worker/src/bin/process-data-for-tenant.ts b/services/apps/integration_data_worker/src/bin/process-data-for-tenant.ts index b746e0613e..3ed3c12a1a 100644 --- a/services/apps/integration_data_worker/src/bin/process-data-for-tenant.ts +++ b/services/apps/integration_data_worker/src/bin/process-data-for-tenant.ts @@ -1,10 +1,17 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import IntegrationDataRepository from 
'../repo/integrationData.repo' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { IntegrationDataWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import { IntegrationStreamDataState } from '@crowd/types' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' +import { + IntegrationDataWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' const tracer = getServiceTracer() const log = getServiceLogger() @@ -19,12 +26,25 @@ if (processArguments.length !== 1) { const tenantId = processArguments[0] setImmediate(async () => { + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const dbConnection = await getDbConnection(DB_CONFIG()) + const store = new DbStore(log, dbConnection) + const redisClient = await getRedisClient(REDIS_CONFIG(), true) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new IntegrationDataWorkerEmitter(sqsClient, tracer, log) + const emitter = new IntegrationDataWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) - const store = new DbStore(log, dbConnection) const repo = new IntegrationDataRepository(store, log) const dataIds = await repo.getDataForTenant(tenantId) @@ -37,7 +57,12 @@ setImmediate(async () => { await repo.resetStream(dataId) } - await emitter.triggerDataProcessing(info.tenantId, info.integrationType, dataId) + await emitter.triggerDataProcessing( + info.tenantId, + info.integrationType, + dataId, + info.onboarding === null ? 
true : info.onboarding, + ) } else { log.error({ dataId }, 'Data stream not found!') process.exit(1) diff --git a/services/apps/integration_data_worker/src/bin/process-data.ts b/services/apps/integration_data_worker/src/bin/process-data.ts index 6c9203b4e4..aeaacd6c25 100644 --- a/services/apps/integration_data_worker/src/bin/process-data.ts +++ b/services/apps/integration_data_worker/src/bin/process-data.ts @@ -1,10 +1,17 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import IntegrationDataRepository from '../repo/integrationData.repo' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { IntegrationDataWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import { IntegrationStreamDataState } from '@crowd/types' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' +import { + IntegrationDataWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' const tracer = getServiceTracer() const log = getServiceLogger() @@ -19,12 +26,25 @@ if (processArguments.length !== 1) { const dataIds = processArguments[0].split(',') setImmediate(async () => { + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const dbConnection = await getDbConnection(DB_CONFIG()) + const store = new DbStore(log, dbConnection) + const redisClient = await getRedisClient(REDIS_CONFIG(), true) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new IntegrationDataWorkerEmitter(sqsClient, tracer, log) + const emitter = new IntegrationDataWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) - const store = new DbStore(log, dbConnection) const repo = new IntegrationDataRepository(store, log) for (const dataId of dataIds) { @@ -35,7 +55,12 @@ setImmediate(async () => { await repo.resetStream(dataId) } - await emitter.triggerDataProcessing(info.tenantId, info.integrationType, dataId) + await emitter.triggerDataProcessing( + info.tenantId, + info.integrationType, + dataId, + info.onboarding === null ? 
true : info.onboarding, + ) } else { log.error({ dataId }, 'Data stream not found!') process.exit(1) diff --git a/services/apps/integration_data_worker/src/conf/index.ts b/services/apps/integration_data_worker/src/conf/index.ts index a62fcfcdc4..772ab230ed 100644 --- a/services/apps/integration_data_worker/src/conf/index.ts +++ b/services/apps/integration_data_worker/src/conf/index.ts @@ -1,10 +1,14 @@ +import { IUnleashConfig } from '@crowd/feature-flags' import { IDatabaseConfig } from '@crowd/database' import { IRedisConfiguration } from '@crowd/redis' import { ISqsClientConfig } from '@crowd/sqs' +import { QueuePriorityLevel } from '@crowd/types' import config from 'config' +import { SERVICE } from '@crowd/common' export interface IWorkerSettings { maxDataRetries: number + queuePriorityLevel: QueuePriorityLevel } export interface ISlackAlertingConfig { @@ -72,3 +76,12 @@ export const PLATFORM_CONFIG = (platform: string): unknown | undefined => { return undefined } } + +let unleashConfig: IUnleashConfig | undefined +export const UNLEASH_CONFIG = (): IUnleashConfig | undefined => { + if (unleashConfig) return unleashConfig + + unleashConfig = Object.assign({ appName: SERVICE }, config.get('unleash')) + + return unleashConfig +} diff --git a/services/apps/integration_data_worker/src/jobs/processOldData.ts b/services/apps/integration_data_worker/src/jobs/processOldData.ts index e76791fb1d..e4d4a9ce6f 100644 --- a/services/apps/integration_data_worker/src/jobs/processOldData.ts +++ b/services/apps/integration_data_worker/src/jobs/processOldData.ts @@ -1,9 +1,9 @@ import { DbConnection, DbStore } from '@crowd/database' import { Logger } from '@crowd/logging' import { RedisClient } from '@crowd/redis' -import { DataSinkWorkerEmitter, IntegrationStreamWorkerEmitter } from '@crowd/sqs' import IntegrationDataRepository from '../repo/integrationData.repo' import IntegrationDataService from '../service/integrationDataService' +import { DataSinkWorkerEmitter, IntegrationStreamWorkerEmitter } from '@crowd/common_services' export const processOldDataJob = async ( dbConn: DbConnection, diff --git a/services/apps/integration_data_worker/src/main.ts b/services/apps/integration_data_worker/src/main.ts index 48caa97003..4ee20fcd98 100644 --- a/services/apps/integration_data_worker/src/main.ts +++ b/services/apps/integration_data_worker/src/main.ts @@ -1,11 +1,18 @@ import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG } from './conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG, WORKER_SETTINGS } from './conf' import { getRedisClient } from '@crowd/redis' -import { getDbConnection } from '@crowd/database' -import { DataSinkWorkerEmitter, IntegrationStreamWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { DbStore, getDbConnection } from '@crowd/database' +import { getSqsClient } from '@crowd/sqs' import { WorkerQueueReceiver } from './queue' import { processOldDataJob } from './jobs/processOldData' +import { + IntegrationStreamWorkerEmitter, + DataSinkWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' const tracer = getServiceTracer() const log = getServiceLogger() @@ -16,15 +23,36 @@ const PROCESSING_INTERVAL_MINUTES = 5 setImmediate(async () => { log.info('Starting integration data worker...') + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const sqsClient = 
getSqsClient(SQS_CONFIG()) const dbConnection = await getDbConnection(DB_CONFIG(), MAX_CONCURRENT_PROCESSING) const redisClient = await getRedisClient(REDIS_CONFIG(), true) - const streamWorkerEmitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) - const dataSinkWorkerEmitter = new DataSinkWorkerEmitter(sqsClient, tracer, log) + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const streamWorkerEmitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const dataSinkWorkerEmitter = new DataSinkWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) const queue = new WorkerQueueReceiver( + WORKER_SETTINGS().queuePriorityLevel, sqsClient, redisClient, dbConnection, diff --git a/services/apps/integration_data_worker/src/queue/index.ts b/services/apps/integration_data_worker/src/queue/index.ts index 084e6eea04..db9430fcb9 100644 --- a/services/apps/integration_data_worker/src/queue/index.ts +++ b/services/apps/integration_data_worker/src/queue/index.ts @@ -1,23 +1,24 @@ -import { Tracer, Span, SpanStatusCode } from '@crowd/tracing' -import { Logger } from '@crowd/logging' import { DbConnection, DbStore } from '@crowd/database' +import { Logger } from '@crowd/logging' import { RedisClient } from '@crowd/redis' import { INTEGRATION_DATA_WORKER_QUEUE_SETTINGS, - IntegrationStreamWorkerEmitter, - DataSinkWorkerEmitter, SqsClient, - SqsQueueReceiver, + SqsPrioritizedQueueReciever, } from '@crowd/sqs' +import { Span, SpanStatusCode, Tracer } from '@crowd/tracing' import { IQueueMessage, IntegrationDataWorkerQueueMessageType, ProcessStreamDataQueueMessage, + QueuePriorityLevel, } from '@crowd/types' import IntegrationStreamService from '../service/integrationDataService' +import { DataSinkWorkerEmitter, IntegrationStreamWorkerEmitter } from '@crowd/common_services' -export class WorkerQueueReceiver extends SqsQueueReceiver { +export class WorkerQueueReceiver extends SqsPrioritizedQueueReciever { constructor( + level: QueuePriorityLevel, client: SqsClient, private readonly redisClient: RedisClient, private readonly dbConn: DbConnection, @@ -28,6 +29,7 @@ export class WorkerQueueReceiver extends SqsQueueReceiver { maxConcurrentProcessing: number, ) { super( + level, client, INTEGRATION_DATA_WORKER_QUEUE_SETTINGS, maxConcurrentProcessing, diff --git a/services/apps/integration_data_worker/src/service/integrationDataService.ts b/services/apps/integration_data_worker/src/service/integrationDataService.ts index b7df7fc36e..0ae00eb0aa 100644 --- a/services/apps/integration_data_worker/src/service/integrationDataService.ts +++ b/services/apps/integration_data_worker/src/service/integrationDataService.ts @@ -1,13 +1,13 @@ +import { addSeconds, singleOrDefault } from '@crowd/common' +import { DataSinkWorkerEmitter, IntegrationStreamWorkerEmitter } from '@crowd/common_services' import { DbStore } from '@crowd/database' +import { INTEGRATION_SERVICES, IProcessDataContext } from '@crowd/integrations' import { Logger, LoggerBase, getChildLogger } from '@crowd/logging' import { RedisCache, RedisClient } from '@crowd/redis' -import IntegrationDataRepository from '../repo/integrationData.repo' -import { IActivityData, IntegrationResultType, IntegrationRunState } from '@crowd/types' -import { addSeconds, singleOrDefault } from '@crowd/common' 
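
Both workers' main.ts now assemble in the same order: build the loader once, hand it to every emitter alongside the redis client and unleash, then give the receiver WORKER_SETTINGS().queuePriorityLevel as its first argument. Condensed from the code above; the declare lines stand in for clients main.ts constructs earlier:

import { DbConnection, DbStore } from '@crowd/database'
import { Logger } from '@crowd/logging'
import { SqsClient } from '@crowd/sqs'
import { RedisClient } from '@crowd/redis'
import { Tracer } from '@crowd/tracing'
import { Unleash } from '@crowd/feature-flags'
import {
  DataSinkWorkerEmitter,
  IntegrationStreamWorkerEmitter,
  PriorityLevelContextRepository,
  QueuePriorityContextLoader,
} from '@crowd/common_services'

declare const log: Logger
declare const dbConnection: DbConnection
declare const sqsClient: SqsClient
declare const redisClient: RedisClient
declare const tracer: Tracer
declare const unleash: Unleash | undefined

const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log)
const loader: QueuePriorityContextLoader = (tenantId: string) =>
  priorityLevelRepo.loadPriorityLevelContext(tenantId)

const streamWorkerEmitter = new IntegrationStreamWorkerEmitter(
  sqsClient, redisClient, tracer, unleash, loader, log,
)
const dataSinkWorkerEmitter = new DataSinkWorkerEmitter(
  sqsClient, redisClient, tracer, unleash, loader, log,
)
// then: new WorkerQueueReceiver(WORKER_SETTINGS().queuePriorityLevel, sqsClient, ...)
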
-import { INTEGRATION_SERVICES, IProcessDataContext } from '@crowd/integrations' -import { WORKER_SETTINGS, PLATFORM_CONFIG } from '../conf' -import { DataSinkWorkerEmitter, IntegrationStreamWorkerEmitter } from '@crowd/sqs' import telemetry from '@crowd/telemetry' +import { IActivityData, IntegrationResultType, IntegrationRunState } from '@crowd/types' +import { PLATFORM_CONFIG, WORKER_SETTINGS } from '../conf' +import IntegrationDataRepository from '../repo/integrationData.repo' export default class IntegrationDataService extends LoggerBase { private readonly repo: IntegrationDataRepository @@ -136,11 +136,24 @@ export default class IntegrationDataService extends LoggerBase { cache, publishActivity: async (activity) => { - await this.publishActivity(dataInfo.tenantId, dataInfo.integrationType, dataId, activity) + await this.publishActivity( + dataInfo.tenantId, + dataInfo.integrationType, + dataId, + dataInfo.onboarding === null ? true : dataInfo.onboarding, + activity, + ) }, publishCustom: async (entity, type) => { - await this.publishCustom(dataInfo.tenantId, dataInfo.integrationType, dataId, type, entity) + await this.publishCustom( + dataInfo.tenantId, + dataInfo.integrationType, + dataId, + dataInfo.onboarding === null ? true : dataInfo.onboarding, + type, + entity, + ) }, publishStream: async (identifier, data) => { @@ -149,6 +162,7 @@ dataInfo.integrationType, dataInfo.streamId, identifier, + dataInfo.onboarding === null ? true : dataInfo.onboarding, data, dataInfo.runId, dataInfo.webhookId, @@ -244,6 +258,7 @@ tenantId: string, platform: string, dataId: string, + onboarding: boolean, type: IntegrationResultType, entity: unknown, ): Promise<void> { @@ -259,6 +274,7 @@ platform, resultId, resultId, + onboarding, ) } catch (err) { await this.triggerDataError( @@ -276,6 +292,7 @@ tenantId: string, platform: string, dataId: string, + onboarding: boolean, activity: IActivityData, ): Promise<void> { try { @@ -289,6 +306,7 @@ platform, resultId, activity.sourceId, + onboarding, ) } catch (err) { await this.triggerDataError( @@ -355,6 +373,7 @@ platform: string, parentId: string, identifier: string, + onboarding: boolean, data?: unknown, runId?: string, webhookId?: string, @@ -368,7 +387,12 @@ const streamId = await this.repo.publishStream(parentId, identifier, data, runId, webhookId) if (streamId) { if (runId) { - await this.streamWorkerEmitter.triggerStreamProcessing(tenantId, platform, streamId) + await this.streamWorkerEmitter.triggerStreamProcessing( + tenantId, + platform, + streamId, + onboarding, + ) } else if (webhookId) { await this.streamWorkerEmitter.triggerWebhookProcessing(tenantId, platform, webhookId) } else { diff --git a/services/apps/integration_run_worker/config/custom-environment-variables.json b/services/apps/integration_run_worker/config/custom-environment-variables.json index ebf11a2efa..e676c6ffd2 100644 --- a/services/apps/integration_run_worker/config/custom-environment-variables.json +++ b/services/apps/integration_run_worker/config/custom-environment-variables.json @@ -19,6 +19,10 @@ "host": "CROWD_REDIS_HOST", "port":
"CROWD_REDIS_PORT" }, + "unleash": { + "url": "CROWD_UNLEASH_URL", + "apiKey": "CROWD_UNLEASH_BACKEND_API_KEY" + }, "nango": { "url": "CROWD_NANGO_URL", "secretKey": "CROWD_NANGO_SECRET_KEY" @@ -42,5 +46,8 @@ "clientId": "CROWD_TWITTER_CLIENT_ID", "clientSecret": "CROWD_TWITTER_CLIENT_SECRET", "callbackUrl": "CROWD_TWITTER_CALLBACK_URL" + }, + "worker": { + "queuePriorityLevel": "QUEUE_PRIORITY_LEVEL" } } diff --git a/services/apps/integration_run_worker/config/default.json b/services/apps/integration_run_worker/config/default.json index a966bbe509..df904b3681 100644 --- a/services/apps/integration_run_worker/config/default.json +++ b/services/apps/integration_run_worker/config/default.json @@ -2,8 +2,10 @@ "db": {}, "sqs": {}, "redis": {}, + "unleash": {}, "nango": {}, "worker": { - "maxRetries": 5 + "maxRetries": 5, + "queuePriorityLevel": "normal" } } diff --git a/services/apps/integration_run_worker/package.json b/services/apps/integration_run_worker/package.json index a8a47cfe0a..a1c4d4aef1 100644 --- a/services/apps/integration_run_worker/package.json +++ b/services/apps/integration_run_worker/package.json @@ -27,6 +27,8 @@ "@crowd/sqs": "file:../../libs/sqs", "@crowd/tracing": "file:../../libs/tracing", "@crowd/types": "file:../../libs/types", + "@crowd/common_services": "file:../../libs/common_services", + "@crowd/feature-flags": "file:../../libs/feature-flags", "@types/config": "^3.3.0", "@types/node": "^18.16.3", "config": "^3.3.9", diff --git a/services/apps/integration_run_worker/src/bin/continue-run.ts b/services/apps/integration_run_worker/src/bin/continue-run.ts index 13038c4ed3..2aa0a8dc17 100644 --- a/services/apps/integration_run_worker/src/bin/continue-run.ts +++ b/services/apps/integration_run_worker/src/bin/continue-run.ts @@ -1,10 +1,17 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { IntegrationStreamWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import IntegrationRunRepository from '../repo/integrationRun.repo' import { IntegrationRunState } from '@crowd/types' +import { + IntegrationStreamWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' const tracer = getServiceTracer() const log = getServiceLogger() @@ -14,13 +21,20 @@ const processArguments = process.argv.slice(2) const runId = processArguments[0] setImmediate(async () => { - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) - await emitter.init() + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redis = await getRedisClient(REDIS_CONFIG()) const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const sqsClient = getSqsClient(SQS_CONFIG()) + const emitter = new IntegrationStreamWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) + await emitter.init() + const repo = new IntegrationRunRepository(store, log) const run = 
await repo.findIntegrationRunById(runId) @@ -36,7 +50,7 @@ setImmediate(async () => { log.info(`Triggering integration run for ${runId}!`) - await emitter.continueProcessingRunStreams(run.tenantId, run.platform, run.id) + await emitter.continueProcessingRunStreams(run.tenantId, run.onboarding, run.platform, run.id) process.exit(0) } else { log.error({ run }, 'Run not found!') diff --git a/services/apps/integration_run_worker/src/bin/onboard-integration.ts b/services/apps/integration_run_worker/src/bin/onboard-integration.ts index 30d096e5f4..70157ffabe 100644 --- a/services/apps/integration_run_worker/src/bin/onboard-integration.ts +++ b/services/apps/integration_run_worker/src/bin/onboard-integration.ts @@ -1,10 +1,17 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { IntegrationRunWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import IntegrationRunRepository from '../repo/integrationRun.repo' import { IntegrationState } from '@crowd/types' +import { + IntegrationRunWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' const tracer = getServiceTracer() const log = getServiceLogger() @@ -15,13 +22,19 @@ const parameter = processArguments[0] const isOnboarding = processArguments[1] ? processArguments[1] === 'true' : true setImmediate(async () => { - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) - await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redis = await getRedisClient(REDIS_CONFIG()) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const sqsClient = getSqsClient(SQS_CONFIG()) + const emitter = new IntegrationRunWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) + await emitter.init() + const repo = new IntegrationRunRepository(store, log) const integrationIds = parameter.split(',') diff --git a/services/apps/integration_run_worker/src/bin/process-repo.ts b/services/apps/integration_run_worker/src/bin/process-repo.ts index 81adbaa04c..f17f1088fa 100644 --- a/services/apps/integration_run_worker/src/bin/process-repo.ts +++ b/services/apps/integration_run_worker/src/bin/process-repo.ts @@ -1,8 +1,8 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { IntegrationRunWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import IntegrationRunRepository from '../repo/integrationRun.repo' import { IntegrationState } from '@crowd/types' import { @@ -10,6 +10,13 @@ import { GithubManualIntegrationSettings, GithubManualStreamType, } from '@crowd/integrations' +import { + IntegrationRunWorkerEmitter, + 
PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' const mapStreamTypeToEnum = (stream: string): GithubManualStreamType => { switch (stream) { @@ -66,12 +73,18 @@ setImmediate(async () => { process.exit(1) } - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) - await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redis = await getRedisClient(REDIS_CONFIG()) + + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const sqsClient = getSqsClient(SQS_CONFIG()) + const emitter = new IntegrationRunWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) + await emitter.init() const repo = new IntegrationRunRepository(store, log) diff --git a/services/apps/integration_run_worker/src/bin/trigger-all-onboardings.ts b/services/apps/integration_run_worker/src/bin/trigger-all-onboardings.ts index 920339f326..c04478f08d 100644 --- a/services/apps/integration_run_worker/src/bin/trigger-all-onboardings.ts +++ b/services/apps/integration_run_worker/src/bin/trigger-all-onboardings.ts @@ -1,22 +1,34 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import IntegrationRunRepository from '../repo/integrationRun.repo' import { singleOrDefault, timeout } from '@crowd/common' import { DbStore, getDbConnection } from '@crowd/database' import { INTEGRATION_SERVICES } from '@crowd/integrations' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { IntegrationRunWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' +import { + IntegrationRunWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' const tracer = getServiceTracer() const log = getServiceLogger() setImmediate(async () => { - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) - await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redis = await getRedisClient(REDIS_CONFIG()) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const sqsClient = getSqsClient(SQS_CONFIG()) + const emitter = new IntegrationRunWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) + await emitter.init() const repo = new IntegrationRunRepository(store, log) diff --git a/services/apps/integration_run_worker/src/bin/trigger-stream-processed.ts b/services/apps/integration_run_worker/src/bin/trigger-stream-processed.ts index 90e4fea52a..f7b5c4342a 100644 --- a/services/apps/integration_run_worker/src/bin/trigger-stream-processed.ts +++ 
b/services/apps/integration_run_worker/src/bin/trigger-stream-processed.ts @@ -1,8 +1,15 @@ -import { SQS_CONFIG } from '../conf' -import { getServiceTracer } from '@crowd/tracing' +import { + IntegrationRunWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { DbStore, getDbConnection } from '@crowd/database' +import { getUnleashClient } from '@crowd/feature-flags' import { getServiceLogger } from '@crowd/logging' -import { IntegrationRunWorkerEmitter, getSqsClient } from '@crowd/sqs' -import { StreamProcessedQueueMessage } from '@crowd/types' +import { getRedisClient } from '@crowd/redis' +import { getSqsClient } from '@crowd/sqs' +import { getServiceTracer } from '@crowd/tracing' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' const tracer = getServiceTracer() const log = getServiceLogger() @@ -10,18 +17,37 @@ const log = getServiceLogger() const processArguments = process.argv.slice(2) if (processArguments.length !== 1) { - log.error('Expected 1 argument: streamIds') + log.error('Expected 1 argument: runIds') process.exit(1) } const runIds = processArguments[0].split(',') setImmediate(async () => { + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redis = await getRedisClient(REDIS_CONFIG()) + + const dbConnection = await getDbConnection(DB_CONFIG()) + const store = new DbStore(log, dbConnection) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) + const emitter = new IntegrationRunWorkerEmitter(sqsClient, redis, tracer, unleash, loader, log) await emitter.init() - for (const runId of runIds) { - await emitter.sendMessage(runId, new StreamProcessedQueueMessage(runId)) + const results = await dbConnection.any( + `select r.id, r."tenantId", r.onboarding, i.platform from integration.runs r + inner join integrations i on i.id = r."integrationId" + where r.id in ($(runIds:csv))`, + { + runIds, + }, + ) + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + for (const res of results as any[]) { + await emitter.streamProcessed(res.tenantId, res.platform, res.id, res.onboarding) } }) diff --git a/services/apps/integration_run_worker/src/conf/index.ts b/services/apps/integration_run_worker/src/conf/index.ts index 3f8f6819c2..325f17a650 100644 --- a/services/apps/integration_run_worker/src/conf/index.ts +++ b/services/apps/integration_run_worker/src/conf/index.ts @@ -1,7 +1,10 @@ +import { SERVICE } from '@crowd/common' import { IDatabaseConfig } from '@crowd/database' +import { IUnleashConfig } from '@crowd/feature-flags' import { ISearchSyncApiConfig } from '@crowd/opensearch' import { IRedisConfiguration } from '@crowd/redis' import { ISqsClientConfig } from '@crowd/sqs' +import { QueuePriorityLevel } from '@crowd/types' import config from 'config' export interface INangoConfig { @@ -39,6 +42,7 @@ export const REDIS_CONFIG = (): IRedisConfiguration => { export interface IWorkerConfig { maxRetries: number + queuePriorityLevel: QueuePriorityLevel } let workerConfig: IWorkerConfig @@ -90,3 +94,12 @@ export const PLATFORM_CONFIG = (platform: string): unknown | undefined => { export const SEARCH_SYNC_API_CONFIG = (): ISearchSyncApiConfig => { return config.get('searchSyncApi') } + +let unleashConfig:
IUnleashConfig | undefined +export const UNLEASH_CONFIG = (): IUnleashConfig | undefined => { + if (unleashConfig) return unleashConfig + + unleashConfig = Object.assign({ appName: SERVICE }, config.get('unleash')) + + return unleashConfig +} diff --git a/services/apps/integration_run_worker/src/main.ts b/services/apps/integration_run_worker/src/main.ts index 0862ed2c3c..13ad0b76c5 100644 --- a/services/apps/integration_run_worker/src/main.ts +++ b/services/apps/integration_run_worker/src/main.ts @@ -1,16 +1,19 @@ -import { getDbConnection } from '@crowd/database' +import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' +import { getSqsClient } from '@crowd/sqs' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG, WORKER_CONFIG } from './conf' +import { WorkerQueueReceiver } from './queue' +import { ApiPubSubEmitter, getRedisClient } from '@crowd/redis' import { IntegrationRunWorkerEmitter, IntegrationStreamWorkerEmitter, IntegrationSyncWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, SearchSyncWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' -import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG } from './conf' -import { WorkerQueueReceiver } from './queue' -import { ApiPubSubEmitter, getRedisClient } from '@crowd/redis' +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' const tracer = getServiceTracer() const log = getServiceLogger() @@ -19,22 +22,56 @@ const MAX_CONCURRENT_PROCESSING = 2 setImmediate(async () => { log.info('Starting integration run worker...') + const unleash = await getUnleashClient(UNLEASH_CONFIG()) const sqsClient = getSqsClient(SQS_CONFIG()) const dbConnection = await getDbConnection(DB_CONFIG(), MAX_CONCURRENT_PROCESSING) - const redisClient = await getRedisClient(REDIS_CONFIG(), true) + const redis = await getRedisClient(REDIS_CONFIG(), true) + + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) - const runWorkerEmitter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) - const streamWorkerEmitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) - const searchSyncWorkerEmitter = new SearchSyncWorkerEmitter(sqsClient, tracer, log) - const integrationSyncWorkerEmitter = new IntegrationSyncWorkerEmitter(sqsClient, tracer, log) + const runWorkerEmitter = new IntegrationRunWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) + const streamWorkerEmitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) + const searchSyncWorkerEmitter = new SearchSyncWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) + const integrationSyncWorkerEmitter = new IntegrationSyncWorkerEmitter( + sqsClient, + redis, + tracer, + unleash, + loader, + log, + ) - const apiPubSubEmitter = new ApiPubSubEmitter(redisClient, log) + const apiPubSubEmitter = new ApiPubSubEmitter(redis, log) const queue = new WorkerQueueReceiver( + WORKER_CONFIG().queuePriorityLevel, sqsClient, - redisClient, + redis, dbConnection, streamWorkerEmitter, runWorkerEmitter, diff --git a/services/apps/integration_run_worker/src/queue/index.ts b/services/apps/integration_run_worker/src/queue/index.ts index 15ccb34da0..e56ddec416 100644 --- 
a/services/apps/integration_run_worker/src/queue/index.ts +++ b/services/apps/integration_run_worker/src/queue/index.ts @@ -1,20 +1,23 @@ -import { Tracer, Span, SpanStatusCode } from '@crowd/tracing' -import { Logger } from '@crowd/logging' -import { DbConnection, DbStore } from '@crowd/database' -import { ApiPubSubEmitter, RedisClient } from '@crowd/redis' import { - INTEGRATION_RUN_WORKER_QUEUE_SETTINGS, IntegrationRunWorkerEmitter, IntegrationStreamWorkerEmitter, - SearchSyncWorkerEmitter, IntegrationSyncWorkerEmitter, + SearchSyncWorkerEmitter, +} from '@crowd/common_services' +import { DbConnection, DbStore } from '@crowd/database' +import { Logger } from '@crowd/logging' +import { ApiPubSubEmitter, RedisClient } from '@crowd/redis' +import { + INTEGRATION_RUN_WORKER_QUEUE_SETTINGS, SqsClient, - SqsQueueReceiver, + SqsPrioritizedQueueReciever, } from '@crowd/sqs' +import { Span, SpanStatusCode, Tracer } from '@crowd/tracing' import { GenerateRunStreamsQueueMessage, IQueueMessage, IntegrationRunWorkerQueueMessageType, + QueuePriorityLevel, StartIntegrationRunQueueMessage, StreamProcessedQueueMessage, } from '@crowd/types' @@ -22,8 +25,9 @@ import IntegrationRunService from '../service/integrationRunService' /* eslint-disable no-case-declarations */ -export class WorkerQueueReceiver extends SqsQueueReceiver { +export class WorkerQueueReceiver extends SqsPrioritizedQueueReciever { constructor( + level: QueuePriorityLevel, client: SqsClient, private readonly redisClient: RedisClient, private readonly dbConn: DbConnection, @@ -36,7 +40,14 @@ export class WorkerQueueReceiver extends SqsQueueReceiver { parentLog: Logger, maxConcurrentProcessing: number, ) { - super(client, INTEGRATION_RUN_WORKER_QUEUE_SETTINGS, maxConcurrentProcessing, tracer, parentLog) + super( + level, + client, + INTEGRATION_RUN_WORKER_QUEUE_SETTINGS, + maxConcurrentProcessing, + tracer, + parentLog, + ) } override async processMessage(message: IQueueMessage): Promise<void> { diff --git a/services/apps/integration_run_worker/src/repo/integrationRun.data.ts b/services/apps/integration_run_worker/src/repo/integrationRun.data.ts index 482d5cfdec..f3c67eccc3 100644 --- a/services/apps/integration_run_worker/src/repo/integrationRun.data.ts +++ b/services/apps/integration_run_worker/src/repo/integrationRun.data.ts @@ -29,6 +29,7 @@ export interface IStartIntegrationRunData { export interface IPendingDelayedRun { id: string + onboarding: boolean tenantId: string integrationType: string } diff --git a/services/apps/integration_run_worker/src/repo/integrationRun.repo.ts b/services/apps/integration_run_worker/src/repo/integrationRun.repo.ts index f2e1f83625..e21f50f94e 100644 --- a/services/apps/integration_run_worker/src/repo/integrationRun.repo.ts +++ b/services/apps/integration_run_worker/src/repo/integrationRun.repo.ts @@ -18,7 +18,8 @@ export default class IntegrationRunRepository extends RepositoryBase { const result = await this.db().oneOrNone( ` - select r.id, r.state, r."tenantId", r."integrationId", i.platform + select r.id, r.state, r."tenantId", r."integrationId", i.platform, r.onboarding from integration.runs r inner join integrations i on r."integrationId" = i.id where r.id = $(runId) diff --git a/services/apps/integration_run_worker/src/service/integrationRunService.ts b/services/apps/integration_run_worker/src/service/integrationRunService.ts index 247c5fc061..5710b2bddc 100644 ---
b/services/apps/integration_run_worker/src/service/integrationRunService.ts @@ -7,18 +7,18 @@ } from '@crowd/integrations' import { Logger, LoggerBase, getChildLogger } from '@crowd/logging' import { ApiPubSubEmitter, RedisCache, RedisClient } from '@crowd/redis' -import { - IntegrationRunWorkerEmitter, - IntegrationStreamWorkerEmitter, - SearchSyncWorkerEmitter, - IntegrationSyncWorkerEmitter, -} from '@crowd/sqs' import { IntegrationRunState, IntegrationStreamState } from '@crowd/types' import { NANGO_CONFIG, PLATFORM_CONFIG } from '../conf' import IntegrationRunRepository from '../repo/integrationRun.repo' import MemberAttributeSettingsRepository from '../repo/memberAttributeSettings.repo' import SampleDataRepository from '../repo/sampleData.repo' import { AutomationRepository } from '../repo/automation.repo' +import { + IntegrationRunWorkerEmitter, + IntegrationStreamWorkerEmitter, + IntegrationSyncWorkerEmitter, + SearchSyncWorkerEmitter, +} from '@crowd/common_services' export default class IntegrationRunService extends LoggerBase { private readonly repo: IntegrationRunRepository @@ -211,6 +211,7 @@ export default class IntegrationRunService extends LoggerBase { await this.repo.resetDelayedRun(run.id) await this.streamWorkerEmitter.continueProcessingRunStreams( run.tenantId, + run.onboarding, run.integrationType, run.id, ) @@ -270,6 +271,7 @@ integrationInfo.tenantId, integrationInfo.type, runId, + onboarding, isManualRun, manualSettings, ) @@ -422,7 +424,14 @@ }, publishStream: async (identifier: string, data?: unknown) => { - await this.publishStream(runInfo.tenantId, runInfo.integrationType, runId, identifier, data) + await this.publishStream( + runInfo.tenantId, + runInfo.integrationType, + runId, + identifier, + runInfo.onboarding, + data, + ) }, updateIntegrationSettings: async (settings: unknown) => { @@ -516,12 +525,18 @@ platform: string, runId: string, identifier: string, + onboarding: boolean, data?: unknown, ): Promise<void> { try { this.log.debug('Publishing new root stream!') const streamId = await this.repo.publishStream(runId, identifier, data) - await this.streamWorkerEmitter.triggerStreamProcessing(tenantId, platform, streamId) + await this.streamWorkerEmitter.triggerStreamProcessing( + tenantId, + platform, + streamId, + onboarding, + ) } catch (err) { await this.triggerRunError( runId, diff --git a/services/apps/integration_stream_worker/config/custom-environment-variables.json b/services/apps/integration_stream_worker/config/custom-environment-variables.json index c40b0196f3..2e569c5c65 100644 --- a/services/apps/integration_stream_worker/config/custom-environment-variables.json +++ b/services/apps/integration_stream_worker/config/custom-environment-variables.json @@ -19,6 +19,10 @@ "host": "CROWD_REDIS_HOST", "port": "CROWD_REDIS_PORT" }, + "unleash": { + "url": "CROWD_UNLEASH_URL", + "apiKey": "CROWD_UNLEASH_BACKEND_API_KEY" + }, "nango": { "url": "CROWD_NANGO_URL", "secretKey": "CROWD_NANGO_SECRET_KEY" @@ -47,5 +51,8 @@ "clientId": "CROWD_TWITTER_CLIENT_ID", "clientSecret": "CROWD_TWITTER_CLIENT_SECRET", "callbackUrl": "CROWD_TWITTER_CALLBACK_URL" + }, + "worker": { + "queuePriorityLevel": "QUEUE_PRIORITY_LEVEL" } } diff --git a/services/apps/integration_stream_worker/config/default.json b/services/apps/integration_stream_worker/config/default.json index
3080ea2731..8a7dc79f12 100644 --- a/services/apps/integration_stream_worker/config/default.json +++ b/services/apps/integration_stream_worker/config/default.json @@ -2,9 +2,11 @@ "db": {}, "sqs": {}, "redis": {}, + "unleash": {}, "nango": {}, "worker": { - "maxStreamRetries": 5 + "maxStreamRetries": 5, + "queuePriorityLevel": "normal" }, "slack": { "maxRetrospectInSeconds": 86400 diff --git a/services/apps/integration_stream_worker/package.json b/services/apps/integration_stream_worker/package.json index 4ade14aa08..629c57c3ba 100644 --- a/services/apps/integration_stream_worker/package.json +++ b/services/apps/integration_stream_worker/package.json @@ -31,6 +31,8 @@ "@crowd/sqs": "file:../../libs/sqs", "@crowd/tracing": "file:../../libs/tracing", "@crowd/types": "file:../../libs/types", + "@crowd/feature-flags": "file:../../libs/feature-flags", + "@crowd/common_services": "file:../../libs/common_services", "@types/config": "^3.3.0", "@types/node": "^18.16.3", "config": "^3.3.9", diff --git a/services/apps/integration_stream_worker/src/bin/process-all-streams-for-integration.ts b/services/apps/integration_stream_worker/src/bin/process-all-streams-for-integration.ts index 4444fdb9aa..ce2d980bc5 100644 --- a/services/apps/integration_stream_worker/src/bin/process-all-streams-for-integration.ts +++ b/services/apps/integration_stream_worker/src/bin/process-all-streams-for-integration.ts @@ -1,17 +1,20 @@ -import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import IntegrationStreamService from '../service/integrationStreamService' import { timeout } from '@crowd/common' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' import { getRedisClient } from '@crowd/redis' +import { getSqsClient } from '@crowd/sqs' +import { IntegrationStreamState } from '@crowd/types' import { IntegrationDataWorkerEmitter, IntegrationRunWorkerEmitter, IntegrationStreamWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' -import { IntegrationStreamState } from '@crowd/types' + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' const BATCH_SIZE = 100 const MAX_CONCURRENT = 1 @@ -41,19 +44,43 @@ async function processStream( setImmediate(async () => { const sqsClient = getSqsClient(SQS_CONFIG()) - + const dbConnection = await getDbConnection(DB_CONFIG()) + const store = new DbStore(log, dbConnection) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) const redisClient = await getRedisClient(REDIS_CONFIG(), true) - const runWorkerEmiiter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) - const dataWorkerEmitter = new IntegrationDataWorkerEmitter(sqsClient, tracer, log) - const streamWorkerEmitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const runWorkerEmiiter = new IntegrationRunWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const dataWorkerEmitter = new IntegrationDataWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const streamWorkerEmitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + 
unleash, + loader, + log, + ) await runWorkerEmiiter.init() await dataWorkerEmitter.init() await streamWorkerEmitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) - const store = new DbStore(log, dbConnection) - const service = new IntegrationStreamService( redisClient, runWorkerEmiiter, diff --git a/services/apps/integration_stream_worker/src/bin/process-all-streams.ts b/services/apps/integration_stream_worker/src/bin/process-all-streams.ts index 4c80b39803..8d8475633c 100644 --- a/services/apps/integration_stream_worker/src/bin/process-all-streams.ts +++ b/services/apps/integration_stream_worker/src/bin/process-all-streams.ts @@ -1,17 +1,20 @@ -import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import IntegrationStreamService from '../service/integrationStreamService' import { timeout } from '@crowd/common' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' import { getRedisClient } from '@crowd/redis' +import { getSqsClient } from '@crowd/sqs' +import { IntegrationStreamState } from '@crowd/types' import { IntegrationDataWorkerEmitter, IntegrationRunWorkerEmitter, IntegrationStreamWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' -import { IntegrationStreamState } from '@crowd/types' + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' const BATCH_SIZE = 100 const MAX_CONCURRENT = 3 @@ -32,19 +35,43 @@ async function processStream( setImmediate(async () => { const sqsClient = getSqsClient(SQS_CONFIG()) - + const dbConnection = await getDbConnection(DB_CONFIG()) + const store = new DbStore(log, dbConnection) const redisClient = await getRedisClient(REDIS_CONFIG(), true) - const runWorkerEmiiter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) - const dataWorkerEmitter = new IntegrationDataWorkerEmitter(sqsClient, tracer, log) - const streamWorkerEmitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const runWorkerEmiiter = new IntegrationRunWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const dataWorkerEmitter = new IntegrationDataWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const streamWorkerEmitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) await runWorkerEmiiter.init() await dataWorkerEmitter.init() await streamWorkerEmitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) - const store = new DbStore(log, dbConnection) - const service = new IntegrationStreamService( redisClient, runWorkerEmiiter, diff --git a/services/apps/integration_stream_worker/src/bin/process-all-webhooks.ts b/services/apps/integration_stream_worker/src/bin/process-all-webhooks.ts index abb397c285..8ce962a86e 100644 --- a/services/apps/integration_stream_worker/src/bin/process-all-webhooks.ts +++ b/services/apps/integration_stream_worker/src/bin/process-all-webhooks.ts @@ -1,17 +1,20 @@ -import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG } 
from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import IntegrationStreamService from '../service/integrationStreamService' import { timeout } from '@crowd/common' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' import { getRedisClient } from '@crowd/redis' +import { getSqsClient } from '@crowd/sqs' +import { WebhookType } from '@crowd/types' import { IntegrationDataWorkerEmitter, IntegrationRunWorkerEmitter, IntegrationStreamWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' -import { WebhookType } from '@crowd/types' + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' const BATCH_SIZE = 100 const MAX_CONCURRENT = 3 @@ -32,19 +35,43 @@ async function processWebhook( setImmediate(async () => { const sqsClient = getSqsClient(SQS_CONFIG()) + const dbConnection = await getDbConnection(DB_CONFIG()) + const store = new DbStore(log, dbConnection) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) const redisClient = await getRedisClient(REDIS_CONFIG(), true) - const runWorkerEmiiter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) - const dataWorkerEmitter = new IntegrationDataWorkerEmitter(sqsClient, tracer, log) - const streamWorkerEmitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) + const runWorkerEmiiter = new IntegrationRunWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const dataWorkerEmitter = new IntegrationDataWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const streamWorkerEmitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) await runWorkerEmiiter.init() await dataWorkerEmitter.init() await streamWorkerEmitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) - const store = new DbStore(log, dbConnection) - const service = new IntegrationStreamService( redisClient, runWorkerEmiiter, diff --git a/services/apps/integration_stream_worker/src/bin/process-stream.ts b/services/apps/integration_stream_worker/src/bin/process-stream.ts index fcdd65e14e..104e856680 100644 --- a/services/apps/integration_stream_worker/src/bin/process-stream.ts +++ b/services/apps/integration_stream_worker/src/bin/process-stream.ts @@ -1,17 +1,20 @@ -import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import IntegrationStreamRepository from '../repo/integrationStream.repo' import IntegrationStreamService from '../service/integrationStreamService' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' import { getRedisClient } from '@crowd/redis' +import { getSqsClient } from '@crowd/sqs' +import { IntegrationStreamState } from '@crowd/types' import { IntegrationDataWorkerEmitter, IntegrationRunWorkerEmitter, IntegrationStreamWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' -import { IntegrationStreamState } from '@crowd/types' + PriorityLevelContextRepository, + 
QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' const tracer = getServiceTracer() const log = getServiceLogger() @@ -27,18 +30,43 @@ const streamIds = processArguments[0].split(',') setImmediate(async () => { const sqsClient = getSqsClient(SQS_CONFIG()) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const dbConnection = await getDbConnection(DB_CONFIG()) + const store = new DbStore(log, dbConnection) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) const redisClient = await getRedisClient(REDIS_CONFIG(), true) - const runWorkerEmiiter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) - const dataWorkerEmitter = new IntegrationDataWorkerEmitter(sqsClient, tracer, log) - const streamWorkerEmitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) + const runWorkerEmiiter = new IntegrationRunWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const dataWorkerEmitter = new IntegrationDataWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const streamWorkerEmitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) await runWorkerEmiiter.init() await dataWorkerEmitter.init() await streamWorkerEmitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) - const store = new DbStore(log, dbConnection) const repo = new IntegrationStreamRepository(store, log) const service = new IntegrationStreamService( diff --git a/services/apps/integration_stream_worker/src/bin/process-webhook.ts b/services/apps/integration_stream_worker/src/bin/process-webhook.ts index c504e39a02..4afba27f22 100644 --- a/services/apps/integration_stream_worker/src/bin/process-webhook.ts +++ b/services/apps/integration_stream_worker/src/bin/process-webhook.ts @@ -1,15 +1,18 @@ -import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import IntegrationStreamService from '../service/integrationStreamService' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' import { getRedisClient } from '@crowd/redis' +import { getSqsClient } from '@crowd/sqs' import { IntegrationDataWorkerEmitter, IntegrationRunWorkerEmitter, IntegrationStreamWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' const tracer = getServiceTracer() const log = getServiceLogger() @@ -25,19 +28,43 @@ const webhookIds = processArguments[0].split(',') setImmediate(async () => { const sqsClient = getSqsClient(SQS_CONFIG()) - + const dbConnection = await getDbConnection(DB_CONFIG()) + const store = new DbStore(log, dbConnection) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) const redisClient = await getRedisClient(REDIS_CONFIG(), true) - const runWorkerEmiiter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) - const dataWorkerEmitter = new IntegrationDataWorkerEmitter(sqsClient, tracer, log) - const streamWorkerEmitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) + const priorityLevelRepo = new 
PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const runWorkerEmiiter = new IntegrationRunWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const dataWorkerEmitter = new IntegrationDataWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const streamWorkerEmitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) await runWorkerEmiiter.init() await dataWorkerEmitter.init() await streamWorkerEmitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) - const store = new DbStore(log, dbConnection) - const service = new IntegrationStreamService( redisClient, runWorkerEmiiter, diff --git a/services/apps/integration_stream_worker/src/bin/trigger-all-failed-webhooks.ts b/services/apps/integration_stream_worker/src/bin/trigger-all-failed-webhooks.ts index 41793572e2..373e49cdf7 100644 --- a/services/apps/integration_stream_worker/src/bin/trigger-all-failed-webhooks.ts +++ b/services/apps/integration_stream_worker/src/bin/trigger-all-failed-webhooks.ts @@ -1,9 +1,16 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import IncomingWebhookRepository from '../repo/incomingWebhook.repo' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { IntegrationStreamWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' +import { + PriorityLevelContextRepository, + QueuePriorityContextLoader, + IntegrationStreamWorkerEmitter, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' const batchSize = 500 @@ -12,12 +19,24 @@ const log = getServiceLogger() setImmediate(async () => { const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) - await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redisClient = await getRedisClient(REDIS_CONFIG(), true) + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + const emitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + await emitter.init() + const repo = new IncomingWebhookRepository(store, log) let count = 0 diff --git a/services/apps/integration_stream_worker/src/bin/trigger-all-streams-for-integration.ts b/services/apps/integration_stream_worker/src/bin/trigger-all-streams-for-integration.ts index 3534329897..fe1a0300ad 100644 --- a/services/apps/integration_stream_worker/src/bin/trigger-all-streams-for-integration.ts +++ b/services/apps/integration_stream_worker/src/bin/trigger-all-streams-for-integration.ts @@ -1,8 +1,15 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' -import { getDbConnection } from '@crowd/database' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' +import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from 
'@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { IntegrationStreamWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' +import { getUnleashClient } from '@crowd/feature-flags' +import { + PriorityLevelContextRepository, + QueuePriorityContextLoader, + IntegrationStreamWorkerEmitter, +} from '@crowd/common_services' +import { getRedisClient } from '@crowd/redis' const BATCH_SIZE = 100 @@ -20,16 +27,28 @@ const integrationId = processArguments[0] setImmediate(async () => { const sqsClient = getSqsClient(SQS_CONFIG()) + const dbConnection = await getDbConnection(DB_CONFIG()) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redisClient = await getRedisClient(REDIS_CONFIG(), true) + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) - const emitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) + const emitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) - let results = await dbConnection.any( ` - select s.id + select s.id, r.onboarding from integration.streams s + left join integration.runs r on r.id = s."runId" - where state in ('error', 'pending', 'processing') + where s.state in ('error', 'pending', 'processing') and s."integrationId" = $(integrationId) order by s.id @@ -43,15 +62,16 @@ let count = 0 while (results.length > 0) { for (const result of results) { - await emitter.triggerStreamProcessing(result.id, result.id, result.id) + await emitter.triggerStreamProcessing(result.id, result.id, result.id, result.onboarding) } count += results.length log.info(`Processed total of ${count} streams for integration ${integrationId}!`) results = await dbConnection.any( ` - select s.id + select s.id, r.onboarding from integration.streams s + left join integration.runs r on r.id = s."runId" - where state in ('error', 'pending', 'processing') + where s.state in ('error', 'pending', 'processing') and s."integrationId" = $(integrationId) and s.id > $(lastId) order by s.id diff --git a/services/apps/integration_stream_worker/src/bin/trigger-all-streams.ts b/services/apps/integration_stream_worker/src/bin/trigger-all-streams.ts index dcbe3a5880..dd4d8cd60c 100644 --- a/services/apps/integration_stream_worker/src/bin/trigger-all-streams.ts +++ b/services/apps/integration_stream_worker/src/bin/trigger-all-streams.ts @@ -1,8 +1,15 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' -import { getDbConnection } from '@crowd/database' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' +import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { IntegrationStreamWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' +import { + PriorityLevelContextRepository, + QueuePriorityContextLoader, + IntegrationStreamWorkerEmitter, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' const BATCH_SIZE = 100 @@ -11,16 +18,28 @@ const log = getServiceLogger() setImmediate(async () => { const sqsClient = getSqsClient(SQS_CONFIG()) + const dbConnection = await getDbConnection(DB_CONFIG()) + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redisClient =
await getRedisClient(REDIS_CONFIG(), true) + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) - const emitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) + const emitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) await emitter.init() - const dbConnection = await getDbConnection(DB_CONFIG()) - let results = await dbConnection.any( ` - select s.id + select s.id, r.onboarding from integration.streams s + left join integration.runs r on r.id = s."runId" - where state in ('error', 'pending', 'processing') + where s.state in ('error', 'pending', 'processing') order by s.id limit ${BATCH_SIZE}; `, ) let count = 0 while (results.length > 0) { for (const result of results) { - await emitter.triggerStreamProcessing(result.id, result.id, result.id) + await emitter.triggerStreamProcessing(result.id, result.id, result.id, result.onboarding) } count += results.length log.info(`Processed total of ${count} streams!`) results = await dbConnection.any( ` - select s.id + select s.id, r.onboarding from integration.streams s + left join integration.runs r on r.id = s."runId" - where state in ('error', 'pending', 'processing') + where s.state in ('error', 'pending', 'processing') and s.id > $(lastId) order by s.id diff --git a/services/apps/integration_stream_worker/src/bin/trigger-webhook.ts b/services/apps/integration_stream_worker/src/bin/trigger-webhook.ts index 7741d0399a..01b3a1b6cc 100644 --- a/services/apps/integration_stream_worker/src/bin/trigger-webhook.ts +++ b/services/apps/integration_stream_worker/src/bin/trigger-webhook.ts @@ -1,10 +1,17 @@ -import { DB_CONFIG, SQS_CONFIG } from '../conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG } from '../conf' import IncomingWebhookRepository from '../repo/incomingWebhook.repo' import { DbStore, getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { IntegrationStreamWorkerEmitter, getSqsClient } from '@crowd/sqs' +import { getSqsClient } from '@crowd/sqs' import { WebhookState, WebhookType } from '@crowd/types' +import { + IntegrationStreamWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' const tracer = getServiceTracer() const log = getServiceLogger() @@ -19,12 +26,27 @@ const processArguments = process.argv.slice(2) if (processArguments.length !== 1) { const webhookIds = processArguments[0].split(',') setImmediate(async () => { - const sqsClient = getSqsClient(SQS_CONFIG()) - const emitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) - await emitter.init() + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const redisClient = await getRedisClient(REDIS_CONFIG()) const dbConnection = await getDbConnection(DB_CONFIG()) const store = new DbStore(log, dbConnection) + + const priorityLevelRepo = new PriorityLevelContextRepository(store, log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const sqsClient = getSqsClient(SQS_CONFIG()) + const emitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + await emitter.init() + const repo = new IncomingWebhookRepository(store, log) for (const
webhookId of webhookIds) { diff --git a/services/apps/integration_stream_worker/src/conf/index.ts b/services/apps/integration_stream_worker/src/conf/index.ts index 0d7c697a28..17f55071a7 100644 --- a/services/apps/integration_stream_worker/src/conf/index.ts +++ b/services/apps/integration_stream_worker/src/conf/index.ts @@ -1,10 +1,14 @@ +import { SERVICE } from '@crowd/common' import { IDatabaseConfig } from '@crowd/database' +import { IUnleashConfig } from '@crowd/feature-flags' import { IRedisConfiguration } from '@crowd/redis' import { ISqsClientConfig } from '@crowd/sqs' +import { QueuePriorityLevel } from '@crowd/types' import config from 'config' export interface IWorkerConfig { maxStreamRetries: number + queuePriorityLevel: QueuePriorityLevel } export interface INangoConfig { @@ -73,3 +77,12 @@ export const PLATFORM_CONFIG = (platform: string): unknown | undefined => { return undefined } } + +let unleashConfig: IUnleashConfig | undefined +export const UNLEASH_CONFIG = (): IUnleashConfig | undefined => { + if (unleashConfig) return unleashConfig + + unleashConfig = Object.assign({ appName: SERVICE }, config.get('unleash')) + + return unleashConfig +} diff --git a/services/apps/integration_stream_worker/src/jobs/processOldStreams.ts b/services/apps/integration_stream_worker/src/jobs/processOldStreams.ts index dc1cabd76d..c135a34e7e 100644 --- a/services/apps/integration_stream_worker/src/jobs/processOldStreams.ts +++ b/services/apps/integration_stream_worker/src/jobs/processOldStreams.ts @@ -1,14 +1,14 @@ import { DbConnection, DbStore } from '@crowd/database' import { Logger } from '@crowd/logging' import { RedisClient } from '@crowd/redis' +import { IInsertableWebhookStream } from '../repo/integrationStream.data' +import IntegrationStreamRepository from '../repo/integrationStream.repo' +import IntegrationStreamService from '../service/integrationStreamService' import { IntegrationDataWorkerEmitter, IntegrationRunWorkerEmitter, IntegrationStreamWorkerEmitter, -} from '@crowd/sqs' -import { IInsertableWebhookStream } from '../repo/integrationStream.data' -import IntegrationStreamRepository from '../repo/integrationStream.repo' -import IntegrationStreamService from '../service/integrationStreamService' +} from '@crowd/common_services' export const processOldStreamsJob = async ( dbConn: DbConnection, diff --git a/services/apps/integration_stream_worker/src/main.ts b/services/apps/integration_stream_worker/src/main.ts index ecae3280f0..09a0db0b12 100644 --- a/services/apps/integration_stream_worker/src/main.ts +++ b/services/apps/integration_stream_worker/src/main.ts @@ -1,16 +1,19 @@ import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' -import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG } from './conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG, WORKER_SETTINGS } from './conf' import { getRedisClient } from '@crowd/redis' -import { getDbConnection } from '@crowd/database' +import { DbStore, getDbConnection } from '@crowd/database' +import { getSqsClient } from '@crowd/sqs' +import { WorkerQueueReceiver } from './queue' +import { processOldStreamsJob } from './jobs/processOldStreams' import { IntegrationRunWorkerEmitter, IntegrationDataWorkerEmitter, IntegrationStreamWorkerEmitter, - getSqsClient, -} from '@crowd/sqs' -import { WorkerQueueReceiver } from './queue' -import { processOldStreamsJob } from './jobs/processOldStreams' + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' 
+import { getUnleashClient } from '@crowd/feature-flags' const tracer = getServiceTracer() const log = getServiceLogger() @@ -21,16 +24,44 @@ const PROCESSING_INTERVAL_MINUTES = 5 setImmediate(async () => { log.info('Starting integration stream worker...') + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + const sqsClient = getSqsClient(SQS_CONFIG()) const dbConnection = await getDbConnection(DB_CONFIG(), MAX_CONCURRENT_PROCESSING) const redisClient = await getRedisClient(REDIS_CONFIG(), true) - const runWorkerEmiiter = new IntegrationRunWorkerEmitter(sqsClient, tracer, log) - const dataWorkerEmitter = new IntegrationDataWorkerEmitter(sqsClient, tracer, log) - const streamWorkerEmitter = new IntegrationStreamWorkerEmitter(sqsClient, tracer, log) + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const runWorkerEmiiter = new IntegrationRunWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const dataWorkerEmitter = new IntegrationDataWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + const streamWorkerEmitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) const queue = new WorkerQueueReceiver( + WORKER_SETTINGS().queuePriorityLevel, sqsClient, redisClient, dbConnection, diff --git a/services/apps/integration_stream_worker/src/queue/index.ts b/services/apps/integration_stream_worker/src/queue/index.ts index e74f4212b9..088550d7e9 100644 --- a/services/apps/integration_stream_worker/src/queue/index.ts +++ b/services/apps/integration_stream_worker/src/queue/index.ts @@ -1,26 +1,30 @@ -import { Tracer, Span, SpanStatusCode } from '@crowd/tracing' -import { Logger } from '@crowd/logging' -import { DbConnection, DbStore } from '@crowd/database' import { - INTEGRATION_STREAM_WORKER_QUEUE_SETTINGS, - IntegrationRunWorkerEmitter, IntegrationDataWorkerEmitter, + IntegrationRunWorkerEmitter, IntegrationStreamWorkerEmitter, +} from '@crowd/common_services' +import { DbConnection, DbStore } from '@crowd/database' +import { Logger } from '@crowd/logging' +import { RedisClient } from '@crowd/redis' +import { + INTEGRATION_STREAM_WORKER_QUEUE_SETTINGS, SqsClient, - SqsQueueReceiver, + SqsPrioritizedQueueReciever, } from '@crowd/sqs' +import { Span, SpanStatusCode, Tracer } from '@crowd/tracing' import { ContinueProcessingRunStreamsQueueMessage, IQueueMessage, IntegrationStreamWorkerQueueMessageType, ProcessStreamQueueMessage, ProcessWebhookStreamQueueMessage, + QueuePriorityLevel, } from '@crowd/types' -import { RedisClient } from '@crowd/redis' import IntegrationStreamService from '../service/integrationStreamService' -export class WorkerQueueReceiver extends SqsQueueReceiver { +export class WorkerQueueReceiver extends SqsPrioritizedQueueReciever { constructor( + level: QueuePriorityLevel, client: SqsClient, private readonly redisClient: RedisClient, private readonly dbConn: DbConnection, @@ -32,6 +36,7 @@ export class WorkerQueueReceiver extends SqsQueueReceiver { maxConcurrentProcessing: number, ) { super( + level, client, INTEGRATION_STREAM_WORKER_QUEUE_SETTINGS, maxConcurrentProcessing, diff --git a/services/apps/integration_stream_worker/src/repo/integrationStream.data.ts b/services/apps/integration_stream_worker/src/repo/integrationStream.data.ts index a7c4352265..8d7f30625e 100644 --- 
a/services/apps/integration_stream_worker/src/repo/integrationStream.data.ts +++ b/services/apps/integration_stream_worker/src/repo/integrationStream.data.ts @@ -29,6 +29,7 @@ export interface IProcessableStream { integrationType: string runId: string | null webhookId: string | null + onboarding: boolean | null } export interface IInsertableWebhookStream { diff --git a/services/apps/integration_stream_worker/src/repo/integrationStream.repo.ts b/services/apps/integration_stream_worker/src/repo/integrationStream.repo.ts index 9e08f9ce26..a119cec9fe 100644 --- a/services/apps/integration_stream_worker/src/repo/integrationStream.repo.ts +++ b/services/apps/integration_stream_worker/src/repo/integrationStream.repo.ts @@ -128,9 +128,11 @@ export default class IntegrationStreamRepository extends RepositoryBase $(lastId) order by s.id limit ${limit} @@ -173,9 +177,11 @@ export default class IntegrationStreamRepository extends RepositoryBase { await this.updateIntegrationSettings(streamId, settings) @@ -571,6 +581,7 @@ export default class IntegrationStreamService extends LoggerBase { streamInfo.tenantId, streamInfo.integrationType, streamInfo.runId, + streamInfo.onboarding, ) } } @@ -638,6 +649,7 @@ export default class IntegrationStreamService extends LoggerBase { platform: string, parentId: string, identifier: string, + onboarding: boolean, data?: unknown, runId?: string, webhookId?: string, @@ -652,7 +664,12 @@ export default class IntegrationStreamService extends LoggerBase { // publising normal stream const streamId = await this.repo.publishStream(parentId, identifier, data, runId) if (streamId) { - await this.streamWorkerEmitter.triggerStreamProcessing(tenantId, platform, streamId) + await this.streamWorkerEmitter.triggerStreamProcessing( + tenantId, + platform, + streamId, + onboarding, + ) } else { this.log.debug({ identifier }, 'Child stream already exists!') } @@ -683,13 +700,14 @@ export default class IntegrationStreamService extends LoggerBase { tenantId: string, platform: string, streamId: string, + onboarding: boolean, data: unknown, runId?: string, ): Promise { try { this.log.debug('Publishing new stream data!') const dataId = await this.repo.publishData(streamId, data) - await this.dataWorkerEmitter.triggerDataProcessing(tenantId, platform, dataId) + await this.dataWorkerEmitter.triggerDataProcessing(tenantId, platform, dataId, onboarding) } catch (err) { if (runId) { await this.triggerRunError( diff --git a/services/apps/integration_sync_worker/config/custom-environment-variables.json b/services/apps/integration_sync_worker/config/custom-environment-variables.json index aba9658dd4..c3bc716227 100644 --- a/services/apps/integration_sync_worker/config/custom-environment-variables.json +++ b/services/apps/integration_sync_worker/config/custom-environment-variables.json @@ -1,6 +1,7 @@ { "service": { - "edition": "CROWD_EDITION" + "edition": "CROWD_EDITION", + "queuePriorityLevel": "QUEUE_PRIORITY_LEVEL" }, "db": { "host": "CROWD_DB_WRITE_HOST", diff --git a/services/apps/integration_sync_worker/config/default.json b/services/apps/integration_sync_worker/config/default.json index 0d056b3639..fb3f396811 100644 --- a/services/apps/integration_sync_worker/config/default.json +++ b/services/apps/integration_sync_worker/config/default.json @@ -1,5 +1,7 @@ { - "service": {}, + "service": { + "queuePriorityLevel": "normal" + }, "db": {}, "sqs": {}, "redis": {}, diff --git a/services/apps/integration_sync_worker/package.json b/services/apps/integration_sync_worker/package.json index 
2dcbe9a4e2..b673741639 100644 --- a/services/apps/integration_sync_worker/package.json +++ b/services/apps/integration_sync_worker/package.json @@ -17,7 +17,6 @@ "@crowd/common": "file:../../libs/common", "@crowd/database": "file:../../libs/database", "@crowd/logging": "file:../../libs/logging", - "@crowd/redis": "file:../../libs/redis", "@crowd/sqs": "file:../../libs/sqs", "@crowd/tracing": "file:../../libs/tracing", "@crowd/types": "file:../../libs/types", diff --git a/services/apps/integration_sync_worker/src/conf/index.ts b/services/apps/integration_sync_worker/src/conf/index.ts index 8fb612cd27..f0964b6f6e 100644 --- a/services/apps/integration_sync_worker/src/conf/index.ts +++ b/services/apps/integration_sync_worker/src/conf/index.ts @@ -1,10 +1,11 @@ import { IDatabaseConfig } from '@crowd/database' import { ISqsClientConfig } from '@crowd/sqs' -import { IOpenSearchConfig } from '@crowd/types' +import { IOpenSearchConfig, QueuePriorityLevel } from '@crowd/types' import config from 'config' export interface IServiceConfig { edition: string + queuePriorityLevel: QueuePriorityLevel } export interface INangoConfig { url: string diff --git a/services/apps/integration_sync_worker/src/main.ts b/services/apps/integration_sync_worker/src/main.ts index 89eef19f4a..e1fb251f6a 100644 --- a/services/apps/integration_sync_worker/src/main.ts +++ b/services/apps/integration_sync_worker/src/main.ts @@ -2,7 +2,7 @@ import { getDbConnection } from '@crowd/database' import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' import { getSqsClient } from '@crowd/sqs' -import { DB_CONFIG, OPENSEARCH_CONFIG, SQS_CONFIG } from './conf' +import { DB_CONFIG, OPENSEARCH_CONFIG, SERVICE_CONFIG, SQS_CONFIG } from './conf' import { WorkerQueueReceiver } from './queue' import { getOpensearchClient } from '@crowd/opensearch' @@ -13,7 +13,6 @@ const MAX_CONCURRENT_PROCESSING = 2 setImmediate(async () => { log.info('Starting integration sync worker...') - const sqsClient = getSqsClient(SQS_CONFIG()) const dbConnection = await getDbConnection(DB_CONFIG(), MAX_CONCURRENT_PROCESSING) @@ -21,6 +20,7 @@ setImmediate(async () => { const opensearchClient = getOpensearchClient(OPENSEARCH_CONFIG()) const worker = new WorkerQueueReceiver( + SERVICE_CONFIG().queuePriorityLevel, sqsClient, dbConnection, opensearchClient, diff --git a/services/apps/integration_sync_worker/src/queue/index.ts b/services/apps/integration_sync_worker/src/queue/index.ts index 0d6ed5a083..5822f389fd 100644 --- a/services/apps/integration_sync_worker/src/queue/index.ts +++ b/services/apps/integration_sync_worker/src/queue/index.ts @@ -1,19 +1,25 @@ -import { Tracer, Span, SpanStatusCode } from '@crowd/tracing' -import { Logger } from '@crowd/logging' import { DbConnection, DbStore } from '@crowd/database' -import { MemberSyncService } from '../service/member.sync.service' -import { OpenSearchService } from '../service/opensearch.service' -import { OrganizationSyncService } from '../service/organization.sync.service' -import { INTEGRATION_SYNC_WORKER_QUEUE_SETTINGS, SqsClient, SqsQueueReceiver } from '@crowd/sqs' +import { Logger } from '@crowd/logging' +import { + INTEGRATION_SYNC_WORKER_QUEUE_SETTINGS, + SqsClient, + SqsPrioritizedQueueReciever, +} from '@crowd/sqs' +import { Span, SpanStatusCode, Tracer } from '@crowd/tracing' import { AutomationSyncTrigger, IQueueMessage, IntegrationSyncWorkerQueueMessageType, + QueuePriorityLevel, } from '@crowd/types' import { Client } from '@opensearch-project/opensearch' 
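// Editor's note — illustrative only, not part of the patch: receivers now
// extend SqsPrioritizedQueueReciever (spelling as in @crowd/sqs) and take the
// QueuePriorityLevel they consume as their first constructor argument, so one
// worker deployment can be pinned per level. "normal" is the default in
// config/default.json and is overridden per deployment via QUEUE_PRIORITY_LEVEL.
import { QueuePriorityLevel } from '@crowd/types'

const level: QueuePriorityLevel = QueuePriorityLevel.NORMAL // e.g. SERVICE_CONFIG().queuePriorityLevel
// main.ts below passes it through as:
//   new WorkerQueueReceiver(level, sqsClient, dbConnection, opensearchClient, ...)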
+import { MemberSyncService } from '../service/member.sync.service' +import { OpenSearchService } from '../service/opensearch.service' +import { OrganizationSyncService } from '../service/organization.sync.service' -export class WorkerQueueReceiver extends SqsQueueReceiver { +export class WorkerQueueReceiver extends SqsPrioritizedQueueReciever { constructor( + level: QueuePriorityLevel, client: SqsClient, private readonly dbConn: DbConnection, private readonly openSearchClient: Client, @@ -22,6 +28,7 @@ export class WorkerQueueReceiver extends SqsQueueReceiver { maxConcurrentProcessing: number, ) { super( + level, client, INTEGRATION_SYNC_WORKER_QUEUE_SETTINGS, maxConcurrentProcessing, @@ -42,7 +49,7 @@ export class WorkerQueueReceiver extends SqsQueueReceiver { return new OrganizationSyncService(new DbStore(this.log, this.dbConn), this.log) } - protected override async processMessage(message: T): Promise { + public override async processMessage(message: T): Promise { await this.tracer.startActiveSpan('ProcessMessage', async (span: Span) => { try { this.log.trace({ messageType: message.type }, 'Processing message!') diff --git a/services/apps/search_sync_worker/config/custom-environment-variables.json b/services/apps/search_sync_worker/config/custom-environment-variables.json index 5d53ec56f9..4afd4ad866 100644 --- a/services/apps/search_sync_worker/config/custom-environment-variables.json +++ b/services/apps/search_sync_worker/config/custom-environment-variables.json @@ -1,6 +1,7 @@ { "service": { - "edition": "CROWD_EDITION" + "edition": "CROWD_EDITION", + "queuePriorityLevel": "QUEUE_PRIORITY_LEVEL" }, "db": { "host": "CROWD_DB_WRITE_HOST", diff --git a/services/apps/search_sync_worker/config/default.json b/services/apps/search_sync_worker/config/default.json index 0ae2ffe715..c3476f4311 100644 --- a/services/apps/search_sync_worker/config/default.json +++ b/services/apps/search_sync_worker/config/default.json @@ -1,5 +1,7 @@ { - "service": {}, + "service": { + "queuePriorityLevel": "normal" + }, "db": {}, "sqs": {}, "redis": {}, diff --git a/services/apps/search_sync_worker/src/conf/index.ts b/services/apps/search_sync_worker/src/conf/index.ts index a02ea34892..e1fd1a5428 100644 --- a/services/apps/search_sync_worker/src/conf/index.ts +++ b/services/apps/search_sync_worker/src/conf/index.ts @@ -1,6 +1,7 @@ import { IDatabaseConfig } from '@crowd/database' import { IRedisConfiguration } from '@crowd/redis' import { ISqsClientConfig } from '@crowd/sqs' +import { QueuePriorityLevel } from '@crowd/types' import config from 'config' export interface IOpenSearchConfig { @@ -12,6 +13,7 @@ export interface IOpenSearchConfig { export interface IServiceConfig { edition: string + queuePriorityLevel: QueuePriorityLevel } let serviceConfig: IServiceConfig diff --git a/services/apps/search_sync_worker/src/main.ts b/services/apps/search_sync_worker/src/main.ts index 94b9ecd109..4e37d61a9c 100644 --- a/services/apps/search_sync_worker/src/main.ts +++ b/services/apps/search_sync_worker/src/main.ts @@ -4,7 +4,7 @@ import { getServiceTracer } from '@crowd/tracing' import { getServiceLogger } from '@crowd/logging' import { getRedisClient } from '@crowd/redis' import { getSqsClient } from '@crowd/sqs' -import { DB_CONFIG, OPENSEARCH_CONFIG, REDIS_CONFIG, SQS_CONFIG } from './conf' +import { DB_CONFIG, OPENSEARCH_CONFIG, REDIS_CONFIG, SERVICE_CONFIG, SQS_CONFIG } from './conf' import { WorkerQueueReceiver } from './queue' const tracer = getServiceTracer() @@ -24,6 +24,7 @@ setImmediate(async () => { const 
dbConnection = await getDbConnection(DB_CONFIG(), MAX_CONCURRENT_PROCESSING) const worker = new WorkerQueueReceiver( + SERVICE_CONFIG().queuePriorityLevel, redis, sqsClient, dbConnection, diff --git a/services/apps/search_sync_worker/src/queue/index.ts b/services/apps/search_sync_worker/src/queue/index.ts index f732a2c7dd..a3a9a51233 100644 --- a/services/apps/search_sync_worker/src/queue/index.ts +++ b/services/apps/search_sync_worker/src/queue/index.ts @@ -1,25 +1,30 @@ import { BatchProcessor } from '@crowd/common' -import { Tracer, Span, SpanStatusCode } from '@crowd/tracing' import { DbConnection, DbStore } from '@crowd/database' import { Logger } from '@crowd/logging' +import { RedisClient } from '@crowd/redis' +import { + SEARCH_SYNC_WORKER_QUEUE_SETTINGS, + SqsClient, + SqsPrioritizedQueueReciever, +} from '@crowd/sqs' +import { Span, SpanStatusCode, Tracer } from '@crowd/tracing' +import { IQueueMessage, QueuePriorityLevel, SearchSyncWorkerQueueMessageType } from '@crowd/types' import { OpenSearchService, ActivitySyncService, MemberSyncService, OrganizationSyncService, } from '@crowd/opensearch' -import { RedisClient } from '@crowd/redis' -import { SEARCH_SYNC_WORKER_QUEUE_SETTINGS, SqsClient, SqsQueueReceiver } from '@crowd/sqs' -import { IQueueMessage, SearchSyncWorkerQueueMessageType } from '@crowd/types' -import { SERVICE_CONFIG } from 'conf' +import { SERVICE_CONFIG } from '../conf' /* eslint-disable @typescript-eslint/no-explicit-any */ -export class WorkerQueueReceiver extends SqsQueueReceiver { +export class WorkerQueueReceiver extends SqsPrioritizedQueueReciever { private readonly memberBatchProcessor: BatchProcessor private readonly activityBatchProcessor: BatchProcessor private readonly organizationBatchProcessor: BatchProcessor constructor( + level: QueuePriorityLevel, private readonly redisClient: RedisClient, client: SqsClient, private readonly dbConn: DbConnection, @@ -29,6 +34,7 @@ export class WorkerQueueReceiver extends SqsQueueReceiver { maxConcurrentProcessing: number, ) { super( + level, client, SEARCH_SYNC_WORKER_QUEUE_SETTINGS, maxConcurrentProcessing, @@ -112,7 +118,7 @@ export class WorkerQueueReceiver extends SqsQueueReceiver { ) } - protected override async processMessage(message: T): Promise { + public override async processMessage(message: T): Promise { await this.tracer.startActiveSpan('ProcessMessage', async (span: Span) => { try { this.log.trace({ messageType: message.type }, 'Processing message!') diff --git a/services/apps/webhook_api/config/custom-environment-variables.json b/services/apps/webhook_api/config/custom-environment-variables.json index 5f249ff8dc..322bb4daa7 100644 --- a/services/apps/webhook_api/config/custom-environment-variables.json +++ b/services/apps/webhook_api/config/custom-environment-variables.json @@ -12,5 +12,15 @@ "port": "CROWD_SQS_PORT", "accessKeyId": "CROWD_SQS_AWS_ACCESS_KEY_ID", "secretAccessKey": "CROWD_SQS_AWS_SECRET_ACCESS_KEY" + }, + "unleash": { + "url": "CROWD_UNLEASH_URL", + "apiKey": "CROWD_UNLEASH_BACKEND_API_KEY" + }, + "redis": { + "username": "CROWD_REDIS_USERNAME", + "password": "CROWD_REDIS_PASSWORD", + "host": "CROWD_REDIS_HOST", + "port": "CROWD_REDIS_PORT" } } diff --git a/services/apps/webhook_api/config/default.json b/services/apps/webhook_api/config/default.json index 371dfadee0..8559154b27 100644 --- a/services/apps/webhook_api/config/default.json +++ b/services/apps/webhook_api/config/default.json @@ -2,6 +2,8 @@ "service": { "port": 8082 }, + "unleash": {}, + "redis": {}, "db": {}, "sqs": {} } 
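// Editor's note — why webhook-api now needs the unleash and redis blocks added
// above: the shared emitters gate prioritized queues behind a per-tenant
// feature flag whose evaluation is cached in Redis. A minimal sketch, assuming
// the isFeatureEnabled signature used later in this patch:
import { UnleashClient, isFeatureEnabled } from '@crowd/feature-flags'
import { RedisClient } from '@crowd/redis'
import { FeatureFlag } from '@crowd/types'

async function isPrioritized(
  tenantId: string,
  unleash: UnleashClient | undefined, // from getUnleashClient(UNLEASH_CONFIG())
  redis: RedisClient, // from getRedisClient(REDIS_CONFIG())
): Promise<boolean> {
  return isFeatureEnabled(
    FeatureFlag.PRIORITIZED_QUEUES,
    async () => ({ tenantId }), // unleash context loader
    unleash,
    redis,
    5 * 60, // cache the evaluation for 5 minutes
    tenantId,
  )
}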
diff --git a/services/apps/webhook_api/package.json b/services/apps/webhook_api/package.json index 752589f8fd..673137c245 100644 --- a/services/apps/webhook_api/package.json +++ b/services/apps/webhook_api/package.json @@ -21,6 +21,9 @@ "@crowd/sqs": "file:../../libs/sqs", "@crowd/types": "file:../../libs/types", "@crowd/telemetry": "file:../../libs/telemetry", + "@crowd/feature-flags": "file:../../libs/feature-flags", + "@crowd/common_services": "file:../../libs/common_services", + "@crowd/redis": "file:../../libs/redis", "@types/config": "^3.3.0", "@types/express": "^4.17.17", "@types/node": "^18.16.3", diff --git a/services/apps/webhook_api/src/conf/index.ts b/services/apps/webhook_api/src/conf/index.ts index 5d80e76b15..f2105d97b0 100644 --- a/services/apps/webhook_api/src/conf/index.ts +++ b/services/apps/webhook_api/src/conf/index.ts @@ -1,7 +1,9 @@ import { IDatabaseConfig } from '@crowd/database' import { ISqsClientConfig } from '@crowd/sqs' import config from 'config' - +import { IUnleashConfig } from '@crowd/feature-flags' +import { SERVICE } from '@crowd/common' +import { IRedisConfiguration } from '@crowd/redis' export interface IWebhookApiServiceConfig { port: number } @@ -29,3 +31,20 @@ export const DB_CONFIG = (): IDatabaseConfig => { dbConfig = config.get('db') return dbConfig } + +let unleashConfig: IUnleashConfig | undefined +export const UNLEASH_CONFIG = (): IUnleashConfig | undefined => { + if (unleashConfig) return unleashConfig + + unleashConfig = Object.assign({ appName: SERVICE }, config.get('unleash')) + + return unleashConfig +} + +let redisConfig: IRedisConfiguration +export const REDIS_CONFIG = (): IRedisConfiguration => { + if (redisConfig) return redisConfig + + redisConfig = config.get('redis') + return redisConfig +} diff --git a/services/apps/webhook_api/src/main.ts b/services/apps/webhook_api/src/main.ts index 3ca3dd16e4..8d1535a8ef 100644 --- a/services/apps/webhook_api/src/main.ts +++ b/services/apps/webhook_api/src/main.ts @@ -1,9 +1,9 @@ import { getServiceLogger } from '@crowd/logging' -import { DB_CONFIG, SQS_CONFIG, WEBHOOK_API_CONFIG } from './conf' +import { DB_CONFIG, REDIS_CONFIG, SQS_CONFIG, UNLEASH_CONFIG, WEBHOOK_API_CONFIG } from './conf' import express from 'express' import { loggingMiddleware } from './middleware/logging' import { getSqsClient } from '@crowd/sqs' -import { getDbConnection } from '@crowd/database' +import { DbStore, getDbConnection } from '@crowd/database' import { databaseMiddleware } from './middleware/database' import { errorMiddleware } from './middleware/error' import { sqsMiddleware } from './middleware/sqs' @@ -12,17 +12,46 @@ import { installGroupsIoRoutes } from './routes/groupsio' import { installDiscourseRoutes } from './routes/discourse' import cors from 'cors' import { telemetryExpressMiddleware } from '@crowd/telemetry' +import { getUnleashClient } from '@crowd/feature-flags' +import { getRedisClient } from '@crowd/redis' +import { + IntegrationStreamWorkerEmitter, + PriorityLevelContextRepository, + QueuePriorityContextLoader, +} from '@crowd/common_services' +import { getServiceTracer } from '@crowd/tracing' +import { emittersMiddleware } from './middleware/emitters' +const tracer = getServiceTracer() const log = getServiceLogger() const config = WEBHOOK_API_CONFIG() setImmediate(async () => { const app = express() + const unleash = await getUnleashClient(UNLEASH_CONFIG()) + + const redisClient = await getRedisClient(REDIS_CONFIG()) + const sqsClient = getSqsClient(SQS_CONFIG()) const dbConnection = await 
getDbConnection(DB_CONFIG(), 3, 0) + const priorityLevelRepo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log) + const loader: QueuePriorityContextLoader = (tenantId: string) => + priorityLevelRepo.loadPriorityLevelContext(tenantId) + + const integrationStreamWorkerEmitter = new IntegrationStreamWorkerEmitter( + sqsClient, + redisClient, + tracer, + unleash, + loader, + log, + ) + + await integrationStreamWorkerEmitter.init() + app.use(emittersMiddleware(integrationStreamWorkerEmitter)) app.use((req, res, next) => { // Groups.io doesn't send a content-type header, // so request body parsing is just skipped diff --git a/services/apps/webhook_api/src/middleware/emitters.ts new file mode 100644 index 0000000000..72b82099df --- /dev/null +++ b/services/apps/webhook_api/src/middleware/emitters.ts @@ -0,0 +1,18 @@ +import { IntegrationStreamWorkerEmitter } from '@crowd/common_services' +import { NextFunction, Request, RequestHandler, Response } from 'express' + +export interface IEmittersRequest { + emitters: { + integrationStreamWorker: IntegrationStreamWorkerEmitter + } +} + +export const emittersMiddleware = ( + integrationStreamWorkerEmitter: IntegrationStreamWorkerEmitter, +): RequestHandler => { + return (req: Request, _res: Response, next: NextFunction) => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(req as any).emitters = { integrationStreamWorker: integrationStreamWorkerEmitter } + next() + } +} diff --git a/services/apps/webhook_api/src/middleware/index.ts index 0ffa59e487..0303134d7b 100644 --- a/services/apps/webhook_api/src/middleware/index.ts +++ b/services/apps/webhook_api/src/middleware/index.ts @@ -2,5 +2,11 @@ import { Request } from 'express' import { ILoggingRequest } from './logging' import { IDatabaseRequest } from './database' import { ISqsRequest } from './sqs' +import { IEmittersRequest } from './emitters' -export interface ApiRequest extends Request, ILoggingRequest, IDatabaseRequest, ISqsRequest {} +export interface ApiRequest + extends Request, + ILoggingRequest, + IDatabaseRequest, + ISqsRequest, + IEmittersRequest {} diff --git a/services/apps/webhook_api/src/routes/discourse.ts index 6d30a7467d..ce62b628c6 100644 --- a/services/apps/webhook_api/src/routes/discourse.ts +++ b/services/apps/webhook_api/src/routes/discourse.ts @@ -1,16 +1,11 @@ -import { asyncWrap } from '../middleware/error' -import { WebhooksRepository } from '../repos/webhooks.repo' import { Error400BadRequest } from '@crowd/common' -import { getServiceTracer } from '@crowd/tracing' -import { IntegrationStreamWorkerEmitter } from '@crowd/sqs' import { PlatformType, WebhookType } from '@crowd/types' import express from 'express' import { verifyWebhookSignature } from 'utils/crypto' - -const tracer = getServiceTracer() +import { asyncWrap } from '../middleware/error' +import { WebhooksRepository } from '../repos/webhooks.repo' export const installDiscourseRoutes = async (app: express.Express) => { - let emitter: IntegrationStreamWorkerEmitter app.post( '/discourse/:tenantId/', asyncWrap(async (req, res) => { @@ -59,12 +54,11 @@ export const installDiscourseRoutes = async (app: express.Express) => { }, ) - if (!emitter) { - emitter = new IntegrationStreamWorkerEmitter(req.sqs, tracer, req.log) - await emitter.init() - } - - await emitter.triggerWebhookProcessing(integration.tenantId, integration.platform, id) + 
await req.emitters.integrationStreamWorker.triggerWebhookProcessing( + integration.tenantId, + integration.platform, + id, + ) res.sendStatus(204) } else { diff --git a/services/apps/webhook_api/src/routes/github.ts b/services/apps/webhook_api/src/routes/github.ts index dd11f442bc..e1f1f09619 100644 --- a/services/apps/webhook_api/src/routes/github.ts +++ b/services/apps/webhook_api/src/routes/github.ts @@ -1,18 +1,13 @@ import { asyncWrap } from '../middleware/error' import { WebhooksRepository } from '../repos/webhooks.repo' import { Error400BadRequest } from '@crowd/common' -import { getServiceTracer } from '@crowd/tracing' -import { IntegrationStreamWorkerEmitter } from '@crowd/sqs' import { PlatformType, WebhookType } from '@crowd/types' import express from 'express' -const tracer = getServiceTracer() - const SIGNATURE_HEADER = 'x-hub-signature' const EVENT_HEADER = 'x-github-event' export const installGithubRoutes = async (app: express.Express) => { - let emitter: IntegrationStreamWorkerEmitter app.post( '/github', asyncWrap(async (req, res) => { @@ -48,12 +43,11 @@ export const installGithubRoutes = async (app: express.Express) => { }, ) - if (!emitter) { - emitter = new IntegrationStreamWorkerEmitter(req.sqs, tracer, req.log) - await emitter.init() - } - - await emitter.triggerWebhookProcessing(integration.tenantId, integration.platform, id) + await req.emitters.integrationStreamWorker.triggerWebhookProcessing( + integration.tenantId, + integration.platform, + id, + ) res.sendStatus(204) } else { diff --git a/services/apps/webhook_api/src/routes/groupsio.ts b/services/apps/webhook_api/src/routes/groupsio.ts index 6d3c635329..261db7a3bf 100644 --- a/services/apps/webhook_api/src/routes/groupsio.ts +++ b/services/apps/webhook_api/src/routes/groupsio.ts @@ -1,15 +1,10 @@ import { asyncWrap } from '../middleware/error' import { WebhooksRepository } from '../repos/webhooks.repo' import { Error400BadRequest } from '@crowd/common' -import { getServiceTracer } from '@crowd/tracing' -import { IntegrationStreamWorkerEmitter } from '@crowd/sqs' import { WebhookType } from '@crowd/types' import express from 'express' -const tracer = getServiceTracer() - export const installGroupsIoRoutes = async (app: express.Express) => { - let emitter: IntegrationStreamWorkerEmitter app.post( '/groupsio', asyncWrap(async (req, res) => { @@ -42,12 +37,11 @@ export const installGroupsIoRoutes = async (app: express.Express) => { }, ) - if (!emitter) { - emitter = new IntegrationStreamWorkerEmitter(req.sqs, tracer, req.log) - await emitter.init() - } - - await emitter.triggerWebhookProcessing(integration.tenantId, integration.platform, result) + await req.emitters.integrationStreamWorker.triggerWebhookProcessing( + integration.tenantId, + integration.platform, + result, + ) res.sendStatus(204) } else { diff --git a/services/libs/common_services/.eslintrc.cjs b/services/libs/common_services/.eslintrc.cjs new file mode 100644 index 0000000000..4a8c9f2f7d --- /dev/null +++ b/services/libs/common_services/.eslintrc.cjs @@ -0,0 +1,21 @@ +module.exports = { + parser: '@typescript-eslint/parser', + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + 'plugin:prettier/recommended', + ], + plugins: ['@typescript-eslint', 'prettier'], + parserOptions: { + ecmaVersion: 2022, + sourceType: 'module', + }, + env: { + es6: true, + node: true, + }, + rules: { + 'prettier/prettier': 'error', + '@typescript-eslint/explicit-module-boundary-types': 'off', + }, +}; diff --git 
a/services/libs/common_services/.prettierignore b/services/libs/common_services/.prettierignore new file mode 100644 index 0000000000..323bc0d11c --- /dev/null +++ b/services/libs/common_services/.prettierignore @@ -0,0 +1,3 @@ +dist/ +.eslintrc.cjs +.prettierrc.cjs \ No newline at end of file diff --git a/services/libs/common_services/.prettierrc.cjs b/services/libs/common_services/.prettierrc.cjs new file mode 100644 index 0000000000..424d55813a --- /dev/null +++ b/services/libs/common_services/.prettierrc.cjs @@ -0,0 +1,7 @@ +module.exports = { + singleQuote: true, + arrowParens: 'always', + printWidth: 100, + trailingComma: 'all', + semi: false, +}; diff --git a/services/libs/common_services/package.json b/services/libs/common_services/package.json new file mode 100644 index 0000000000..dac2044604 --- /dev/null +++ b/services/libs/common_services/package.json @@ -0,0 +1,31 @@ +{ + "name": "@crowd/common_services", + "version": "1.0.0", + "private": true, + "scripts": { + "lint": "./node_modules/.bin/eslint --ext .ts src --max-warnings=0", + "format": "./node_modules/.bin/prettier --write \"src/**/*.ts\"", + "format-check": "./node_modules/.bin/prettier --check .", + "tsc-check": "./node_modules/.bin/tsc --noEmit" + }, + "devDependencies": { + "@types/node": "^18.16.3", + "@typescript-eslint/eslint-plugin": "^5.59.2", + "@typescript-eslint/parser": "^5.59.2", + "eslint": "^8.39.0", + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-prettier": "^4.2.1", + "prettier": "^2.8.8", + "typescript": "^5.0.4" + }, + "dependencies": { + "@crowd/common": "file:../common", + "@crowd/logging": "file:../logging", + "@crowd/types": "file:../types", + "@crowd/database": "file:../database", + "@crowd/feature-flags": "file:../feature-flags", + "@crowd/redis": "file:../redis", + "@crowd/tracing": "file:../tracing", + "@crowd/sqs": "file:../sqs" + } +} diff --git a/services/libs/common_services/src/index.ts b/services/libs/common_services/src/index.ts new file mode 100644 index 0000000000..4c230b6055 --- /dev/null +++ b/services/libs/common_services/src/index.ts @@ -0,0 +1,2 @@ +export * from './services' +export * from './repos' diff --git a/services/libs/common_services/src/repos/index.ts b/services/libs/common_services/src/repos/index.ts new file mode 100644 index 0000000000..4c063ea2a7 --- /dev/null +++ b/services/libs/common_services/src/repos/index.ts @@ -0,0 +1 @@ +export * from './priorityLevelContext.repo' diff --git a/services/libs/common_services/src/repos/priorityLevelContext.repo.ts b/services/libs/common_services/src/repos/priorityLevelContext.repo.ts new file mode 100644 index 0000000000..71fd01b1d3 --- /dev/null +++ b/services/libs/common_services/src/repos/priorityLevelContext.repo.ts @@ -0,0 +1,26 @@ +import { DbStore, RepositoryBase } from '@crowd/database' +import { Logger } from '@crowd/logging' +import { IQueuePriorityCalculationContext } from '@crowd/types' + +export class PriorityLevelContextRepository extends RepositoryBase<PriorityLevelContextRepository> { + public constructor(dbStore: DbStore, parentLog: Logger) { + super(dbStore, parentLog) + } + + public async loadPriorityLevelContext( + tenantId: string, + ): Promise<IQueuePriorityCalculationContext> { + const result = await this.db().oneOrNone( + `select plan, "priorityLevel" as "dbPriority" from tenants where id = $(tenantId)`, + { + tenantId, + }, + ) + + if (result) { + return result + } + + throw new Error(`Tenant not found: ${tenantId}!`) + } +} diff --git a/services/libs/common_services/src/services/emitters/dataSinkWorker.emitter.ts 
b/services/libs/common_services/src/services/emitters/dataSinkWorker.emitter.ts new file mode 100644 index 0000000000..20e063192b --- /dev/null +++ b/services/libs/common_services/src/services/emitters/dataSinkWorker.emitter.ts @@ -0,0 +1,96 @@ +import { CrowdQueue, DATA_SINK_WORKER_QUEUE_SETTINGS, SqsClient } from '@crowd/sqs' +import { QueuePriorityContextLoader, QueuePriorityService } from '../priority.service' +import { RedisClient } from '@crowd/redis' +import { UnleashClient } from '@crowd/feature-flags' +import { Logger } from '@crowd/logging' +import { Tracer } from '@crowd/tracing' +import { + CheckResultsQueueMessage, + CreateAndProcessActivityResultQueueMessage, + IActivityData, + IQueueMessage, + ProcessIntegrationResultQueueMessage, + QueuePriorityLevel, +} from '@crowd/types' +import { generateUUIDv1 } from '@crowd/common' + +export class DataSinkWorkerEmitter extends QueuePriorityService { + public constructor( + sqsClient: SqsClient, + redis: RedisClient, + tracer: Tracer, + unleash: UnleashClient | undefined, + priorityLevelCalculationContextLoader: QueuePriorityContextLoader, + parentLog: Logger, + ) { + super( + CrowdQueue.DATA_SINK_WORKER, + DATA_SINK_WORKER_QUEUE_SETTINGS, + sqsClient, + redis, + tracer, + unleash, + priorityLevelCalculationContextLoader, + parentLog, + ) + } + + public async triggerResultProcessing( + tenantId: string, + platform: string, + resultId: string, + sourceId: string, + onboarding: boolean, + deduplicationId?: string, + ) { + await this.sendMessage( + tenantId, + sourceId, + new ProcessIntegrationResultQueueMessage(resultId), + deduplicationId || resultId, + { + onboarding, + }, + ) + } + + public async createAndProcessActivityResult( + tenantId: string, + segmentId: string, + integrationId: string, + activity: IActivityData, + ) { + await this.sendMessage( + tenantId, + generateUUIDv1(), + new CreateAndProcessActivityResultQueueMessage(tenantId, segmentId, integrationId, activity), + undefined, + { + onboarding: true, + }, + ) + } + + public async checkResults() { + await this.sendMessage( + undefined, + 'global', + new CheckResultsQueueMessage(), + 'global', + undefined, + QueuePriorityLevel.SYSTEM, + ) + } + + public sendMessagesBatch<T extends IQueueMessage>( + messages: { + tenantId: string + payload: T + groupId: string + deduplicationId?: string + id?: string + }[], + ): Promise<void> { + return super.sendMessages(messages) + } +} diff --git a/services/libs/common_services/src/services/emitters/index.ts b/services/libs/common_services/src/services/emitters/index.ts new file mode 100644 index 0000000000..2a092a4b17 --- /dev/null +++ b/services/libs/common_services/src/services/emitters/index.ts @@ -0,0 +1,7 @@ +export * from './dataSinkWorker.emitter' +export * from './integrationDataWorker.emitter' +export * from './integrationRunWorker.emitter' +export * from './integrationStreamWorker.emitter' +export * from './integrationSyncWorker.emitter' +export * from './nodejsWorker.emitter' +export * from './searchSyncWorker.emitter' diff --git a/services/libs/common_services/src/services/emitters/integrationDataWorker.emitter.ts b/services/libs/common_services/src/services/emitters/integrationDataWorker.emitter.ts new file mode 100644 index 0000000000..4febba4e2d --- /dev/null +++ b/services/libs/common_services/src/services/emitters/integrationDataWorker.emitter.ts @@ -0,0 +1,40 @@ +import { CrowdQueue, INTEGRATION_DATA_WORKER_QUEUE_SETTINGS, SqsClient } from '@crowd/sqs' +import { QueuePriorityContextLoader, QueuePriorityService } from '../priority.service' +import { 
RedisClient } from '@crowd/redis' +import { Tracer } from '@crowd/tracing' +import { UnleashClient } from '@crowd/feature-flags' +import { Logger } from '@crowd/logging' +import { ProcessStreamDataQueueMessage } from '@crowd/types' + +export class IntegrationDataWorkerEmitter extends QueuePriorityService { + public constructor( + sqsClient: SqsClient, + redis: RedisClient, + tracer: Tracer, + unleash: UnleashClient | undefined, + priorityLevelCalculationContextLoader: QueuePriorityContextLoader, + parentLog: Logger, + ) { + super( + CrowdQueue.INTEGRATION_DATA_WORKER, + INTEGRATION_DATA_WORKER_QUEUE_SETTINGS, + sqsClient, + redis, + tracer, + unleash, + priorityLevelCalculationContextLoader, + parentLog, + ) + } + + public async triggerDataProcessing( + tenantId: string, + platform: string, + dataId: string, + onboarding: boolean, + ) { + await this.sendMessage(tenantId, dataId, new ProcessStreamDataQueueMessage(dataId), dataId, { + onboarding, + }) + } +} diff --git a/services/libs/common_services/src/services/emitters/integrationRunWorker.emitter.ts b/services/libs/common_services/src/services/emitters/integrationRunWorker.emitter.ts new file mode 100644 index 0000000000..37857bb502 --- /dev/null +++ b/services/libs/common_services/src/services/emitters/integrationRunWorker.emitter.ts @@ -0,0 +1,91 @@ +import { CrowdQueue, INTEGRATION_RUN_WORKER_QUEUE_SETTINGS, SqsClient } from '@crowd/sqs' +import { QueuePriorityContextLoader, QueuePriorityService } from '../priority.service' +import { RedisClient } from '@crowd/redis' +import { Tracer } from '@crowd/tracing' +import { UnleashClient } from '@crowd/feature-flags' +import { Logger } from '@crowd/logging' +import { + CheckRunsQueueMessage, + GenerateRunStreamsQueueMessage, + QueuePriorityLevel, + StartIntegrationRunQueueMessage, + StreamProcessedQueueMessage, +} from '@crowd/types' + +export class IntegrationRunWorkerEmitter extends QueuePriorityService { + public constructor( + sqsClient: SqsClient, + redis: RedisClient, + tracer: Tracer, + unleash: UnleashClient | undefined, + priorityLevelCalculationContextLoader: QueuePriorityContextLoader, + parentLog: Logger, + ) { + super( + CrowdQueue.INTEGRATION_RUN_WORKER, + INTEGRATION_RUN_WORKER_QUEUE_SETTINGS, + sqsClient, + redis, + tracer, + unleash, + priorityLevelCalculationContextLoader, + parentLog, + ) + } + + public async checkRuns() { + await this.sendMessage( + undefined, + 'global', + new CheckRunsQueueMessage(), + 'global', + undefined, + QueuePriorityLevel.SYSTEM, + ) + } + + public async triggerIntegrationRun( + tenantId: string, + platform: string, + integrationId: string, + onboarding: boolean, + isManualRun?: boolean, + manualSettings?: unknown, + ): Promise { + await this.sendMessage( + tenantId, + integrationId, + new StartIntegrationRunQueueMessage(integrationId, onboarding, isManualRun, manualSettings), + undefined, + { onboarding }, + ) + } + + public async triggerRunProcessing( + tenantId: string, + platform: string, + runId: string, + onboarding: boolean, + isManualRun?: boolean, + manualSettings?: unknown, + ): Promise { + await this.sendMessage( + tenantId, + runId, + new GenerateRunStreamsQueueMessage(runId, isManualRun, manualSettings), + runId, + { onboarding }, + ) + } + + public async streamProcessed( + tenantId: string, + platform: string, + runId: string, + onboarding: boolean, + ): Promise { + await this.sendMessage(tenantId, runId, new StreamProcessedQueueMessage(runId), undefined, { + onboarding, + }) + } +} diff --git 
a/services/libs/common_services/src/services/emitters/integrationStreamWorker.emitter.ts b/services/libs/common_services/src/services/emitters/integrationStreamWorker.emitter.ts new file mode 100644 index 0000000000..56036eefc3 --- /dev/null +++ b/services/libs/common_services/src/services/emitters/integrationStreamWorker.emitter.ts @@ -0,0 +1,91 @@ +import { UnleashClient } from '@crowd/feature-flags' +import { Logger } from '@crowd/logging' +import { RedisClient } from '@crowd/redis' +import { CrowdQueue, INTEGRATION_STREAM_WORKER_QUEUE_SETTINGS, SqsClient } from '@crowd/sqs' +import { Tracer } from '@crowd/tracing' +import { QueuePriorityContextLoader, QueuePriorityService } from '../priority.service' +import { + CheckStreamsQueueMessage, + ContinueProcessingRunStreamsQueueMessage, + ProcessStreamQueueMessage, + ProcessWebhookStreamQueueMessage, + QueuePriorityLevel, +} from '@crowd/types' +import { generateUUIDv1 } from '@crowd/common' + +export class IntegrationStreamWorkerEmitter extends QueuePriorityService { + public constructor( + sqsClient: SqsClient, + redis: RedisClient, + tracer: Tracer, + unleash: UnleashClient | undefined, + priorityLevelCalculationContextLoader: QueuePriorityContextLoader, + parentLog: Logger, + ) { + super( + CrowdQueue.INTEGRATION_STREAM_WORKER, + INTEGRATION_STREAM_WORKER_QUEUE_SETTINGS, + sqsClient, + redis, + tracer, + unleash, + priorityLevelCalculationContextLoader, + parentLog, + ) + } + + public async checkStreams() { + await this.sendMessage( + undefined, + 'global', + new CheckStreamsQueueMessage(), + 'global', + undefined, + QueuePriorityLevel.SYSTEM, + ) + } + + public async continueProcessingRunStreams( + tenantId: string, + onboarding: boolean, + platform: string, + runId: string, + ): Promise { + await this.sendMessage( + tenantId, + runId, + new ContinueProcessingRunStreamsQueueMessage(runId), + undefined, + { onboarding }, + ) + } + + public async triggerStreamProcessing( + tenantId: string, + platform: string, + streamId: string, + onboarding: boolean, + ): Promise { + await this.sendMessage( + tenantId, + generateUUIDv1(), + new ProcessStreamQueueMessage(streamId), + undefined, + { onboarding }, + ) + } + + public async triggerWebhookProcessing( + tenantId: string, + platform: string, + webhookId: string, + ): Promise { + await this.sendMessage( + tenantId, + generateUUIDv1(), + new ProcessWebhookStreamQueueMessage(webhookId), + undefined, + { onboarding: true }, + ) + } +} diff --git a/services/libs/sqs/src/instances/integrationSyncWorker.ts b/services/libs/common_services/src/services/emitters/integrationSyncWorker.emitter.ts similarity index 74% rename from services/libs/sqs/src/instances/integrationSyncWorker.ts rename to services/libs/common_services/src/services/emitters/integrationSyncWorker.emitter.ts index a0c038ead5..17c55a16b1 100644 --- a/services/libs/sqs/src/instances/integrationSyncWorker.ts +++ b/services/libs/common_services/src/services/emitters/integrationSyncWorker.emitter.ts @@ -1,11 +1,37 @@ +import { UnleashClient } from '@crowd/feature-flags' import { Logger } from '@crowd/logging' -import { AutomationSyncTrigger, IntegrationSyncWorkerQueueMessageType } from '@crowd/types' -import { INTEGRATION_SYNC_WORKER_QUEUE_SETTINGS, SqsClient, SqsQueueEmitter } from '..' 
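// Editor's note — the migration this rename illustrates (a sketch, not part of
// the patch): emitters move from SqsQueueEmitter subclasses in @crowd/sqs to
// QueuePriorityService subclasses in @crowd/common_services, so every
// construction site also provides redis, an optional unleash client and a
// priority-context loader. Assuming sqsClient, redisClient, tracer, unleash,
// dbConnection and log are in scope, as in the main.ts files above:
import {
  IntegrationSyncWorkerEmitter,
  PriorityLevelContextRepository,
  QueuePriorityContextLoader,
} from '@crowd/common_services'
import { DbStore } from '@crowd/database'

// before: new IntegrationSyncWorkerEmitter(sqsClient, tracer, log)
const repo = new PriorityLevelContextRepository(new DbStore(log, dbConnection), log)
const loader: QueuePriorityContextLoader = (tenantId) => repo.loadPriorityLevelContext(tenantId)
const emitter = new IntegrationSyncWorkerEmitter(sqsClient, redisClient, tracer, unleash, loader, log)
await emitter.init()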
+import { RedisClient } from '@crowd/redis' +import { CrowdQueue, INTEGRATION_SYNC_WORKER_QUEUE_SETTINGS, SqsClient } from '@crowd/sqs' import { Tracer } from '@crowd/tracing' +import { QueuePriorityContextLoader, QueuePriorityService } from '../priority.service' +import { + AutomationSyncTrigger, + IIntegrationSyncWorkerEmitter, + IntegrationSyncWorkerQueueMessageType, +} from '@crowd/types' -export class IntegrationSyncWorkerEmitter extends SqsQueueEmitter { - constructor(client: SqsClient, tracer: Tracer, parentLog: Logger) { - super(client, INTEGRATION_SYNC_WORKER_QUEUE_SETTINGS, tracer, parentLog) +export class IntegrationSyncWorkerEmitter + extends QueuePriorityService + implements IIntegrationSyncWorkerEmitter +{ + public constructor( + sqsClient: SqsClient, + redis: RedisClient, + tracer: Tracer, + unleash: UnleashClient | undefined, + priorityLevelCalculationContextLoader: QueuePriorityContextLoader, + parentLog: Logger, + ) { + super( + CrowdQueue.INTEGRATION_SYNC_WORKER, + INTEGRATION_SYNC_WORKER_QUEUE_SETTINGS, + sqsClient, + redis, + tracer, + unleash, + priorityLevelCalculationContextLoader, + parentLog, + ) } public async triggerSyncMarkedMembers(tenantId: string, integrationId: string): Promise { @@ -16,6 +42,7 @@ export class IntegrationSyncWorkerEmitter extends SqsQueueEmitter { throw new Error('integrationId is required!') } await this.sendMessage( + tenantId, integrationId, { type: IntegrationSyncWorkerQueueMessageType.SYNC_ALL_MARKED_MEMBERS, @@ -48,6 +75,7 @@ export class IntegrationSyncWorkerEmitter extends SqsQueueEmitter { } await this.sendMessage( + tenantId, memberId, { type: IntegrationSyncWorkerQueueMessageType.SYNC_MEMBER, @@ -76,6 +104,7 @@ export class IntegrationSyncWorkerEmitter extends SqsQueueEmitter { throw new Error('integrationId is required!') } await this.sendMessage( + tenantId, automationId, { type: IntegrationSyncWorkerQueueMessageType.ONBOARD_AUTOMATION, @@ -99,6 +128,7 @@ export class IntegrationSyncWorkerEmitter extends SqsQueueEmitter { throw new Error('integrationId is required!') } await this.sendMessage( + tenantId, integrationId, { type: IntegrationSyncWorkerQueueMessageType.SYNC_ALL_MARKED_ORGANIZATIONS, @@ -127,6 +157,7 @@ export class IntegrationSyncWorkerEmitter extends SqsQueueEmitter { } await this.sendMessage( + tenantId, organizationId, { type: IntegrationSyncWorkerQueueMessageType.SYNC_ORGANIZATION, diff --git a/services/libs/common_services/src/services/emitters/nodejsWorker.emitter.ts b/services/libs/common_services/src/services/emitters/nodejsWorker.emitter.ts new file mode 100644 index 0000000000..f13b55867f --- /dev/null +++ b/services/libs/common_services/src/services/emitters/nodejsWorker.emitter.ts @@ -0,0 +1,208 @@ +import { generateUUIDv1 } from '@crowd/common' +import { UnleashClient } from '@crowd/feature-flags' +import { Logger } from '@crowd/logging' +import { RedisClient } from '@crowd/redis' +import { CrowdQueue, NODEJS_WORKER_QUEUE_SETTINGS, SqsClient } from '@crowd/sqs' +import { Tracer } from '@crowd/tracing' +import { + AutomationType, + BulkEnrichQueueMessage, + EagleEyeEmailDigestQueueMessage, + EnrichOrganizationQueueMessage, + ExportCSVQueueMessage, + ExportableEntity, + IntegrationDataCheckerQueueMessage, + MergeSuggestionsQueueMessage, + NewActivityAutomationQueueMessage, + NewMemberAutomationQueueMessage, + OrgMergeQueueMessage, + ProcessAutomationQueueMessage, + QueuePriorityLevel, + RefreshSampleDataQueueMessage, + SendgridWebhookQueueMessage, + StripeWebhookQueueMessage, + 
WeeklyAnalyticsEmailQueueMessage, +} from '@crowd/types' +import { QueuePriorityContextLoader, QueuePriorityService } from '../priority.service' + +/* eslint-disable @typescript-eslint/no-explicit-any */ + +export class NodejsWorkerEmitter extends QueuePriorityService { + public constructor( + sqsClient: SqsClient, + redis: RedisClient, + tracer: Tracer, + unleash: UnleashClient | undefined, + priorityLevelCalculationContextLoader: QueuePriorityContextLoader, + parentLog: Logger, + ) { + super( + CrowdQueue.NODEJS_WORKER, + NODEJS_WORKER_QUEUE_SETTINGS, + sqsClient, + redis, + tracer, + unleash, + priorityLevelCalculationContextLoader, + parentLog, + ) + } + + public async processAutomationForNewActivity( + tenantId: string, + activityId: string, + segmentId: string, + onboarding: boolean, + ): Promise { + await this.sendMessage( + tenantId, + `${activityId}--${segmentId}`, + new NewActivityAutomationQueueMessage(tenantId, activityId, segmentId), + `${activityId}--${segmentId}`, + { + onboarding, + }, + ) + } + + public async processAutomationForNewMember( + tenantId: string, + memberId: string, + segmentId: string, + onboarding: boolean, + ): Promise { + await this.sendMessage( + tenantId, + memberId, + new NewMemberAutomationQueueMessage(tenantId, memberId, segmentId), + memberId, + { + onboarding, + }, + ) + } + + public async bulkEnrich( + tenantId: string, + memberIds: string[], + segmentIds: string[], + notifyFrontend = true, + skipCredits = false, + ): Promise { + await this.sendMessage( + tenantId, + generateUUIDv1(), + new BulkEnrichQueueMessage(tenantId, memberIds, segmentIds, notifyFrontend, skipCredits), + ) + } + + public async mergeOrg( + tenantId: string, + primaryOrgId: string, + secondaryOrgId: string, + notifyFrontend = true, + ): Promise { + await this.sendMessage( + tenantId, + generateUUIDv1(), + new OrgMergeQueueMessage(tenantId, primaryOrgId, secondaryOrgId, notifyFrontend), + ) + } + + public async exportCSV( + tenantId: string, + user: string, + entity: ExportableEntity, + segmentIds: string[], + criteria: any, + ): Promise { + await this.sendMessage( + tenantId, + generateUUIDv1(), + new ExportCSVQueueMessage(user, tenantId, entity, segmentIds, criteria), + ) + } + + public async eagleEyeEmailDigest(tenantId: string, user: string): Promise { + await this.sendMessage( + tenantId, + generateUUIDv1(), + new EagleEyeEmailDigestQueueMessage(tenantId, user), + ) + } + + public async integrationDataChecker(tenantId: string, integrationId: string): Promise { + await this.sendMessage( + tenantId, + generateUUIDv1(), + new IntegrationDataCheckerQueueMessage(tenantId, integrationId), + ) + } + + public async processAutomation( + tenantId: string, + type: AutomationType, + automation: any, + eventId: string, + payload: any, + ): Promise { + await this.sendMessage( + tenantId, + generateUUIDv1(), + new ProcessAutomationQueueMessage(tenantId, type, automation, eventId, payload), + ) + } + + public async weeklyAnalyticsEmail(tenantId: string): Promise { + await this.sendMessage( + tenantId, + generateUUIDv1(), + new WeeklyAnalyticsEmailQueueMessage(tenantId), + ) + } + + public async stripeWebhook(event: any): Promise { + await this.sendMessage( + undefined, + generateUUIDv1(), + new StripeWebhookQueueMessage(event), + undefined, + undefined, + QueuePriorityLevel.SYSTEM, + ) + } + + public async sendgridWebhook(event: any): Promise { + await this.sendMessage( + undefined, + generateUUIDv1(), + new SendgridWebhookQueueMessage(event), + undefined, + undefined, + 
QueuePriorityLevel.SYSTEM, + ) + } + + public async refreshSampleData(): Promise { + await this.sendMessage( + undefined, + generateUUIDv1(), + new RefreshSampleDataQueueMessage(), + undefined, + undefined, + QueuePriorityLevel.SYSTEM, + ) + } + + public async enrichOrganizations(tenantId: string, maxEnrichLimit = 0): Promise { + await this.sendMessage( + tenantId, + tenantId, + new EnrichOrganizationQueueMessage(tenantId, maxEnrichLimit), + ) + } + + public async mergeSuggestions(tenantId: string): Promise { + await this.sendMessage(tenantId, tenantId, new MergeSuggestionsQueueMessage(tenantId)) + } +} diff --git a/services/libs/sqs/src/instances/searchSyncWorker.ts b/services/libs/common_services/src/services/emitters/searchSyncWorker.emitter.ts similarity index 55% rename from services/libs/sqs/src/instances/searchSyncWorker.ts rename to services/libs/common_services/src/services/emitters/searchSyncWorker.emitter.ts index 473b47ddb0..cb1e941f3d 100644 --- a/services/libs/sqs/src/instances/searchSyncWorker.ts +++ b/services/libs/common_services/src/services/emitters/searchSyncWorker.emitter.ts @@ -1,14 +1,33 @@ +import { UnleashClient } from '@crowd/feature-flags' import { Logger } from '@crowd/logging' -import { SearchSyncWorkerQueueMessageType } from '@crowd/types' -import { SEARCH_SYNC_WORKER_QUEUE_SETTINGS, SqsClient, SqsQueueEmitter } from '..' +import { RedisClient } from '@crowd/redis' +import { CrowdQueue, SEARCH_SYNC_WORKER_QUEUE_SETTINGS, SqsClient } from '@crowd/sqs' import { Tracer } from '@crowd/tracing' +import { QueuePriorityContextLoader, QueuePriorityService } from '../priority.service' +import { SearchSyncWorkerQueueMessageType } from '@crowd/types' -export class SearchSyncWorkerEmitter extends SqsQueueEmitter { - constructor(client: SqsClient, tracer: Tracer, parentLog: Logger) { - super(client, SEARCH_SYNC_WORKER_QUEUE_SETTINGS, tracer, parentLog) +export class SearchSyncWorkerEmitter extends QueuePriorityService { + public constructor( + sqsClient: SqsClient, + redis: RedisClient, + tracer: Tracer, + unleash: UnleashClient | undefined, + priorityLevelCalculationContextLoader: QueuePriorityContextLoader, + parentLog: Logger, + ) { + super( + CrowdQueue.SEARCH_SYNC_WORKER, + SEARCH_SYNC_WORKER_QUEUE_SETTINGS, + sqsClient, + redis, + tracer, + unleash, + priorityLevelCalculationContextLoader, + parentLog, + ) } - public async triggerMemberSync(tenantId: string, memberId: string) { + public async triggerMemberSync(tenantId: string, memberId: string, onboarding: boolean) { if (!tenantId) { throw new Error('tenantId is required!') } @@ -17,12 +36,16 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { } await this.sendMessage( + tenantId, memberId, { type: SearchSyncWorkerQueueMessageType.SYNC_MEMBER, memberId, }, memberId, + { + onboarding, + }, ) } @@ -30,23 +53,39 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { if (!tenantId) { throw new Error('tenantId is required!') } - await this.sendMessage(tenantId, { + await this.sendMessage(tenantId, tenantId, { type: SearchSyncWorkerQueueMessageType.SYNC_TENANT_MEMBERS, tenantId, }) } - public async triggerOrganizationMembersSync(organizationId: string) { + public async triggerOrganizationMembersSync( + tenantId: string, + organizationId: string, + onboarding: boolean, + ) { + if (!tenantId) { + throw new Error('tenantId is required!') + } + if (!organizationId) { throw new Error('organizationId is required!') } - await this.sendMessage(organizationId, { - type: 
SearchSyncWorkerQueueMessageType.SYNC_ORGANIZATION_MEMBERS, + await this.sendMessage( + tenantId, organizationId, - }) + { + type: SearchSyncWorkerQueueMessageType.SYNC_ORGANIZATION_MEMBERS, + organizationId, + }, + undefined, + { + onboarding, + }, + ) } - public async triggerRemoveMember(tenantId: string, memberId: string) { + public async triggerRemoveMember(tenantId: string, memberId: string, onboarding: boolean) { if (!tenantId) { throw new Error('tenantId is required!') } @@ -54,23 +93,31 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { throw new Error('memberId is required!') } - await this.sendMessage(memberId, { - type: SearchSyncWorkerQueueMessageType.REMOVE_MEMBER, + await this.sendMessage( + tenantId, memberId, - }) + { + type: SearchSyncWorkerQueueMessageType.REMOVE_MEMBER, + memberId, + }, + undefined, + { + onboarding, + }, + ) } public async triggerMemberCleanup(tenantId: string) { if (!tenantId) { throw new Error('tenantId is required!') } - await this.sendMessage(tenantId, { + await this.sendMessage(tenantId, tenantId, { type: SearchSyncWorkerQueueMessageType.CLEANUP_TENANT_MEMBERS, tenantId, }) } - public async triggerActivitySync(tenantId: string, activityId: string) { + public async triggerActivitySync(tenantId: string, activityId: string, onboarding: boolean) { if (!tenantId) { throw new Error('tenantId is required!') } @@ -79,12 +126,16 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { } await this.sendMessage( + tenantId, activityId, { type: SearchSyncWorkerQueueMessageType.SYNC_ACTIVITY, activityId, }, activityId, + { + onboarding, + }, ) } @@ -92,23 +143,39 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { if (!tenantId) { throw new Error('tenantId is required!') } - await this.sendMessage(tenantId, { + await this.sendMessage(tenantId, tenantId, { type: SearchSyncWorkerQueueMessageType.SYNC_TENANT_ACTIVITIES, tenantId, }) } - public async triggerOrganizationActivitiesSync(organizationId: string) { + public async triggerOrganizationActivitiesSync( + tenantId: string, + organizationId: string, + onboarding: boolean, + ) { + if (!tenantId) { + throw new Error('tenantId is required!') + } + if (!organizationId) { throw new Error('organizationId is required!') } - await this.sendMessage(organizationId, { - type: SearchSyncWorkerQueueMessageType.SYNC_ORGANIZATION_ACTIVITIES, + await this.sendMessage( + tenantId, organizationId, - }) + { + type: SearchSyncWorkerQueueMessageType.SYNC_ORGANIZATION_ACTIVITIES, + organizationId, + }, + undefined, + { + onboarding, + }, + ) } - public async triggerRemoveActivity(tenantId: string, activityId: string) { + public async triggerRemoveActivity(tenantId: string, activityId: string, onboarding: boolean) { if (!tenantId) { throw new Error('tenantId is required!') } @@ -116,23 +183,35 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { throw new Error('activityId is required!') } - await this.sendMessage(activityId, { - type: SearchSyncWorkerQueueMessageType.REMOVE_ACTIVITY, + await this.sendMessage( + tenantId, activityId, - }) + { + type: SearchSyncWorkerQueueMessageType.REMOVE_ACTIVITY, + activityId, + }, + undefined, + { + onboarding, + }, + ) } public async triggerActivityCleanup(tenantId: string) { if (!tenantId) { throw new Error('tenantId is required!') } - await this.sendMessage(tenantId, { + await this.sendMessage(tenantId, tenantId, { type: SearchSyncWorkerQueueMessageType.CLEANUP_TENANT_ACTIVITIES, tenantId, }) } - public async triggerOrganizationSync(tenantId: 
string, organizationId: string) { + public async triggerOrganizationSync( + tenantId: string, + organizationId: string, + onboarding: boolean, + ) { if (!tenantId) { throw new Error('tenantId is required!') } @@ -141,12 +220,16 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { } await this.sendMessage( + tenantId, organizationId, { type: SearchSyncWorkerQueueMessageType.SYNC_ORGANIZATION, organizationId, }, organizationId, + { + onboarding, + }, ) } @@ -155,6 +238,7 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { throw new Error('tenantId is required!') } await this.sendMessage( + tenantId, tenantId, { type: SearchSyncWorkerQueueMessageType.SYNC_TENANT_ORGANIZATIONS, @@ -164,7 +248,11 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { ) } - public async triggerRemoveOrganization(tenantId: string, organizationId: string) { + public async triggerRemoveOrganization( + tenantId: string, + organizationId: string, + onboarding: boolean, + ) { if (!tenantId) { throw new Error('tenantId is required!') } @@ -172,10 +260,18 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { throw new Error('organizationId is required!') } - await this.sendMessage(organizationId, { - type: SearchSyncWorkerQueueMessageType.REMOVE_ORGANIZATION, + await this.sendMessage( + tenantId, organizationId, - }) + { + type: SearchSyncWorkerQueueMessageType.REMOVE_ORGANIZATION, + organizationId, + }, + undefined, + { + onboarding, + }, + ) } public async triggerOrganizationCleanup(tenantId: string) { @@ -183,6 +279,7 @@ export class SearchSyncWorkerEmitter extends SqsQueueEmitter { throw new Error('tenantId is required!') } await this.sendMessage( + tenantId, tenantId, { type: SearchSyncWorkerQueueMessageType.CLEANUP_TENANT_ORGANIZATIONS, diff --git a/services/libs/common_services/src/services/index.ts b/services/libs/common_services/src/services/index.ts new file mode 100644 index 0000000000..8967b647ab --- /dev/null +++ b/services/libs/common_services/src/services/index.ts @@ -0,0 +1,2 @@ +export * from './priority.service' +export * from './emitters' diff --git a/services/libs/common_services/src/services/priority.service.ts b/services/libs/common_services/src/services/priority.service.ts new file mode 100644 index 0000000000..4c26b9af85 --- /dev/null +++ b/services/libs/common_services/src/services/priority.service.ts @@ -0,0 +1,219 @@ +import { EDITION, IS_DEV_ENV, IS_STAGING_ENV, IS_TEST_ENV, groupBy } from '@crowd/common' +import { UnleashClient, isFeatureEnabled } from '@crowd/feature-flags' +import { Logger, getChildLogger } from '@crowd/logging' +import { RedisCache, RedisClient } from '@crowd/redis' +import { CrowdQueue, ISqsQueueConfig, SqsClient, SqsPrioritizedQueueEmitter } from '@crowd/sqs' +import { Tracer } from '@crowd/tracing' +import { + FeatureFlag, + IQueuePriorityCalculationContext, + IQueueMessage, + QueuePriorityLevel, + TenantPlans, + Edition, +} from '@crowd/types' + +export type QueuePriorityContextLoader = ( + tenantId: string, +) => Promise<IQueuePriorityCalculationContext> + +export class QueuePriorityService { + private readonly log: Logger + private readonly cache: RedisCache + + private readonly emitter: SqsPrioritizedQueueEmitter + + public constructor( + public readonly queue: CrowdQueue, + private readonly queueConfig: ISqsQueueConfig, + private readonly sqsClient: SqsClient, + private readonly redis: RedisClient, + private readonly tracer: Tracer, + private readonly unleash: UnleashClient | undefined, + private readonly priorityLevelCalculationContextLoader: 
QueuePriorityContextLoader, + parentLog: Logger, + ) { + this.log = getChildLogger(this.constructor.name, parentLog) + this.cache = new RedisCache('queue-priority', redis, this.log) + this.emitter = new SqsPrioritizedQueueEmitter( + this.sqsClient, + this.queueConfig, + this.tracer, + this.log, + ) + } + + public async init(): Promise<void> { + await this.emitter.init() + } + + public async setMessageVisibilityTimeout( + tenantId: string, + receiptHandle: string, + newVisibility: number, + ): Promise<void> { + // feature flag will be cached for 5 minutes + if ( + await isFeatureEnabled( + FeatureFlag.PRIORITIZED_QUEUES, + async () => { + return { + tenantId, + } + }, + this.unleash, + this.redis, + 5 * 60, + tenantId, + ) + ) { + const priorityLevel = await this.getPriorityLevel( + tenantId, + this.priorityLevelCalculationContextLoader, + ) + + return this.emitter.setMessageVisibilityTimeout(receiptHandle, newVisibility, priorityLevel) + } else { + return this.emitter.setMessageVisibilityTimeout(receiptHandle, newVisibility) + } + } + + public async sendMessages<T extends IQueueMessage>( + messages: { + tenantId: string + payload: T + groupId: string + deduplicationId?: string + id?: string + }[], + ): Promise<void> { + const grouped = groupBy(messages, (m) => m.tenantId) + + for (const tenantId of Array.from(grouped.keys())) { + // feature flag will be cached for 5 minutes + if ( + await isFeatureEnabled( + FeatureFlag.PRIORITIZED_QUEUES, + async () => { + return { + tenantId, + } + }, + this.unleash, + this.redis, + 5 * 60, + tenantId, + ) + ) { + const priorityLevel = await this.getPriorityLevel( + tenantId, + this.priorityLevelCalculationContextLoader, + ) + + return this.emitter.sendMessages( + messages.map((m) => { + return { ...m, priorityLevel } + }), + ) + } else { + return this.emitter.sendMessages(messages) + } + } + } + + public async sendMessage<T extends IQueueMessage>( + tenantId: string | undefined, + groupId: string, + message: T, + deduplicationId?: string, + priorityLevelContextOverride?: unknown, + priorityLevelOverride?: QueuePriorityLevel, + ): Promise<void> { + // feature flag will be cached for 5 minutes + if ( + await isFeatureEnabled( + FeatureFlag.PRIORITIZED_QUEUES, + async () => { + return { + tenantId, + } + }, + this.unleash, + this.redis, + 5 * 60, + tenantId, + ) + ) { + let priorityLevel = priorityLevelOverride + if (!priorityLevel) { + priorityLevel = await this.getPriorityLevel( + tenantId, + this.priorityLevelCalculationContextLoader, + priorityLevelContextOverride, + ) + } else if (IS_DEV_ENV || IS_TEST_ENV || IS_STAGING_ENV) { + priorityLevel = QueuePriorityLevel.NORMAL + } + + return this.emitter.sendMessage(groupId, message, deduplicationId, priorityLevel) + } else { + return this.emitter.sendMessage(groupId, message, deduplicationId) + } + } + + private async getPriorityLevel( + tenantId: string, + loader: QueuePriorityContextLoader, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + override?: any, + ): Promise<QueuePriorityLevel> { + if (IS_DEV_ENV || IS_TEST_ENV || IS_STAGING_ENV) { + return QueuePriorityLevel.NORMAL + } + + const cached = await this.cache.get(tenantId) + if (cached) { + return cached as QueuePriorityLevel + } + + let ctx = await loader(tenantId) + if (override) { + ctx = { ...ctx, ...override } + } + + const priority = this.calculateQueuePriorityLevel(ctx) + + // cache for 5 minutes + await this.cache.set(tenantId, priority, 5 * 60) + + return priority + } + + private calculateQueuePriorityLevel(ctx: IQueuePriorityCalculationContext): QueuePriorityLevel { + if (ctx.dbPriority) { + return ctx.dbPriority + } + + if (EDITION 
+  private calculateQueuePriorityLevel(ctx: IQueuePriorityCalculationContext): QueuePriorityLevel {
+    if (ctx.dbPriority) {
+      return ctx.dbPriority
+    }
+
+    if (EDITION === Edition.LFX) {
+      if (ctx.onboarding) {
+        return QueuePriorityLevel.HIGH
+      }
+
+      return QueuePriorityLevel.NORMAL
+    }
+
+    if (ctx.plan === TenantPlans.Essential) {
+      if (ctx.onboarding) {
+        return QueuePriorityLevel.HIGH
+      }
+
+      return QueuePriorityLevel.NORMAL
+    } else {
+      if (ctx.onboarding) {
+        return QueuePriorityLevel.URGENT
+      }
+
+      return QueuePriorityLevel.HIGH
+    }
+  }
+}
diff --git a/services/libs/common_services/tsconfig.json b/services/libs/common_services/tsconfig.json
new file mode 100644
index 0000000000..ed0f58b824
--- /dev/null
+++ b/services/libs/common_services/tsconfig.json
@@ -0,0 +1,17 @@
+{
+  "compilerOptions": {
+    "target": "es2017",
+    "module": "commonjs",
+    "lib": ["es6", "es7", "es2017", "es2017.object", "es2015.promise"],
+    "skipLibCheck": true,
+    "sourceMap": true,
+    "moduleResolution": "node",
+    "experimentalDecorators": true,
+    "esModuleInterop": true,
+    "baseUrl": "./src",
+    "paths": {
+      "@crowd/*": ["../../*/src"]
+    }
+  },
+  "include": ["src/**/*"]
+}
diff --git a/services/libs/feature-flags/src/index.ts b/services/libs/feature-flags/src/index.ts
index fc3ec6b1f7..22a074b0e6 100644
--- a/services/libs/feature-flags/src/index.ts
+++ b/services/libs/feature-flags/src/index.ts
@@ -57,7 +57,7 @@ export const getUnleashClient = async (cfg: IUnleashConfig): Promise
-  contextLoader: () => Promise,
+  contextLoader: UnleashContextLoader,
   client?: Unleash,
   redis?: RedisClient,
   redisTimeoutSeconds?: number,
@@ -95,3 +95,6 @@ export const isFeatureEnabled = async (
 }
 
 export * from 'unleash-client'
+
+export type UnleashClient = Unleash
+export type UnleashContextLoader = () => Promise
diff --git a/services/libs/integrations/src/types.ts b/services/libs/integrations/src/types.ts
index 7ac9ffbeba..5807f494b9 100644
--- a/services/libs/integrations/src/types.ts
+++ b/services/libs/integrations/src/types.ts
@@ -1,21 +1,19 @@
+import { DbConnection, DbTransaction } from '@crowd/database'
+import { Logger } from '@crowd/logging'
 import {
-  IMemberAttribute,
-  IActivityData,
-  IntegrationResultType,
   Entity,
+  IActivityData,
   IAutomationData,
-} from '@crowd/types'
-import { Logger } from '@crowd/logging'
-import {
   ICache,
+  IConcurrentRequestLimiter,
   IIntegration,
   IIntegrationStream,
+  IIntegrationSyncWorkerEmitter,
+  IMemberAttribute,
   IRateLimiter,
-  IConcurrentRequestLimiter,
+  IntegrationResultType,
 } from '@crowd/types'
-import { DbConnection, DbTransaction } from '@crowd/database'
-import { IntegrationSyncWorkerEmitter } from '@crowd/sqs'
 import { IBatchOperationResult } from './integrations/premium/hubspot/api/types'
 
 export interface IIntegrationContext {
@@ -36,7 +34,7 @@ export interface IIntegrationContext {
 }
 
 export interface IIntegrationStartRemoteSyncContext {
-  integrationSyncWorkerEmitter: IntegrationSyncWorkerEmitter
+  integrationSyncWorkerEmitter: IIntegrationSyncWorkerEmitter
   integration: IIntegration
   automations: IAutomationData[]
   tenantId: string
diff --git a/services/libs/opensearch/src/apiClient.ts b/services/libs/opensearch/src/apiClient.ts
index 2c6470186f..d54e45081f 100644
--- a/services/libs/opensearch/src/apiClient.ts
+++ b/services/libs/opensearch/src/apiClient.ts
@@ -32,7 +32,10 @@ export class SearchSyncApiClient {
     })
   }
 
-  public async triggerOrganizationMembersSync(organizationId: string): Promise<void> {
+  public async triggerOrganizationMembersSync(
+    tenantId: string,
+    organizationId: string,
+  ): Promise<void> {
     if (!organizationId) {
       throw new Error('organizationId is required!')
     }
@@ -82,7 +85,10 @@ export class SearchSyncApiClient {
     })
   }
 
-  public async 
triggerOrganizationActivitiesSync(organizationId: string): Promise { + public async triggerOrganizationActivitiesSync( + tenantId: string, + organizationId: string, + ): Promise { if (!organizationId) { throw new Error('organizationId is required!') } diff --git a/services/libs/sqs/src/config.ts b/services/libs/sqs/src/config.ts index 859f2f0254..062fec30f9 100644 --- a/services/libs/sqs/src/config.ts +++ b/services/libs/sqs/src/config.ts @@ -1,4 +1,5 @@ import { + CrowdQueue, ISqsQueueConfig, SqsFifoThroughputLimitType, SqsQueueDeduplicationType, @@ -6,7 +7,7 @@ import { } from './types' export const INTEGRATION_RUN_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { - name: 'integration-run-worker', + name: CrowdQueue.INTEGRATION_RUN_WORKER, type: SqsQueueType.FIFO, waitTimeSeconds: 20, // seconds visibilityTimeout: 30, // seconds @@ -17,7 +18,7 @@ export const INTEGRATION_RUN_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { } export const INTEGRATION_STREAM_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { - name: 'integration-stream-worker', + name: CrowdQueue.INTEGRATION_STREAM_WORKER, type: SqsQueueType.FIFO, waitTimeSeconds: 20, // seconds visibilityTimeout: 30, // seconds @@ -28,7 +29,7 @@ export const INTEGRATION_STREAM_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { } export const INTEGRATION_DATA_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { - name: 'integration-data-worker', + name: CrowdQueue.INTEGRATION_DATA_WORKER, type: SqsQueueType.FIFO, waitTimeSeconds: 20, // seconds visibilityTimeout: 30, // seconds @@ -39,7 +40,7 @@ export const INTEGRATION_DATA_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { } export const DATA_SINK_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { - name: 'data-sink-worker', + name: CrowdQueue.DATA_SINK_WORKER, type: SqsQueueType.FIFO, waitTimeSeconds: 20, // seconds visibilityTimeout: 30, // seconds @@ -50,7 +51,7 @@ export const DATA_SINK_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { } export const NODEJS_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { - name: 'nodejs-worker', + name: CrowdQueue.NODEJS_WORKER, type: SqsQueueType.FIFO, waitTimeSeconds: 20, // seconds visibilityTimeout: 30, // seconds @@ -61,7 +62,7 @@ export const NODEJS_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { } export const SEARCH_SYNC_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { - name: 'search-sync-worker', + name: CrowdQueue.SEARCH_SYNC_WORKER, type: SqsQueueType.FIFO, waitTimeSeconds: 20, // seconds visibilityTimeout: 30, // seconds @@ -72,7 +73,7 @@ export const SEARCH_SYNC_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { } export const INTEGRATION_SYNC_WORKER_QUEUE_SETTINGS: ISqsQueueConfig = { - name: 'integration-sync-worker', + name: CrowdQueue.INTEGRATION_SYNC_WORKER, type: SqsQueueType.FIFO, waitTimeSeconds: 20, // seconds visibilityTimeout: 30, // seconds diff --git a/services/libs/sqs/src/index.ts b/services/libs/sqs/src/index.ts index e61464d4e5..bd160a11da 100644 --- a/services/libs/sqs/src/index.ts +++ b/services/libs/sqs/src/index.ts @@ -2,4 +2,4 @@ export * from './types' export * from './client' export * from './queue' export * from './config' -export * from './instances' +export * from './prioritization' diff --git a/services/libs/sqs/src/instances/dataSinkWorker.ts b/services/libs/sqs/src/instances/dataSinkWorker.ts deleted file mode 100644 index ec84ba4c10..0000000000 --- a/services/libs/sqs/src/instances/dataSinkWorker.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { Logger } from '@crowd/logging' -import { DATA_SINK_WORKER_QUEUE_SETTINGS } from '../config' -import { SqsQueueEmitter } from '../queue' -import { 
SqsClient } from '../types' -import { - CreateAndProcessActivityResultQueueMessage, - IActivityData, - ProcessIntegrationResultQueueMessage, - CheckResultsQueueMessage, -} from '@crowd/types' -import { Tracer } from '@crowd/tracing' - -export class DataSinkWorkerEmitter extends SqsQueueEmitter { - constructor(client: SqsClient, tracer: Tracer, parentLog: Logger) { - super(client, DATA_SINK_WORKER_QUEUE_SETTINGS, tracer, parentLog) - } - - public async triggerResultProcessing( - tenantId: string, - platform: string, - resultId: string, - sourceId: string, - deduplicationId?: string, - ) { - await this.sendMessage( - sourceId, - new ProcessIntegrationResultQueueMessage(resultId), - deduplicationId || resultId, - ) - } - - public async createAndProcessActivityResult( - tenantId: string, - segmentId: string, - integrationId: string, - activity: IActivityData, - ) { - await this.sendMessage( - new Date().toISOString(), - new CreateAndProcessActivityResultQueueMessage(tenantId, segmentId, integrationId, activity), - ) - } - - public async checkResults() { - await this.sendMessage('global', new CheckResultsQueueMessage()) - } -} diff --git a/services/libs/sqs/src/instances/index.ts b/services/libs/sqs/src/instances/index.ts deleted file mode 100644 index d98d6b2ba5..0000000000 --- a/services/libs/sqs/src/instances/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -export * from './integrationRunWorker' -export * from './integrationStreamWorker' -export * from './integrationDataWorker' -export * from './dataSinkWorker' -export * from './nodejsWorker' -export * from './searchSyncWorker' -export * from './integrationSyncWorker' diff --git a/services/libs/sqs/src/instances/integrationDataWorker.ts b/services/libs/sqs/src/instances/integrationDataWorker.ts deleted file mode 100644 index 6607db7c7f..0000000000 --- a/services/libs/sqs/src/instances/integrationDataWorker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Logger } from '@crowd/logging' -import { INTEGRATION_DATA_WORKER_QUEUE_SETTINGS } from '../config' -import { SqsQueueEmitter } from '../queue' -import { SqsClient } from '../types' -import { ProcessStreamDataQueueMessage } from '@crowd/types' -import { Tracer } from '@crowd/tracing' - -export class IntegrationDataWorkerEmitter extends SqsQueueEmitter { - constructor(client: SqsClient, tracer: Tracer, parentLog: Logger) { - super(client, INTEGRATION_DATA_WORKER_QUEUE_SETTINGS, tracer, parentLog) - } - - public async triggerDataProcessing(tenantId: string, platform: string, dataId: string) { - await this.sendMessage(dataId, new ProcessStreamDataQueueMessage(dataId), dataId) - } -} diff --git a/services/libs/sqs/src/instances/integrationRunWorker.ts b/services/libs/sqs/src/instances/integrationRunWorker.ts deleted file mode 100644 index a64e6c90a0..0000000000 --- a/services/libs/sqs/src/instances/integrationRunWorker.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { Logger } from '@crowd/logging' -import { INTEGRATION_RUN_WORKER_QUEUE_SETTINGS } from '../config' -import { SqsQueueEmitter } from '../queue' -import { SqsClient } from '../types' -import { - StreamProcessedQueueMessage, - GenerateRunStreamsQueueMessage, - StartIntegrationRunQueueMessage, - CheckRunsQueueMessage, -} from '@crowd/types' -import { Tracer } from '@crowd/tracing' - -export class IntegrationRunWorkerEmitter extends SqsQueueEmitter { - constructor(client: SqsClient, tracer: Tracer, parentLog: Logger) { - super(client, INTEGRATION_RUN_WORKER_QUEUE_SETTINGS, tracer, parentLog) - } - - public async checkRuns() { - await 
this.sendMessage('global', new CheckRunsQueueMessage()) - } - - public async triggerIntegrationRun( - tenantId: string, - platform: string, - integrationId: string, - onboarding: boolean, - isManualRun?: boolean, - manualSettings?: unknown, - ): Promise { - await this.sendMessage( - integrationId, - new StartIntegrationRunQueueMessage(integrationId, onboarding, isManualRun, manualSettings), - ) - } - - public async triggerRunProcessing( - tenantId: string, - platform: string, - runId: string, - isManualRun?: boolean, - manualSettings?: unknown, - ): Promise { - await this.sendMessage( - runId, - new GenerateRunStreamsQueueMessage(runId, isManualRun, manualSettings), - runId, - ) - } - - public async streamProcessed(tenantId: string, platform: string, runId: string): Promise { - await this.sendMessage(runId, new StreamProcessedQueueMessage(runId)) - } -} diff --git a/services/libs/sqs/src/instances/integrationStreamWorker.ts b/services/libs/sqs/src/instances/integrationStreamWorker.ts deleted file mode 100644 index 45cbc2a54d..0000000000 --- a/services/libs/sqs/src/instances/integrationStreamWorker.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { Logger } from '@crowd/logging' -import { INTEGRATION_STREAM_WORKER_QUEUE_SETTINGS } from '../config' -import { SqsQueueEmitter } from '../queue' -import { SqsClient } from '../types' -import { - CheckStreamsQueueMessage, - ContinueProcessingRunStreamsQueueMessage, - ProcessStreamQueueMessage, - ProcessWebhookStreamQueueMessage, -} from '@crowd/types' -import { generateUUIDv1 } from '@crowd/common' -import { Tracer } from '@crowd/tracing' - -export class IntegrationStreamWorkerEmitter extends SqsQueueEmitter { - constructor(client: SqsClient, tracer: Tracer, parentLog: Logger) { - super(client, INTEGRATION_STREAM_WORKER_QUEUE_SETTINGS, tracer, parentLog) - } - - public async checkStreams() { - await this.sendMessage('global', new CheckStreamsQueueMessage()) - } - - public async continueProcessingRunStreams( - tenantId: string, - platform: string, - runId: string, - ): Promise { - await this.sendMessage(runId, new ContinueProcessingRunStreamsQueueMessage(runId)) - } - - public async triggerStreamProcessing( - tenantId: string, - platform: string, - streamId: string, - ): Promise { - await this.sendMessage(generateUUIDv1(), new ProcessStreamQueueMessage(streamId)) - } - - public async triggerWebhookProcessing( - tenantId: string, - platform: string, - webhookId: string, - ): Promise { - await this.sendMessage(generateUUIDv1(), new ProcessWebhookStreamQueueMessage(webhookId)) - } -} diff --git a/services/libs/sqs/src/instances/nodejsWorker.ts b/services/libs/sqs/src/instances/nodejsWorker.ts deleted file mode 100644 index 744ddacbdd..0000000000 --- a/services/libs/sqs/src/instances/nodejsWorker.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { Logger } from '@crowd/logging' -import { NODEJS_WORKER_QUEUE_SETTINGS } from '../config' -import { SqsQueueEmitter } from '../queue' -import { SqsClient } from '../types' -import { - IQueueMessage, - NewActivityAutomationQueueMessage, - NewMemberAutomationQueueMessage, -} from '@crowd/types' -import { Tracer } from '@crowd/tracing' - -export class NodejsWorkerEmitter extends SqsQueueEmitter { - constructor(client: SqsClient, tracer: Tracer, parentLog: Logger) { - super(client, NODEJS_WORKER_QUEUE_SETTINGS, tracer, parentLog) - } - - public override sendMessage( - groupId: string, - message: IQueueMessage, - deduplicationId: string, - ): Promise { - return super.sendMessage(groupId, message, deduplicationId) - } - - public 
async processAutomationForNewActivity(
-    tenantId: string,
-    activityId: string,
-    segmentId: string,
-  ): Promise<void> {
-    await this.sendMessage(
-      `${activityId}--${segmentId}`,
-      new NewActivityAutomationQueueMessage(tenantId, activityId, segmentId),
-      `${activityId}--${segmentId}`,
-    )
-  }
-
-  public async processAutomationForNewMember(tenantId: string, memberId: string): Promise<void> {
-    await this.sendMessage(
-      memberId,
-      new NewMemberAutomationQueueMessage(tenantId, memberId),
-      memberId,
-    )
-  }
-}
diff --git a/services/libs/sqs/src/prioritization.ts b/services/libs/sqs/src/prioritization.ts
new file mode 100644
index 0000000000..8fa9ec7dcc
--- /dev/null
+++ b/services/libs/sqs/src/prioritization.ts
@@ -0,0 +1,172 @@
+import { groupBy } from '@crowd/common'
+import { Logger, getChildLogger } from '@crowd/logging'
+import { Tracer } from '@crowd/tracing'
+import { IQueueMessage, ISqsQueueReceiver, QueuePriorityLevel } from '@crowd/types'
+import { SqsQueueEmitter, SqsQueueReceiver } from './queue'
+import { ISqsQueueConfig, SqsClient } from './types'
+
+export abstract class SqsPrioritizedQueueReciever {
+  protected readonly log: Logger
+  private readonly levelReceiver: ISqsQueueReceiver
+  private readonly defaultReceiver: ISqsQueueReceiver
+
+  public constructor(
+    level: QueuePriorityLevel,
+    sqsClient: SqsClient,
+    queueConf: ISqsQueueConfig,
+    maxConcurrentMessageProcessing: number,
+    protected readonly tracer: Tracer,
+    parentLog: Logger,
+    deleteMessageImmediately = false,
+    visibilityTimeoutSeconds?: number,
+    receiveMessageCount?: number,
+  ) {
+    this.log = getChildLogger(this.constructor.name, parentLog, {
+      queueName: queueConf.name,
+      type: queueConf.type,
+    })
+
+    const processFunc = this.processMessage.bind(this)
+
+    this.defaultReceiver = new (class extends SqsQueueReceiver {
+      constructor() {
+        super(
+          sqsClient,
+          queueConf,
+          maxConcurrentMessageProcessing,
+          tracer,
+          parentLog,
+          deleteMessageImmediately,
+          visibilityTimeoutSeconds,
+          receiveMessageCount,
+        )
+      }
+
+      public async processMessage(data: IQueueMessage, receiptHandle?: string): Promise<void> {
+        return processFunc(data, receiptHandle)
+      }
+    })()
+
+    const config = { ...queueConf, name: `${queueConf.name}-${level}` }
+    this.levelReceiver = new (class extends SqsQueueReceiver {
+      constructor() {
+        super(
+          sqsClient,
+          config,
+          maxConcurrentMessageProcessing,
+          tracer,
+          parentLog,
+          deleteMessageImmediately,
+          visibilityTimeoutSeconds,
+          receiveMessageCount,
+        )
+      }
+
+      public async processMessage(data: IQueueMessage, receiptHandle?: string): Promise<void> {
+        return processFunc(data, receiptHandle)
+      }
+    })()
+  }
+
+  public async start(): Promise<void> {
+    await Promise.all([this.defaultReceiver.start(), this.levelReceiver.start()])
+  }
+
+  public stop(): void {
+    this.defaultReceiver.stop()
+    this.levelReceiver.stop()
+  }
+
+  public abstract processMessage(data: IQueueMessage, receiptHandle?: string): Promise<void>
+}
+
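The receiver above drains two physical queues with a single handler: the original queue (messages produced with the feature flag off, or before prioritization existed) and its level-suffixed twin. A rough sketch of that fan-in idea, using invented stand-in names (QueueSource is not the @crowd SqsQueueReceiver API):

    // two queues, one handler: the bound handler plays the role of processFunc above
    interface QueueSource {
      start(onMessage: (body: string) => Promise<void>): Promise<void>
      stop(): void
    }

    class PrioritizedConsumer {
      constructor(
        private readonly defaultQueue: QueueSource,
        private readonly levelQueue: QueueSource, // e.g. the "-normal" twin
      ) {}

      async start(handle: (body: string) => Promise<void>): Promise<void> {
        // both receivers share the same handler, so processing logic stays in one place
        await Promise.all([this.defaultQueue.start(handle), this.levelQueue.start(handle)])
      }

      stop(): void {
        this.defaultQueue.stop()
        this.levelQueue.stop()
      }
    }
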
+export class SqsPrioritizedQueueEmitter {
+  private readonly emittersMap: Map<QueuePriorityLevel, SqsQueueEmitter> = new Map()
+  private readonly defaultEmitter: SqsQueueEmitter
+
+  public constructor(
+    sqsClient: SqsClient,
+    queueConf: ISqsQueueConfig,
+    tracer: Tracer,
+    parentLog: Logger,
+  ) {
+    this.defaultEmitter = new SqsQueueEmitter(sqsClient, queueConf, tracer, parentLog)
+    for (const level of Object.values(QueuePriorityLevel)) {
+      const config = { ...queueConf, name: `${queueConf.name}-${level}` }
+      this.emittersMap.set(level, new SqsQueueEmitter(sqsClient, config, tracer, parentLog))
+    }
+  }
+
+  public async init(): Promise<void> {
+    await Promise.all(
+      Array.from(this.emittersMap.values())
+        .map((e) => e.init())
+        .concat(this.defaultEmitter.init()),
+    )
+  }
+
+  public async setMessageVisibilityTimeout(
+    receiptHandle: string,
+    newVisibility: number,
+    priorityLevel?: QueuePriorityLevel,
+  ): Promise<void> {
+    if (priorityLevel) {
+      const emitter = this.emittersMap.get(priorityLevel)
+      if (!emitter) {
+        throw new Error(`Unknown priority level: ${priorityLevel}`)
+      }
+      return emitter.setMessageVisibilityTimeout(receiptHandle, newVisibility)
+    } else {
+      return this.defaultEmitter.setMessageVisibilityTimeout(receiptHandle, newVisibility)
+    }
+  }
+
+  public async sendMessage<T extends IQueueMessage>(
+    groupId: string,
+    message: T,
+    deduplicationId?: string,
+    priorityLevel?: QueuePriorityLevel,
+  ): Promise<void> {
+    if (priorityLevel) {
+      const emitter = this.emittersMap.get(priorityLevel)
+      if (!emitter) {
+        throw new Error(`Unknown priority level: ${priorityLevel}`)
+      }
+      return emitter.sendMessage(groupId, message, deduplicationId)
+    } else {
+      return this.defaultEmitter.sendMessage(groupId, message, deduplicationId)
+    }
+  }
+
+  public async sendMessages<T extends IQueueMessage>(
+    messages: {
+      payload: T
+      groupId: string
+      deduplicationId?: string
+      id?: string
+      priorityLevel?: QueuePriorityLevel
+    }[],
+  ): Promise<void> {
+    const grouped = groupBy(
+      messages.filter((m) => m.priorityLevel !== undefined),
+      (m) => m.priorityLevel,
+    )
+
+    for (const level of Array.from(grouped.keys()) as QueuePriorityLevel[]) {
+      const emitter = this.emittersMap.get(level)
+      if (!emitter) {
+        throw new Error(`Unknown priority level: ${level}`)
+      }
+
+      const levelMessages = grouped.get(level) || []
+      if (levelMessages.length > 0) {
+        await emitter.sendMessages(levelMessages)
+      }
+    }
+
+    const noPriorityLevel = messages.filter((m) => m.priorityLevel === undefined)
+    if (noPriorityLevel.length > 0) {
+      await this.defaultEmitter.sendMessages(noPriorityLevel)
+    }
+  }
+}
diff --git a/services/libs/sqs/src/queue.ts b/services/libs/sqs/src/queue.ts
index b6c51016d4..20186d3aab 100644
--- a/services/libs/sqs/src/queue.ts
+++ b/services/libs/sqs/src/queue.ts
@@ -159,7 +159,10 @@ export abstract class SqsQueueReceiver extends SqsQueueBase {
             this.removeJob()
           })
           // if error is detected don't delete the message from the queue
-          .catch(() => this.removeJob())
+          .catch((err) => {
+            this.log.error(err, 'Error processing message!')
+            this.removeJob()
+          })
 
         if (this.deleteMessageImmediately) {
           await this.deleteMessage(message.ReceiptHandle)
@@ -214,7 +217,7 @@ export abstract class SqsQueueReceiver extends SqsQueueBase {
   }
 }
 
-export abstract class SqsQueueEmitter extends SqsQueueBase implements ISqsQueueEmitter {
+export class SqsQueueEmitter extends SqsQueueBase implements ISqsQueueEmitter {
   constructor(sqsClient: SqsClient, queueConf: ISqsQueueConfig, tracer: Tracer, parentLog: Logger) {
     super(sqsClient, queueConf, tracer, parentLog)
   }
diff --git a/services/libs/sqs/src/types.ts b/services/libs/sqs/src/types.ts
index 795ab1f57a..5afb3b1625 100644
--- a/services/libs/sqs/src/types.ts
+++ b/services/libs/sqs/src/types.ts
@@ -39,6 +39,16 @@ export enum SqsFifoThroughputLimitType {
   PER_MESSAGE_GROUP_ID = 'perMessageGroupId',
 }
 
+export enum CrowdQueue {
+  INTEGRATION_RUN_WORKER = 'integration-run-worker',
+  INTEGRATION_STREAM_WORKER = 'integration-stream-worker',
+  INTEGRATION_DATA_WORKER = 'integration-data-worker',
+  DATA_SINK_WORKER = 'data-sink-worker',
+  NODEJS_WORKER = 'nodejs-worker',
+  SEARCH_SYNC_WORKER = 'search-sync-worker',
+  INTEGRATION_SYNC_WORKER = 'integration-sync-worker',
+}
+
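The CrowdQueue enum above pairs with the `${queueConf.name}-${level}` convention used by SqsPrioritizedQueueEmitter and SqsPrioritizedQueueReciever: every base queue fans out into one physical queue per priority level (the `nodejs-worker-normal.fifo` URL in the later .env change is one such derived queue). A tiny sketch of the naming rule, with trimmed stand-in enums rather than the full types from this patch:

    // one physical queue per (base queue, priority level) pair
    enum Queue {
      NODEJS_WORKER = 'nodejs-worker',
      DATA_SINK_WORKER = 'data-sink-worker',
    }

    enum Level {
      NORMAL = 'normal',
      HIGH = 'high',
    }

    function queueName(base: Queue, level?: Level): string {
      // no level means the original, non-prioritized queue
      return level ? `${base}-${level}` : base
    }

    // queueName(Queue.NODEJS_WORKER, Level.NORMAL) === 'nodejs-worker-normal'

 export interface ISqsQueueConfig {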
name: string type: SqsQueueType diff --git a/services/libs/types/src/enums/featureFlags.ts b/services/libs/types/src/enums/featureFlags.ts index 03f8dddfc8..6de3bab89b 100644 --- a/services/libs/types/src/enums/featureFlags.ts +++ b/services/libs/types/src/enums/featureFlags.ts @@ -7,6 +7,7 @@ export enum FeatureFlag { MEMBER_ENRICHMENT = 'member-enrichment', ORGANIZATION_ENRICHMENT = 'organization-enrichment', SEGMENTS = 'segments', + PRIORITIZED_QUEUES = 'prioritized-queues', FIND_GITHUB = 'find-github', // opensearch diff --git a/services/libs/types/src/enums/index.ts b/services/libs/types/src/enums/index.ts index 198fe35ec1..1744a55ade 100644 --- a/services/libs/types/src/enums/index.ts +++ b/services/libs/types/src/enums/index.ts @@ -8,6 +8,7 @@ export * from './members' export * from './opensearch' export * from './organizations' export * from './platforms' +export * from './priorities' export * from './temporal' export * from './tenants' export * from './webhooks' diff --git a/services/libs/types/src/enums/priorities.ts b/services/libs/types/src/enums/priorities.ts new file mode 100644 index 0000000000..94b89597c0 --- /dev/null +++ b/services/libs/types/src/enums/priorities.ts @@ -0,0 +1,14 @@ +import { TenantPlans } from './tenants' + +export enum QueuePriorityLevel { + SYSTEM = 'system', + NORMAL = 'normal', + HIGH = 'high', + URGENT = 'urgent', +} + +export interface IQueuePriorityCalculationContext { + onboarding?: boolean + dbPriority?: QueuePriorityLevel | null + plan: TenantPlans +} diff --git a/services/libs/types/src/queue/index.ts b/services/libs/types/src/queue/index.ts index c0cf792c22..d298327fe7 100644 --- a/services/libs/types/src/queue/index.ts +++ b/services/libs/types/src/queue/index.ts @@ -4,11 +4,22 @@ export interface IQueueMessage { export interface ISqsQueueReceiver { start(): Promise - stop() + + stop(): void + processMessage(data: IQueueMessage): Promise } export interface ISqsQueueEmitter { init(): Promise - sendMessage(groupId: string, message: IQueueMessage, deduplicationId?: string): Promise + + sendMessage( + groupId: string, + message: T, + deduplicationId?: string, + ): Promise + + sendMessages( + messages: { payload: T; groupId: string; deduplicationId?: string; id?: string }[], + ): Promise } diff --git a/services/libs/types/src/queue/integration_sync_worker/index.ts b/services/libs/types/src/queue/integration_sync_worker/index.ts index 27900f991d..42d7b4791e 100644 --- a/services/libs/types/src/queue/integration_sync_worker/index.ts +++ b/services/libs/types/src/queue/integration_sync_worker/index.ts @@ -1,3 +1,5 @@ +import { AutomationSyncTrigger } from '../../automations' + export enum IntegrationSyncWorkerQueueMessageType { SYNC_ALL_MARKED_MEMBERS = 'sync_all_marked_members', SYNC_MEMBER = 'sync_member', @@ -5,3 +7,30 @@ export enum IntegrationSyncWorkerQueueMessageType { SYNC_ORGANIZATION = 'sync_organization', ONBOARD_AUTOMATION = 'onboard_automation', } + +export interface IIntegrationSyncWorkerEmitter { + triggerSyncMarkedMembers(tenantId: string, integrationId: string): Promise + + triggerSyncMember( + tenantId: string, + integrationId: string, + memberId: string, + syncRemoteId: string, + ): Promise + + triggerOnboardAutomation( + tenantId: string, + integrationId: string, + automationId: string, + automationTrigger: AutomationSyncTrigger, + ): Promise + + triggerSyncMarkedOrganizations(tenantId: string, integrationId: string): Promise + + triggerSyncOrganization( + tenantId: string, + integrationId: string, + organizationId: string, + 
syncRemoteId: string, + ): Promise +} diff --git a/services/libs/types/src/queue/nodejs_worker/index.ts b/services/libs/types/src/queue/nodejs_worker/index.ts index bb83e1276a..fea70b082f 100644 --- a/services/libs/types/src/queue/nodejs_worker/index.ts +++ b/services/libs/types/src/queue/nodejs_worker/index.ts @@ -1,12 +1,19 @@ +import { AutomationTrigger, AutomationType } from '../../automations' import { IQueueMessage } from '../' +/* eslint-disable @typescript-eslint/no-explicit-any */ + export enum NodejsWorkerQueueMessageType { NODE_MICROSERVICE = 'node_microservice', } +export enum ExportableEntity { + MEMBERS = 'members', +} + export class NewActivityAutomationQueueMessage implements IQueueMessage { public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE - public readonly trigger = 'new_activity' + public readonly trigger = AutomationTrigger.NEW_ACTIVITY public readonly service = 'automation' constructor( @@ -18,8 +25,117 @@ export class NewActivityAutomationQueueMessage implements IQueueMessage { export class NewMemberAutomationQueueMessage implements IQueueMessage { public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE - public readonly trigger = 'new_member' + public readonly trigger = AutomationTrigger.NEW_MEMBER public readonly service = 'automation' - constructor(public readonly tenant: string, public readonly memberId: string) {} + constructor( + public readonly tenant: string, + public readonly memberId: string, + public readonly segmentId: string, + ) {} +} + +export class ProcessAutomationQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'automation-process' + + constructor( + public readonly tenant: string, + public readonly automationType: AutomationType, + public readonly automation: any, + public readonly eventId: string, + public readonly payload: any, + ) {} +} + +export class BulkEnrichQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'bulk-enrich' + + constructor( + public readonly tenant: string, + public readonly memberIds: string[], + public readonly segmentIds: string[], + public readonly notifyFrontend: boolean, + public readonly skipCredits: boolean, + ) {} +} + +export class OrgMergeQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'org-merge' + + constructor( + public readonly tenantId: string, + public readonly primaryOrgId: string, + public readonly secondaryOrgId: string, + public readonly notifyFrontend: boolean, + ) {} +} + +export class ExportCSVQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'csv-export' + + constructor( + public readonly user: string, + public readonly tenant: string, + public readonly entity: ExportableEntity, + public readonly segmentIds: string[], + public readonly criteria: any, + ) {} +} + +export class EagleEyeEmailDigestQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'eagle-eye-email-digest' + + constructor(public readonly tenant: string, public readonly user: string) {} +} + +export class IntegrationDataCheckerQueueMessage implements IQueueMessage { + public readonly type: string = 
NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'integration-data-checker' + + constructor(public readonly tenantId: string, public readonly integrationId: string) {} +} + +export class WeeklyAnalyticsEmailQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'weekly-analytics-emails' + + constructor(public readonly tenant: string) {} +} + +export class StripeWebhookQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'stripe-webhooks' + + constructor(public readonly event: any) {} +} + +export class SendgridWebhookQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'sendgrid-webhooks' + + constructor(public readonly event: any) {} +} + +export class RefreshSampleDataQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'refresh-sample-data' +} + +export class EnrichOrganizationQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'enrich-organizations' + + constructor(public readonly tenantId: string, public readonly maxEnrichLimit: number) {} +} + +export class MergeSuggestionsQueueMessage implements IQueueMessage { + public readonly type: string = NodejsWorkerQueueMessageType.NODE_MICROSERVICE + public readonly service = 'merge-suggestions' + + constructor(public readonly tenant: string) {} } From ef8d10c6e43c6ce35f9d1e1294894ed8e79c0096 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Uro=C5=A1=20Marolt?= Date: Tue, 12 Dec 2023 12:50:38 +0100 Subject: [PATCH 015/185] quick fix --- backend/.env.dist.local | 3 ++- backend/config/custom-environment-variables.json | 1 + backend/src/bin/nodejs-worker.ts | 9 ++++++--- backend/src/conf/configTypes.ts | 1 + 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/backend/.env.dist.local b/backend/.env.dist.local index 246da4d990..d891c9b341 100755 --- a/backend/.env.dist.local +++ b/backend/.env.dist.local @@ -15,7 +15,8 @@ CROWD_API_JWT_EXPIRES_IN='100 years' CROWD_SQS_HOST=localhost CROWD_SQS_PORT=9324 CROWD_SQS_ENDPOINT=http://localhost:9324 -CROWD_SQS_NODEJS_WORKER_QUEUE=http://localhost:9324/000000000000/nodejs-worker-normal.fifo +CROWD_SQS_NODEJS_WORKER_QUEUE=http://localhost:9324/000000000000/nodejs-worker.fifo +CROWD_SQS_NODEJS_WORKER_PRIORITY_QUEUE=http://localhost:9324/000000000000/nodejs-worker-normal.fifo CROWD_SQS_PYTHON_WORKER_QUEUE=http://localhost:9324/000000000000/python-worker.fifo CROWD_SQS_AWS_ACCOUNT_ID=000000000000 CROWD_SQS_AWS_ACCESS_KEY_ID=x diff --git a/backend/config/custom-environment-variables.json b/backend/config/custom-environment-variables.json index 11381c299a..19e0ff9663 100644 --- a/backend/config/custom-environment-variables.json +++ b/backend/config/custom-environment-variables.json @@ -17,6 +17,7 @@ "host": "CROWD_SQS_HOST", "port": "CROWD_SQS_PORT", "nodejsWorkerQueue": "CROWD_SQS_NODEJS_WORKER_QUEUE", + "nodejsWorkerPriorityQueue": "CROWD_SQS_NODEJS_WORKER_PRIORITY_QUEUE", "integrationRunWorkerQueue": "CROWD_SQS_INTEGRATION_RUN_WORKER_QUEUE", "pythonWorkerQueue": "CROWD_SQS_PYTHON_WORKER_QUEUE", "aws": { diff --git a/backend/src/bin/nodejs-worker.ts b/backend/src/bin/nodejs-worker.ts index fe1c82591f..8e26b78ca7 
100644 --- a/backend/src/bin/nodejs-worker.ts +++ b/backend/src/bin/nodejs-worker.ts @@ -65,9 +65,9 @@ const removeWorkerJob = (): void => { processingMessages-- } -async function handleMessages() { +async function handleMessages(queue: string) { const handlerLogger = getChildLogger('messages', serviceLogger, { - queue: SQS_CONFIG.nodejsWorkerQueue, + queue, }) handlerLogger.info('Listening for messages!') @@ -181,7 +181,10 @@ setImmediate(async () => { await initRedisSeq() await getNodejsWorkerEmitter() - await handleMessages() + await Promise.all([ + handleMessages(SQS_CONFIG.nodejsWorkerQueue), + handleMessages(SQS_CONFIG.nodejsWorkerPriorityQueue), + ]) }) const liveFilePath = path.join(__dirname, 'tmp/nodejs-worker-live.tmp') diff --git a/backend/src/conf/configTypes.ts b/backend/src/conf/configTypes.ts index 830c4d774c..0016bea78b 100644 --- a/backend/src/conf/configTypes.ts +++ b/backend/src/conf/configTypes.ts @@ -23,6 +23,7 @@ export interface SQSConfiguration { host?: string port?: number nodejsWorkerQueue: string + nodejsWorkerPriorityQueue: string integrationRunWorkerQueue: string pythonWorkerQueue: string aws: AwsCredentials From 9bef427d01f3c14623e8262ca99f238dfdc95aee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Uro=C5=A1=20Marolt?= Date: Tue, 12 Dec 2023 13:46:27 +0100 Subject: [PATCH 016/185] fix deploy action --- .github/actions/deploy-service/action.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/actions/deploy-service/action.yaml b/.github/actions/deploy-service/action.yaml index d97b98588b..df00da2108 100644 --- a/.github/actions/deploy-service/action.yaml +++ b/.github/actions/deploy-service/action.yaml @@ -17,12 +17,12 @@ inputs: prioritized: description: Is the service listening on prioritized queues? required: false - default: "false" + default: 'false' only_normal_queue_level: description: Is the service prioritized but only listening on normal level? 
(staging/lf) required: false - default: "false" + default: 'false' runs: using: composite @@ -36,12 +36,12 @@ runs: AWS_REGION: ${{ env.AWS_REGION }} - name: Deploy image (non prioritized) - if: inputs.prioritized == "false" + if: inputs.prioritized == 'false' shell: bash run: kubectl set image deployments/${{ inputs.service }}-dpl ${{ inputs.service }}=${{ inputs.image }} - name: Deploy image (prioritized) - if: inputs.prioritized == "true" && inputs.only_normal_queue_level == "false" + if: inputs.prioritized == 'true' && inputs.only_normal_queue_level == 'false' shell: bash run: | kubectl set image deployments/${{ inputs.service }}-system-dpl ${{ inputs.service }}-system=${{ inputs.image }} @@ -50,10 +50,10 @@ runs: kubectl set image deployments/${{ inputs.service }}-urgent-dpl ${{ inputs.service }}-urgent=${{ inputs.image }} - name: Deploy image (prioritized - normal only) - if: inputs.prioritized == "true" && inputs.only_normal_queue_level == "true" + if: inputs.prioritized == 'true' && inputs.only_normal_queue_level == 'true' shell: bash run: kubectl set image deployments/${{ inputs.service }}-normal-dpl ${{ inputs.service }}-normal=${{ inputs.image }} - uses: ./.github/actions/slack-notify with: - message: "Service *${{ inputs.service }}* was just deployed using docker image `${{ inputs.image }}`" + message: 'Service *${{ inputs.service }}* was just deployed using docker image `${{ inputs.image }}`' From 6c232bcb548f6352acd1adf28797b66945b9ff75 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Uro=C5=A1=20Marolt?= Date: Tue, 12 Dec 2023 14:10:43 +0100 Subject: [PATCH 017/185] lfx and staging deploys fixed --- .github/actions/deploy-service/action.yaml | 25 ++++++++++++++----- .../workflows/lf-production-deploy-new.yaml | 4 +-- .../lf-production-deploy-original.yaml | 10 ++++---- .../workflows/lf-staging-deploy-backend.yaml | 12 ++++----- .../lf-staging-deploy-data-sink-worker.yaml | 10 ++++---- ...taging-deploy-integration-data-worker.yaml | 10 ++++---- ...staging-deploy-integration-run-worker.yaml | 10 ++++---- ...ging-deploy-integration-stream-worker.yaml | 10 ++++---- .../lf-staging-deploy-search-sync-worker.yaml | 10 ++++---- .github/workflows/staging-deploy-backend.yaml | 12 ++++----- .../staging-deploy-data-sink-worker.yaml | 10 ++++---- ...taging-deploy-integration-data-worker.yaml | 10 ++++---- ...staging-deploy-integration-run-worker.yaml | 10 ++++---- ...ging-deploy-integration-stream-worker.yaml | 10 ++++---- ...taging-deploy-integration-sync-worker.yaml | 10 ++++---- .../staging-deploy-search-sync-worker.yaml | 10 ++++---- 16 files changed, 93 insertions(+), 80 deletions(-) diff --git a/.github/actions/deploy-service/action.yaml b/.github/actions/deploy-service/action.yaml index df00da2108..bfcd759ada 100644 --- a/.github/actions/deploy-service/action.yaml +++ b/.github/actions/deploy-service/action.yaml @@ -19,8 +19,13 @@ inputs: required: false default: 'false' - only_normal_queue_level: - description: Is the service prioritized but only listening on normal level? (staging/lf) + staging: + description: Is the service prioritized and deployed to staging? + required: false + default: 'false' + + lfx: + description: Is the service prioritized and deployed to lfx? 
required: false default: 'false' @@ -40,8 +45,8 @@ runs: shell: bash run: kubectl set image deployments/${{ inputs.service }}-dpl ${{ inputs.service }}=${{ inputs.image }} - - name: Deploy image (prioritized) - if: inputs.prioritized == 'true' && inputs.only_normal_queue_level == 'false' + - name: Deploy image (prioritized - production) + if: inputs.prioritized == 'true' && inputs.lfx == 'false' shell: bash run: | kubectl set image deployments/${{ inputs.service }}-system-dpl ${{ inputs.service }}-system=${{ inputs.image }} @@ -49,8 +54,16 @@ runs: kubectl set image deployments/${{ inputs.service }}-high-dpl ${{ inputs.service }}-high=${{ inputs.image }} kubectl set image deployments/${{ inputs.service }}-urgent-dpl ${{ inputs.service }}-urgent=${{ inputs.image }} - - name: Deploy image (prioritized - normal only) - if: inputs.prioritized == 'true' && inputs.only_normal_queue_level == 'true' + - name: Deploy image (prioritized - lfx production) + if: inputs.prioritized == 'true' && inputs.lfx == 'true' + shell: bash + run: | + kubectl set image deployments/${{ inputs.service }}-system-dpl ${{ inputs.service }}-system=${{ inputs.image }} + kubectl set image deployments/${{ inputs.service }}-normal-dpl ${{ inputs.service }}-normal=${{ inputs.image }} + kubectl set image deployments/${{ inputs.service }}-high-dpl ${{ inputs.service }}-high=${{ inputs.image }} + + - name: Deploy image (prioritized - staging) + if: inputs.prioritized == 'true' && inputs.staging == 'true' shell: bash run: kubectl set image deployments/${{ inputs.service }}-normal-dpl ${{ inputs.service }}-normal=${{ inputs.image }} diff --git a/.github/workflows/lf-production-deploy-new.yaml b/.github/workflows/lf-production-deploy-new.yaml index 2cb8fa2c5c..90b2070b0b 100644 --- a/.github/workflows/lf-production-deploy-new.yaml +++ b/.github/workflows/lf-production-deploy-new.yaml @@ -208,7 +208,7 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push-search-sync-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + lfx: true prioritized: true deploy-search-sync-api: @@ -246,7 +246,7 @@ jobs: service: integration-sync-worker image: ${{ needs.build-and-push-integration-sync-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + lfx: true prioritized: true deploy-webhook-api: diff --git a/.github/workflows/lf-production-deploy-original.yaml b/.github/workflows/lf-production-deploy-original.yaml index f17493efce..9290c21a6d 100644 --- a/.github/workflows/lf-production-deploy-original.yaml +++ b/.github/workflows/lf-production-deploy-original.yaml @@ -237,7 +237,7 @@ jobs: service: nodejs-worker image: ${{ needs.build-and-push-backend.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + lfx: true prioritized: true deploy-discord-ws: @@ -293,7 +293,7 @@ jobs: service: integration-run-worker image: ${{ needs.build-and-push-integration-run-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + lfx: true prioritized: true deploy-integration-stream-worker: @@ -313,7 +313,7 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push-integration-stream-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + lfx: true prioritized: true deploy-integration-data-worker: @@ -333,7 +333,7 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push-integration-data-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - 
only_normal_queue_level: true + lfx: true prioritized: true deploy-data-sink-worker: @@ -353,7 +353,7 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push-data-sink-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + lfx: true prioritized: true deploy-frontend: diff --git a/.github/workflows/lf-staging-deploy-backend.yaml b/.github/workflows/lf-staging-deploy-backend.yaml index b857f1bf60..05a4e5b953 100644 --- a/.github/workflows/lf-staging-deploy-backend.yaml +++ b/.github/workflows/lf-staging-deploy-backend.yaml @@ -3,12 +3,12 @@ name: LF Staging Deploy Backend services on: push: branches: - - "lf-staging/**" - - "lf-staging-**" + - 'lf-staging/**' + - 'lf-staging-**' paths: - - "backend/**" - - "services/libs/**" - - "!backend/src/serverless/microservices/python/**" + - 'backend/**' + - 'services/libs/**' + - '!backend/src/serverless/microservices/python/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -76,7 +76,7 @@ jobs: service: nodejs-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true deploy-job-generator: diff --git a/.github/workflows/lf-staging-deploy-data-sink-worker.yaml b/.github/workflows/lf-staging-deploy-data-sink-worker.yaml index bce35c6aba..da04a6aca1 100644 --- a/.github/workflows/lf-staging-deploy-data-sink-worker.yaml +++ b/.github/workflows/lf-staging-deploy-data-sink-worker.yaml @@ -3,11 +3,11 @@ name: LF Staging Deploy Data Sink Worker on: push: branches: - - "lf-staging/**" - - "lf-staging-**" + - 'lf-staging/**' + - 'lf-staging-**' paths: - - "services/libs/**" - - "services/apps/data_sink_worker/**" + - 'services/libs/**' + - 'services/apps/data_sink_worker/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true diff --git a/.github/workflows/lf-staging-deploy-integration-data-worker.yaml b/.github/workflows/lf-staging-deploy-integration-data-worker.yaml index 5342817544..0f8c72ee7b 100644 --- a/.github/workflows/lf-staging-deploy-integration-data-worker.yaml +++ b/.github/workflows/lf-staging-deploy-integration-data-worker.yaml @@ -3,11 +3,11 @@ name: LF Staging Deploy Integration Data Worker on: push: branches: - - "lf-staging/**" - - "lf-staging-**" + - 'lf-staging/**' + - 'lf-staging-**' paths: - - "services/libs/**" - - "services/apps/integration_data_worker/**" + - 'services/libs/**' + - 'services/apps/integration_data_worker/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true diff --git a/.github/workflows/lf-staging-deploy-integration-run-worker.yaml b/.github/workflows/lf-staging-deploy-integration-run-worker.yaml index 64cf768a78..265fb27a51 100644 --- a/.github/workflows/lf-staging-deploy-integration-run-worker.yaml +++ b/.github/workflows/lf-staging-deploy-integration-run-worker.yaml @@ -3,11 +3,11 @@ name: LF Staging Deploy Integration Run Worker on: push: branches: - - "lf-staging/**" - - "lf-staging-**" + - 'lf-staging/**' + - 'lf-staging-**' paths: - - "services/libs/**" - - "services/apps/integration_data_worker/**" + - 'services/libs/**' + - 
'services/apps/integration_data_worker/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: service: integration-run-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true diff --git a/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml b/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml index db9bfb61ee..4388ddcf9f 100644 --- a/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml +++ b/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml @@ -3,11 +3,11 @@ name: LF Staging Deploy Integration Stream Worker on: push: branches: - - "lf-staging/**" - - "lf-staging-**" + - 'lf-staging/**' + - 'lf-staging-**' paths: - - "services/libs/**" - - "services/apps/integration_stream_worker/**" + - 'services/libs/**' + - 'services/apps/integration_stream_worker/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true diff --git a/.github/workflows/lf-staging-deploy-search-sync-worker.yaml b/.github/workflows/lf-staging-deploy-search-sync-worker.yaml index cb76c2839a..a30b6861d0 100644 --- a/.github/workflows/lf-staging-deploy-search-sync-worker.yaml +++ b/.github/workflows/lf-staging-deploy-search-sync-worker.yaml @@ -3,11 +3,11 @@ name: LF Staging Deploy Search Sync Worker on: push: branches: - - "lf-staging/**" - - "lf-staging-**" + - 'lf-staging/**' + - 'lf-staging-**' paths: - - "services/libs/**" - - "services/apps/search_sync_worker/**" + - 'services/libs/**' + - 'services/apps/search_sync_worker/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true diff --git a/.github/workflows/staging-deploy-backend.yaml b/.github/workflows/staging-deploy-backend.yaml index 833ba4ce83..3a40c176c9 100644 --- a/.github/workflows/staging-deploy-backend.yaml +++ b/.github/workflows/staging-deploy-backend.yaml @@ -3,12 +3,12 @@ name: Staging Deploy Backend services on: push: branches: - - "staging/**" - - "staging-**" + - 'staging/**' + - 'staging-**' paths: - - "backend/**" - - "services/libs/**" - - "!backend/src/serverless/microservices/python/**" + - 'backend/**' + - 'services/libs/**' + - '!backend/src/serverless/microservices/python/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -77,7 +77,7 @@ jobs: image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} prioritized: true - only_normal_queue_level: true + staging: true deploy-job-generator: needs: build-and-push diff --git a/.github/workflows/staging-deploy-data-sink-worker.yaml b/.github/workflows/staging-deploy-data-sink-worker.yaml index 1e6e1fda67..f6cde19ac7 100644 --- a/.github/workflows/staging-deploy-data-sink-worker.yaml +++ b/.github/workflows/staging-deploy-data-sink-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Data Sink Worker on: push: branches: - - "staging/**" - - "staging-**" + - 'staging/**' + - 'staging-**' paths: - - "services/libs/**" - - "services/apps/data_sink_worker/**" + - 'services/libs/**' + - 'services/apps/data_sink_worker/**' env: DOCKERHUB_USERNAME: ${{ 
secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true diff --git a/.github/workflows/staging-deploy-integration-data-worker.yaml b/.github/workflows/staging-deploy-integration-data-worker.yaml index bb1c26b2d6..d668ec3c2f 100644 --- a/.github/workflows/staging-deploy-integration-data-worker.yaml +++ b/.github/workflows/staging-deploy-integration-data-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Integration Data Worker on: push: branches: - - "staging/**" - - "staging-**" + - 'staging/**' + - 'staging-**' paths: - - "services/libs/**" - - "services/apps/integration_data_worker/**" + - 'services/libs/**' + - 'services/apps/integration_data_worker/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true diff --git a/.github/workflows/staging-deploy-integration-run-worker.yaml b/.github/workflows/staging-deploy-integration-run-worker.yaml index 35fac4eb7a..bdb55a68b0 100644 --- a/.github/workflows/staging-deploy-integration-run-worker.yaml +++ b/.github/workflows/staging-deploy-integration-run-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Integration Run Worker on: push: branches: - - "staging/**" - - "staging-**" + - 'staging/**' + - 'staging-**' paths: - - "services/libs/**" - - "services/apps/integration_data_worker/**" + - 'services/libs/**' + - 'services/apps/integration_data_worker/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: service: integration-run-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true diff --git a/.github/workflows/staging-deploy-integration-stream-worker.yaml b/.github/workflows/staging-deploy-integration-stream-worker.yaml index 8176728bfb..a50fbda010 100644 --- a/.github/workflows/staging-deploy-integration-stream-worker.yaml +++ b/.github/workflows/staging-deploy-integration-stream-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Integration Stream Worker on: push: branches: - - "staging/**" - - "staging-**" + - 'staging/**' + - 'staging-**' paths: - - "services/libs/**" - - "services/apps/integration_stream_worker/**" + - 'services/libs/**' + - 'services/apps/integration_stream_worker/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true diff --git a/.github/workflows/staging-deploy-integration-sync-worker.yaml b/.github/workflows/staging-deploy-integration-sync-worker.yaml index 55fcad8c5a..214e53b440 100644 --- a/.github/workflows/staging-deploy-integration-sync-worker.yaml +++ b/.github/workflows/staging-deploy-integration-sync-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Integration Sync Worker on: push: branches: - - "staging/**" - - "staging-**" + - 'staging/**' + - 'staging-**' paths: - - "services/libs/**" - - "services/apps/integration_sync_worker/**" + - 'services/libs/**' + - 'services/apps/integration_sync_worker/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: 
service: integration-sync-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true diff --git a/.github/workflows/staging-deploy-search-sync-worker.yaml b/.github/workflows/staging-deploy-search-sync-worker.yaml index 3607b0c0ca..eb10f4e83d 100644 --- a/.github/workflows/staging-deploy-search-sync-worker.yaml +++ b/.github/workflows/staging-deploy-search-sync-worker.yaml @@ -3,11 +3,11 @@ name: Staging Deploy Search Sync Worker on: push: branches: - - "staging/**" - - "staging-**" + - 'staging/**' + - 'staging-**' paths: - - "services/libs/**" - - "services/apps/search_sync_worker/**" + - 'services/libs/**' + - 'services/apps/search_sync_worker/**' env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} @@ -58,5 +58,5 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - only_normal_queue_level: true + staging: true prioritized: true From 7fc3d0668668fbb8984983706e7448c260f611f6 Mon Sep 17 00:00:00 2001 From: Uros Marolt Date: Tue, 12 Dec 2023 16:11:42 +0100 Subject: [PATCH 018/185] Improvement/test member enrichment with logs (#1953) Co-authored-by: anilb --- backend/src/database/repositories/memberRepository.ts | 2 +- .../src/services/premium/enrichment/memberEnrichmentService.ts | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/backend/src/database/repositories/memberRepository.ts b/backend/src/database/repositories/memberRepository.ts index 6a31d8b7d2..6b687c2586 100644 --- a/backend/src/database/repositories/memberRepository.ts +++ b/backend/src/database/repositories/memberRepository.ts @@ -421,7 +421,7 @@ class MemberRepository { const query = ` INSERT INTO "memberToMerge" ("memberId", "toMergeId", "similarity", "createdAt", "updatedAt") - VALUES ${placeholders.join(', ')}; + VALUES ${placeholders.join(', ')} on conflict do nothing; ` try { await seq.query(query, { diff --git a/backend/src/services/premium/enrichment/memberEnrichmentService.ts b/backend/src/services/premium/enrichment/memberEnrichmentService.ts index 908b19497f..ae47c87587 100644 --- a/backend/src/services/premium/enrichment/memberEnrichmentService.ts +++ b/backend/src/services/premium/enrichment/memberEnrichmentService.ts @@ -390,6 +390,7 @@ export default class MemberEnrichmentService extends LoggerBase { await SequelizeRepository.commitTransaction(transaction) return result } catch (error) { + this.log.error(error, 'Error while enriching a member!') await SequelizeRepository.rollbackTransaction(transaction) throw error } From 51037032dd1d331086fb2250969064b68fc62adf Mon Sep 17 00:00:00 2001 From: Uros Marolt Date: Wed, 13 Dec 2023 10:12:37 +0100 Subject: [PATCH 019/185] fix query error when checking for delayed results (#1956) --- .../src/repo/dataSink.repo.ts | 40 ++++++++++++++----- 1 file changed, 29 insertions(+), 11 deletions(-) diff --git a/services/apps/data_sink_worker/src/repo/dataSink.repo.ts b/services/apps/data_sink_worker/src/repo/dataSink.repo.ts index 510ddcf498..72f3b12e5b 100644 --- a/services/apps/data_sink_worker/src/repo/dataSink.repo.ts +++ b/services/apps/data_sink_worker/src/repo/dataSink.repo.ts @@ -2,6 +2,7 @@ import { DbStore, RepositoryBase } from '@crowd/database' import { Logger } from '@crowd/logging' import { IIntegrationResult, IntegrationResultState, TenantPlans } from '@crowd/types' import { IDelayedResults, IFailedResultData, IResultData } from './dataSink.data' +import { distinct } from 
'@crowd/common' export default class DataSinkRepository extends RepositoryBase { constructor(dbStore: DbStore, parentLog: Logger) { @@ -262,18 +263,15 @@ export default class DataSinkRepository extends RepositoryBase r.runId)) + if (runIds.length > 0) { + const runInfos = await this.db().any( + ` + select id, onboarding + from integration.runs + where id in ($(runIds:csv)) + `, + { + runIds, + }, + ) + + for (const runInfo of runInfos) { + for (const result of resultData.filter((r) => r.runId === runInfo.id)) { + result.onboarding = runInfo.onboarding + } + } + } + + return resultData } catch (err) { this.log.error(err, 'Failed to get delayed results!') throw err From b0021042b3a567ef5e672035600367897000cad6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Uro=C5=A1=20Marolt?= Date: Wed, 13 Dec 2023 10:35:47 +0100 Subject: [PATCH 020/185] bugfix --- backend/src/bin/nodejs-worker.ts | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/backend/src/bin/nodejs-worker.ts b/backend/src/bin/nodejs-worker.ts index 8e26b78ca7..e86ca6ac7d 100644 --- a/backend/src/bin/nodejs-worker.ts +++ b/backend/src/bin/nodejs-worker.ts @@ -56,15 +56,6 @@ const removeFromQueue = (receiptHandle: string): Promise => { return deleteMessage(SQS_CLIENT(), params) } -let processingMessages = 0 -const isWorkerAvailable = (): boolean => processingMessages <= 3 -const addWorkerJob = (): void => { - processingMessages++ -} -const removeWorkerJob = (): void => { - processingMessages-- -} - async function handleMessages(queue: string) { const handlerLogger = getChildLogger('messages', serviceLogger, { queue, @@ -91,10 +82,7 @@ async function handleMessages(queue: string) { return } - messageLogger.debug( - { messageType: msg.type, messagePayload: JSON.stringify(msg) }, - 'Received a new queue message!', - ) + messageLogger.info({ messageType: msg.type }, 'Received a new queue message!') let processFunction: (msg: NodeWorkerMessageBase, logger?: Logger) => Promise @@ -129,12 +117,26 @@ async function handleMessages(queue: string) { type: msg.type, }, ) + } else { + messageLogger.error( + { messageType: msg.type }, + 'Error while parsing queue message! 
Invalid type.', + ) } } catch (err) { messageLogger.error(err, { payload: msg }, 'Error while processing queue message!') } } + let processingMessages = 0 + const isWorkerAvailable = (): boolean => processingMessages <= 3 + const addWorkerJob = (): void => { + processingMessages++ + } + const removeWorkerJob = (): void => { + processingMessages-- + } + // noinspection InfiniteLoopJS while (!exiting) { if (isWorkerAvailable()) { From 2173c50fc77bb1b02590d4eabbef714142ed73b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Uro=C5=A1=20Marolt?= Date: Wed, 13 Dec 2023 10:44:00 +0100 Subject: [PATCH 021/185] receive and delete from a specific queue --- backend/src/bin/nodejs-worker.ts | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/backend/src/bin/nodejs-worker.ts b/backend/src/bin/nodejs-worker.ts index e86ca6ac7d..a9025fbb4d 100644 --- a/backend/src/bin/nodejs-worker.ts +++ b/backend/src/bin/nodejs-worker.ts @@ -33,9 +33,9 @@ process.on('SIGTERM', async () => { exiting = true }) -const receive = async (): Promise => { +const receive = async (queue: string): Promise => { const params: SqsReceiveMessageRequest = { - QueueUrl: SQS_CONFIG.nodejsWorkerQueue, + QueueUrl: queue, } const messages = await receiveMessage(SQS_CLIENT(), params) @@ -47,9 +47,9 @@ const receive = async (): Promise => { return undefined } -const removeFromQueue = (receiptHandle: string): Promise => { +const removeFromQueue = (queue: string, receiptHandle: string): Promise => { const params: SqsDeleteMessageRequest = { - QueueUrl: SQS_CONFIG.nodejsWorkerQueue, + QueueUrl: queue, ReceiptHandle: receiptHandle, } @@ -78,7 +78,7 @@ async function handleMessages(queue: string) { messageLogger.warn( 'Skipping enrich_member_organizations message! Purging the queue because they are not needed anymore!', ) - await removeFromQueue(message.ReceiptHandle) + await removeFromQueue(queue, message.ReceiptHandle) return } @@ -103,7 +103,7 @@ async function handleMessages(queue: string) { 'nodejs_worker.process_message', async () => { // remove the message from the queue as it's about to be processed - await removeFromQueue(message.ReceiptHandle) + await removeFromQueue(queue, message.ReceiptHandle) messagesInProgress.set(message.MessageId, msg) try { await processFunction(msg, messageLogger) @@ -140,7 +140,7 @@ async function handleMessages(queue: string) { // noinspection InfiniteLoopJS while (!exiting) { if (isWorkerAvailable()) { - const message = await receive() + const message = await receive(queue) if (message) { addWorkerJob() From 205985387cc1e8817b105cd37f3430f8198fd64f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Uro=C5=A1=20Marolt?= Date: Wed, 13 Dec 2023 10:51:07 +0100 Subject: [PATCH 022/185] removed info log on every message --- backend/src/bin/nodejs-worker.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/src/bin/nodejs-worker.ts b/backend/src/bin/nodejs-worker.ts index a9025fbb4d..11b90aa6ec 100644 --- a/backend/src/bin/nodejs-worker.ts +++ b/backend/src/bin/nodejs-worker.ts @@ -82,7 +82,7 @@ async function handleMessages(queue: string) { return } - messageLogger.info({ messageType: msg.type }, 'Received a new queue message!') + messageLogger.debug({ messageType: msg.type }, 'Received a new queue message!') let processFunction: (msg: NodeWorkerMessageBase, logger?: Logger) => Promise From ee1fb0c0618896183cc8bc98c29f201694cbc739 Mon Sep 17 00:00:00 2001 From: Uros Marolt Date: Wed, 13 Dec 2023 11:03:56 +0100 Subject: [PATCH 023/185] 
=?UTF-8?q?use=20true=20for=20default=20value=20fo?= =?UTF-8?q?r=20onboarding=20when=20runId=20is=20null=20for=20inte=E2=80=A6?= =?UTF-8?q?=20(#1957)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- services/apps/data_sink_worker/src/repo/dataSink.repo.ts | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/services/apps/data_sink_worker/src/repo/dataSink.repo.ts b/services/apps/data_sink_worker/src/repo/dataSink.repo.ts index 72f3b12e5b..2ef4497473 100644 --- a/services/apps/data_sink_worker/src/repo/dataSink.repo.ts +++ b/services/apps/data_sink_worker/src/repo/dataSink.repo.ts @@ -2,7 +2,7 @@ import { DbStore, RepositoryBase } from '@crowd/database' import { Logger } from '@crowd/logging' import { IIntegrationResult, IntegrationResultState, TenantPlans } from '@crowd/types' import { IDelayedResults, IFailedResultData, IResultData } from './dataSink.data' -import { distinct } from '@crowd/common' +import { distinct, singleOrDefault } from '@crowd/common' export default class DataSinkRepository extends RepositoryBase { constructor(dbStore: DbStore, parentLog: Logger) { @@ -293,9 +293,12 @@ export default class DataSinkRepository extends RepositoryBase r.runId === runInfo.id)) { + for (const result of resultData) { + const runInfo = singleOrDefault(runInfos, (r) => r.id === result.runId) + if (runInfo) { result.onboarding = runInfo.onboarding + } else { + result.onboarding = true } } } From 53642dc0f32e3c637ab50165cb5575845ecf840c Mon Sep 17 00:00:00 2001 From: anilb Date: Wed, 13 Dec 2023 11:36:40 +0100 Subject: [PATCH 024/185] Missing awaits on merging roles (#1958) --- backend/src/services/memberOrganizationService.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/src/services/memberOrganizationService.ts b/backend/src/services/memberOrganizationService.ts index c0543f87ec..1b2c1fcf17 100644 --- a/backend/src/services/memberOrganizationService.ts +++ b/backend/src/services/memberOrganizationService.ts @@ -67,7 +67,7 @@ export default class MemberOrganizationService extends LoggerBase { secondaryOrganizationId: string, primaryOrganizationId: string, ): Promise { - this.moveRolesBetweenEntities( + await this.moveRolesBetweenEntities( primaryOrganizationId, secondaryOrganizationId, OrgMergeStrat(primaryOrganizationId), @@ -75,7 +75,7 @@ export default class MemberOrganizationService extends LoggerBase { } async moveOrgsBetweenMembers(primaryMemberId: string, secondaryMemberId: string): Promise { - this.moveRolesBetweenEntities( + await this.moveRolesBetweenEntities( primaryMemberId, secondaryMemberId, MemberMergeStrat(primaryMemberId), From 2e67f838b119824fb1d1755ebb321827ddd47c76 Mon Sep 17 00:00:00 2001 From: Igor Kotua <36304232+garrrikkotua@users.noreply.github.com> Date: Wed, 13 Dec 2023 17:02:09 +0300 Subject: [PATCH 025/185] Add checks for paid Discord integrations (#1961) --- .../repositories/integrationRepository.ts | 7 +++++++ .../services/integrationTickProcessor.ts | 15 ++++++++++++++- .../src/integrations/discord/index.ts | 1 + 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/backend/src/database/repositories/integrationRepository.ts b/backend/src/database/repositories/integrationRepository.ts index de882ba681..a1f5dfc44c 100644 --- a/backend/src/database/repositories/integrationRepository.ts +++ b/backend/src/database/repositories/integrationRepository.ts @@ -258,6 +258,13 @@ class IntegrationRepository { status: 'done', platform, }, + include: [ + { + model: 
options.database.tenant, + as: 'tenant', + required: true, + }, + ], limit: perPage, offset: (page - 1) * perPage, order: [['id', 'ASC']], diff --git a/backend/src/serverless/integrations/services/integrationTickProcessor.ts b/backend/src/serverless/integrations/services/integrationTickProcessor.ts index e418573eb7..4e5ea98e60 100644 --- a/backend/src/serverless/integrations/services/integrationTickProcessor.ts +++ b/backend/src/serverless/integrations/services/integrationTickProcessor.ts @@ -6,7 +6,7 @@ import { } from '@crowd/common_services' import { INTEGRATION_SERVICES } from '@crowd/integrations' import { LoggerBase, getChildLogger } from '@crowd/logging' -import { IntegrationType } from '@crowd/types' +import { IntegrationType, TenantPlans } from '@crowd/types' import IntegrationRepository from '@/database/repositories/integrationRepository' import IntegrationRunRepository from '../../../database/repositories/integrationRunRepository' import { IServiceOptions } from '../../../services/IServiceOptions' @@ -128,6 +128,19 @@ export class IntegrationTickProcessor extends LoggerBase { const chunkIndex = Math.min(Math.floor(rand), CHUNKS - 1) const delay = chunkIndex * DELAY_BETWEEN_CHUNKS + if ( + newIntService.type === IntegrationType.DISCORD && + integration.tenant.plan === TenantPlans.Essential + ) { + // not triggering discord integrations for essential plan, only paid plans + logger.info( + { integrationId: integration.id }, + 'Not triggering new integration check for Discord for essential plan!', + ) + // eslint-disable-next-line no-continue + continue + } + // Divide integrations into chunks for Discord if (newIntService.type === IntegrationType.DISCORD) { setTimeout(async () => { diff --git a/services/libs/integrations/src/integrations/discord/index.ts b/services/libs/integrations/src/integrations/discord/index.ts index 7ff7911ab5..c3b3285ae5 100644 --- a/services/libs/integrations/src/integrations/discord/index.ts +++ b/services/libs/integrations/src/integrations/discord/index.ts @@ -9,6 +9,7 @@ import processWebhookStream from './processWebhookStream' const descriptor: IIntegrationDescriptor = { type: PlatformType.DISCORD, memberAttributes: DISCORD_MEMBER_ATTRIBUTES, + checkEvery: 3 * 60, // 3 hours generateStreams, processStream, processData, From 78e6ea1b9ad648243754b83c03a781a01d1ccf57 Mon Sep 17 00:00:00 2001 From: anilb Date: Wed, 13 Dec 2023 16:35:40 +0100 Subject: [PATCH 026/185] Better organization existence checks and organizations without activities now synced to opensearch (#1939) Co-authored-by: Gasper Grom --- backend/package.json | 4 +- .../generate-merge-suggestions-synchronous.ts | 102 ++++++++++ .../scripts/merge-similar-organizations.ts | 177 ++++++++++++++++++ .../repositories/organizationRepository.ts | 175 +++++++++++------ backend/src/services/organizationService.ts | 2 +- .../enrichment/memberEnrichmentService.ts | 23 ++- .../organization/config/saved-views/main.ts | 2 + .../settings/hasActivities/config.ts | 16 ++ .../saved-views/views/all-organizations.ts | 1 + .../opensearch/src/repo/organization.repo.ts | 13 +- 10 files changed, 438 insertions(+), 77 deletions(-) create mode 100644 backend/src/bin/scripts/generate-merge-suggestions-synchronous.ts create mode 100644 backend/src/bin/scripts/merge-similar-organizations.ts create mode 100644 frontend/src/modules/organization/config/saved-views/settings/hasActivities/config.ts diff --git a/backend/package.json b/backend/package.json index 49bb3280f5..451ef73296 100644 --- a/backend/package.json +++ 
b/backend/package.json @@ -39,11 +39,13 @@ "script:enrich-members-organizations": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/enrich-members-and-organizations.ts", "script:enrich-organizations": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/enrich-organizations-synchronous.ts", "script:generate-merge-suggestions": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/generate-merge-suggestions.ts", + "script:generate-merge-suggestions-synchronous": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/generate-merge-suggestions-synchronous.ts", "script:merge-organizations": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/merge-organizations.ts", "script:get-member-enrichment-data": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/get-member-enrichment-data.ts", "script:get-organization-enrichment-data": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/get-organization-enrichment-data.ts", "script:refresh-materialized-views": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/refresh-materialized-views.ts", - "script:unmerge-members": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/unmerge-members.ts" + "script:unmerge-members": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/unmerge-members.ts", + "script:merge-similar-organizations": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/scripts/merge-similar-organizations.ts" }, "dependencies": { "@aws-sdk/client-comprehend": "^3.159.0", diff --git a/backend/src/bin/scripts/generate-merge-suggestions-synchronous.ts b/backend/src/bin/scripts/generate-merge-suggestions-synchronous.ts new file mode 100644 index 0000000000..3b5d4e614f --- /dev/null +++ b/backend/src/bin/scripts/generate-merge-suggestions-synchronous.ts @@ -0,0 +1,102 @@ +import commandLineArgs from 'command-line-args' +import commandLineUsage from 'command-line-usage' +import { getOpensearchClient } from '@crowd/opensearch' +import { OrganizationMergeSuggestionType } from '@crowd/types' +import * as fs from 'fs' +import path from 'path' +import { IRepositoryOptions } from '@/database/repositories/IRepositoryOptions' +import getUserContext from '@/database/utils/getUserContext' +import SegmentService from '@/services/segmentService' +import { OPENSEARCH_CONFIG } from '@/conf' +import OrganizationService from '@/services/organizationService' +import TenantService from '@/services/tenantService' + +/* eslint-disable no-console */ + +const banner = fs.readFileSync(path.join(__dirname, 'banner.txt'), 'utf8') + +const options = [ + { + name: 'tenant', + alias: 't', + type: String, + description: + 'The unique ID of the tenant that you would like to generate merge suggestions for.', + }, + { + name: 'plan', + alias: 'p', + type: String, + description: + 'Comma-separated plans - works with the allTenants flag. Only generate suggestions for tenants with specific plans. 
Available plans: Growth, Scale, Enterprise', + }, + { + name: 'allTenants', + alias: 'a', + type: Boolean, + defaultValue: false, + description: 'Set this flag to generate merge suggestions for all tenants.', + }, + { + name: 'help', + alias: 'h', + type: Boolean, + description: 'Print this usage guide.', + }, +] +const sections = [ + { + content: banner, + raw: true, + }, + { + header: 'Generate merge suggestions for a tenant', + content: 'Generate merge suggestions for a tenant', + }, + { + header: 'Options', + optionList: options, + }, +] + +const usage = commandLineUsage(sections) +const parameters = commandLineArgs(options) + +if (parameters.help || (!parameters.tenant && !parameters.allTenants)) { + console.log(usage) +} else { + setImmediate(async () => { + let tenantIds + + if (parameters.allTenants) { + tenantIds = (await TenantService._findAndCountAllForEveryUser({})).rows + if (parameters.plan) { + tenantIds = tenantIds.filter((tenant) => parameters.plan.split(',').includes(tenant.plan)) + } + tenantIds = tenantIds.map((t) => t.id) + } else if (parameters.tenant) { + tenantIds = parameters.tenant.split(',') + } else { + tenantIds = [] + } + + for (const tenantId of tenantIds) { + const userContext: IRepositoryOptions = await getUserContext(tenantId) + const segmentService = new SegmentService(userContext) + const { rows: segments } = await segmentService.querySubprojects({}) + userContext.currentSegments = segments + userContext.opensearch = getOpensearchClient(OPENSEARCH_CONFIG) + + console.log(`Generating organization merge suggestions for tenant ${tenantId}!`) + + const organizationService = new OrganizationService(userContext) + await organizationService.generateMergeSuggestions( + OrganizationMergeSuggestionType.BY_IDENTITY, + ) + + console.log(`Done generating organization merge suggestions for tenant ${tenantId}!`) + } + + process.exit(0) + }) +} diff --git a/backend/src/bin/scripts/merge-similar-organizations.ts b/backend/src/bin/scripts/merge-similar-organizations.ts new file mode 100644 index 0000000000..5199f4c4ab --- /dev/null +++ b/backend/src/bin/scripts/merge-similar-organizations.ts @@ -0,0 +1,177 @@ +import commandLineArgs from 'command-line-args' +import commandLineUsage from 'command-line-usage' +import { QueryTypes } from 'sequelize' +import * as fs from 'fs' +import path from 'path' +import SequelizeRepository from '../../database/repositories/sequelizeRepository' +import TenantService from '@/services/tenantService' +import OrganizationService from '@/services/organizationService' +import getUserContext from '@/database/utils/getUserContext' +import { IRepositoryOptions } from '@/database/repositories/IRepositoryOptions' +import { + MergeActionState, + MergeActionType, + MergeActionsRepository, +} from '@/database/repositories/mergeActionsRepository' + +/* eslint-disable no-console */ + +const banner = fs.readFileSync(path.join(__dirname, 'banner.txt'), 'utf8') + +const options = [ + { + name: 'tenant', + alias: 't', + type: String, + description: 'The unique ID of the tenant', + }, + { + name: 'allTenants', + alias: 'a', + type: Boolean, + defaultValue: false, + description: 'Set this flag to merge similar organizations for all tenants.', + }, + { + name: 'similarityThreshold', + alias: 's', + type: String, + defaultValue: false, + description: + 'Similarity threshold of organization merge suggestions. Suggestions lower than this value will not be merged. 
Defaults to 0.95', + }, + { + name: 'hardLimit', + alias: 'l', + type: String, + defaultValue: false, + description: `Hard limit for # of organizations that'll be merged. Mostly a flag for testing purposes.`, + }, + { + name: 'help', + alias: 'h', + type: Boolean, + description: 'Print this usage guide.', + }, +] +const sections = [ + { + content: banner, + raw: true, + }, + { + header: 'Merge organizations with similarity higher than given threshold.', + content: 'Merge organizations with similarity higher than given threshold.', + }, + { + header: 'Options', + optionList: options, + }, +] + +const usage = commandLineUsage(sections) +const parameters = commandLineArgs(options) + +if (parameters.help || (!parameters.tenant && !parameters.allTenants)) { + console.log(usage) +} else { + setImmediate(async () => { + const options = await SequelizeRepository.getDefaultIRepositoryOptions() + + let tenantIds + + if (parameters.allTenants) { + tenantIds = (await TenantService._findAndCountAllForEveryUser({})).rows.map((t) => t.id) + } else if (parameters.tenant) { + tenantIds = parameters.tenant.split(',') + } else { + tenantIds = [] + } + + for (const tenantId of tenantIds) { + const userContext: IRepositoryOptions = await getUserContext(tenantId) + const orgService = new OrganizationService(userContext) + + let hasMoreData = true + let counter = 0 + + while (hasMoreData) { + // find organization merge suggestions of tenant + const result = await options.database.sequelize.query( + ` + SELECT + "ot"."organizationId", + "ot"."toMergeId", + "ot".similarity, + "ot".status, + "org1"."displayName" AS "orgDisplayName", + "org2"."displayName" AS "mergeDisplayName" + FROM + "organizationToMerge" "ot" + LEFT JOIN + "organizations" "org1" + ON + "ot"."organizationId" = "org1"."id" + LEFT JOIN + "organizations" "org2" + ON + "ot"."toMergeId" = "org2"."id" + WHERE + ("ot".similarity > :similarityThreshold) AND + ("org1"."displayName" ilike "org2"."displayName") AND + ("org1"."tenantId" = :tenantId) AND + ("org2"."tenantId" = :tenantId) + ORDER BY + "ot".similarity DESC + LIMIT 100 + OFFSET :offset;`, + { + replacements: { + similarityThreshold: parameters.similarityThreshold || 0.95, + offset: 0, + tenantId, + }, + type: QueryTypes.SELECT, + }, + ) + + if (result.length === 0) { + hasMoreData = false + } else { + for (const row of result) { + try { + console.log( + `Merging [${row.organizationId}] "${row.orgDisplayName}" into ${row.toMergeId} "${row.mergeDisplayName}"...`, + ) + await MergeActionsRepository.add( + MergeActionType.ORG, + row.organizationId, + row.toMergeId, + userContext, + ) + await orgService.mergeSync(row.organizationId, row.toMergeId) + } catch (err) { + console.log('Error merging organizations - continuing with the rest', err) + await MergeActionsRepository.setState( + MergeActionType.ORG, + row.organizationId, + row.toMergeId, + MergeActionState.ERROR, + userContext, + ) + } + + if (parameters.hardLimit && counter >= parameters.hardLimit) { + console.log(`Hard limit of ${parameters.hardLimit} reached. 
Exiting...`) + process.exit(0) + } + + counter += 1 + } + } + } + } + + process.exit(0) + }) +} diff --git a/backend/src/database/repositories/organizationRepository.ts b/backend/src/database/repositories/organizationRepository.ts index 089d983b40..7c5a000a7d 100644 --- a/backend/src/database/repositories/organizationRepository.ts +++ b/backend/src/database/repositories/organizationRepository.ts @@ -40,6 +40,7 @@ interface IOrganizationPartialAggregatesOpensearch { string_name: string }[] uuid_arr_noMergeIds: string[] + keyword_displayName: string } } @@ -49,6 +50,7 @@ interface ISimilarOrganization { uuid_organizationId: string nested_identities: IOrganizationIdentityOpensearch[] nested_weakIdentities: IOrganizationIdentityOpensearch[] + keyword_displayName: string } } @@ -1189,6 +1191,7 @@ class OrganizationRepository { for (const primaryIdentity of primaryOrganization._source.nested_identities) { // similar organization has a weakIdentity as one of primary organization's strong identity, return score 95 if ( + similarOrganization._source.nested_weakIdentities && similarOrganization._source.nested_weakIdentities.length > 0 && similarOrganization._source.nested_weakIdentities.some( (weakIdentity) => @@ -1198,6 +1201,15 @@ class OrganizationRepository { ) { return 0.95 } + + // check displayName match + if ( + similarOrganization._source.keyword_displayName === + primaryOrganization._source.keyword_displayName + ) { + return 0.98 + } + for (const secondaryIdentity of similarOrganization._source.nested_identities) { const currentLevenstheinDistance = getLevenshteinDistance( primaryIdentity.string_name, @@ -1233,7 +1245,12 @@ class OrganizationRepository { collapse: { field: 'uuid_organizationId', }, - _source: ['uuid_organizationId', 'nested_identities', 'uuid_arr_noMergeIds'], + _source: [ + 'uuid_organizationId', + 'nested_identities', + 'uuid_arr_noMergeIds', + 'keyword_displayName', + ], } let organizations: IOrganizationPartialAggregatesOpensearch[] = [] @@ -1244,25 +1261,6 @@ class OrganizationRepository { queryBody.query = { bool: { filter: [ - { - bool: { - should: [ - { - range: { - int_activityCount: { - gt: 0, - }, - }, - }, - { - term: { - bool_manuallyCreated: true, - }, - }, - ], - minimum_should_match: 1, - }, - }, { term: { uuid_tenantId: tenant.id, @@ -1282,25 +1280,6 @@ class OrganizationRepository { queryBody.query = { bool: { filter: [ - { - bool: { - should: [ - { - range: { - int_activityCount: { - gt: 0, - }, - }, - }, - { - term: { - bool_manuallyCreated: true, - }, - }, - ], - minimum_should_match: 1, - }, - }, { term: { uuid_tenantId: tenant.id, @@ -1330,6 +1309,11 @@ class OrganizationRepository { ) { const identitiesPartialQuery = { should: [ + { + term: { + [`keyword_displayName`]: organization._source.keyword_displayName, + }, + }, { nested: { path: 'nested_weakIdentities', @@ -1369,25 +1353,6 @@ class OrganizationRepository { uuid_tenantId: tenant.id, }, }, - { - bool: { - should: [ - { - range: { - int_activityCount: { - gt: 0, - }, - }, - }, - { - term: { - bool_manuallyCreated: true, - }, - }, - ], - minimum_should_match: 1, - }, - }, ], } @@ -1396,7 +1361,7 @@ class OrganizationRepository { for (const identity of organization._source.nested_identities) { if (identity.string_name.length > 0) { // weak identity search - identitiesPartialQuery.should[0].nested.query.bool.should.push({ + identitiesPartialQuery.should[1].nested.query.bool.should.push({ bool: { must: [ { match: { [`nested_weakIdentities.keyword_name`]: identity.string_name } }, @@ -1417,7 
+1382,7 @@ class OrganizationRepository { if (Number.isNaN(Number(identity.string_name))) { hasFuzzySearch = true // fuzzy search for identities - identitiesPartialQuery.should[1].nested.query.bool.should.push({ + identitiesPartialQuery.should[2].nested.query.bool.should.push({ match: { [`nested_identities.keyword_name`]: { query: cleanedIdentityName, @@ -1429,7 +1394,7 @@ class OrganizationRepository { // also check for prefix for identities that has more than 5 characters and no whitespace if (identity.string_name.length > 5 && identity.string_name.indexOf(' ') === -1) { - identitiesPartialQuery.should[1].nested.query.bool.should.push({ + identitiesPartialQuery.should[2].nested.query.bool.should.push({ prefix: { [`nested_identities.keyword_name`]: { value: cleanedIdentityName.slice(0, prefixLength(cleanedIdentityName)), @@ -1468,7 +1433,12 @@ class OrganizationRepository { collapse: { field: 'uuid_organizationId', }, - _source: ['uuid_organizationId', 'nested_identities', 'nested_weakIdentities'], + _source: [ + 'uuid_organizationId', + 'nested_identities', + 'nested_weakIdentities', + 'keyword_displayName', + ], } const organizationsToMerge: ISimilarOrganization[] = @@ -1624,6 +1594,87 @@ class OrganizationRepository { return segments } + static async findByIdentities( + identities: IOrganizationIdentity[], + options: IRepositoryOptions, + ): Promise { + const transaction = SequelizeRepository.getTransaction(options) + const sequelize = SequelizeRepository.getSequelize(options) + const currentTenant = SequelizeRepository.getCurrentTenant(options) + + const identityConditions = identities + .map( + (identity, index) => ` + (oi.platform = :platform${index} and oi.name = :name${index}) + `, + ) + .join(' or ') + + const results = await sequelize.query( + ` + with + "organizationsWithIdentity" as ( + select oi."organizationId" + from "organizationIdentities" oi + where ${identityConditions} + ), + "organizationsWithCounts" as ( + select o.id, count(oi."organizationId") as total_counts + from organizations o + join "organizationIdentities" oi on o.id = oi."organizationId" + where o.id in (select "organizationId" from "organizationsWithIdentity") + group by o.id + ) + select o.id, + o.description, + o.emails, + o.logo, + o.tags, + o.github, + o.twitter, + o.linkedin, + o.crunchbase, + o.employees, + o.location, + o.website, + o.type, + o.size, + o.headline, + o.industry, + o.founded, + o.attributes + from organizations o + inner join "organizationsWithCounts" oc on o.id = oc.id + where o."tenantId" = :tenantId + order by oc.total_counts desc + limit 1; + `, + { + replacements: { + tenantId: currentTenant.id, + ...identities.reduce( + (acc, identity, index) => ({ + ...acc, + [`platform${index}`]: identity.platform, + [`name${index}`]: identity.name, + }), + {}, + ), + }, + type: QueryTypes.SELECT, + transaction, + }, + ) + + if (results.length === 0) { + return null + } + + const result = results[0] as IOrganization + + return result + } + static async findByIdentity( identity: IOrganizationIdentity, options: IRepositoryOptions, diff --git a/backend/src/services/organizationService.ts b/backend/src/services/organizationService.ts index 01871d70d1..a1f750a54a 100644 --- a/backend/src/services/organizationService.ts +++ b/backend/src/services/organizationService.ts @@ -498,7 +498,7 @@ export default class OrganizationService extends LoggerBase { } if (!existing) { - existing = await OrganizationRepository.findByIdentity(primaryIdentity, this.options) + existing = await 
OrganizationRepository.findByIdentities(data.identities, this.options) } if (existing) { diff --git a/backend/src/services/premium/enrichment/memberEnrichmentService.ts b/backend/src/services/premium/enrichment/memberEnrichmentService.ts index ae47c87587..ba42cd974f 100644 --- a/backend/src/services/premium/enrichment/memberEnrichmentService.ts +++ b/backend/src/services/premium/enrichment/memberEnrichmentService.ts @@ -13,6 +13,7 @@ import { PlatformType, OrganizationSource, SyncMode, + IOrganizationIdentity, } from '@crowd/types' import { ENRICHMENT_CONFIG, REDIS_CONFIG } from '../../../conf' import { AttributeData } from '../../../database/attributes/attribute' @@ -352,14 +353,24 @@ export default class MemberEnrichmentService extends LoggerBase { const organizationService = new OrganizationService(options) if (enrichmentData.work_experiences) { for (const workExperience of enrichmentData.work_experiences) { + const organizationIdentities: IOrganizationIdentity[] = [ + { + name: workExperience.company, + platform: PlatformType.ENRICHMENT, + }, + ] + + if (workExperience.companyLinkedInUrl) { + organizationIdentities.push({ + name: workExperience.companyLinkedInUrl.split('/').pop(), + platform: PlatformType.LINKEDIN, + url: workExperience.companyLinkedInUrl, + }) + } + const org = await organizationService.createOrUpdate( { - identities: [ - { - name: workExperience.company, - platform: PlatformType.ENRICHMENT, - }, - ], + identities: organizationIdentities, }, { doSync: true, diff --git a/frontend/src/modules/organization/config/saved-views/main.ts b/frontend/src/modules/organization/config/saved-views/main.ts index b3b23e6657..78d0c8b528 100644 --- a/frontend/src/modules/organization/config/saved-views/main.ts +++ b/frontend/src/modules/organization/config/saved-views/main.ts @@ -2,11 +2,13 @@ import { SavedViewsConfig } from '@/shared/modules/saved-views/types/SavedViewsC import allOrganizations from './views/all-organizations'; import teamOrganization from './settings/teamOrganization/config'; +import hasActivities from './settings/hasActivities/config'; export const organizationSavedViews: SavedViewsConfig = { defaultView: allOrganizations, settings: { teamOrganization, + hasActivities, }, sorting: { displayName: 'Organization', diff --git a/frontend/src/modules/organization/config/saved-views/settings/hasActivities/config.ts b/frontend/src/modules/organization/config/saved-views/settings/hasActivities/config.ts new file mode 100644 index 0000000000..847f5f6a3c --- /dev/null +++ b/frontend/src/modules/organization/config/saved-views/settings/hasActivities/config.ts @@ -0,0 +1,16 @@ +import { SavedViewsSetting } from '@/shared/modules/saved-views/types/SavedViewsConfig'; + +const hasActivities: SavedViewsSetting = { + inSettings: false, + defaultValue: true, + queryUrlParser(value: string): boolean { + return value === 'true'; + }, + apiFilterRenderer(): any[] { + return [ + { activityCount: { gt: 0 } }, + ]; + }, +}; + +export default hasActivities; diff --git a/frontend/src/modules/organization/config/saved-views/views/all-organizations.ts b/frontend/src/modules/organization/config/saved-views/views/all-organizations.ts index b9b87a403e..2ebc9ff5c3 100644 --- a/frontend/src/modules/organization/config/saved-views/views/all-organizations.ts +++ b/frontend/src/modules/organization/config/saved-views/views/all-organizations.ts @@ -14,6 +14,7 @@ const allOrganizations: SavedView = { }, settings: { teamOrganization: 'exclude', + hasActivities: 'true', }, }, }; diff --git 
a/services/libs/opensearch/src/repo/organization.repo.ts b/services/libs/opensearch/src/repo/organization.repo.ts index 0130eeccf8..29095f2197 100644 --- a/services/libs/opensearch/src/repo/organization.repo.ts +++ b/services/libs/opensearch/src/repo/organization.repo.ts @@ -250,7 +250,7 @@ export class OrganizationRepository extends RepositoryBase { const results = await this.db().any( ` - select distinct a."segmentId", a."organizationId" - from activities a - where a."organizationId" in ($(ids:csv)); + select distinct mo."organizationId", a."segmentId" + from "memberOrganizations" mo + inner join activities a on mo."memberId" = a."memberId" + where mo."organizationId" in ($(ids:csv)); `, { ids, From 01d7d62833233191508886f18f4538eec637579e Mon Sep 17 00:00:00 2001 From: Igor Kotua <36304232+garrrikkotua@users.noreply.github.com> Date: Wed, 13 Dec 2023 19:03:23 +0300 Subject: [PATCH 027/185] Update script path for trigger-all-webhooks (#1965) --- services/apps/integration_stream_worker/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/apps/integration_stream_worker/package.json b/services/apps/integration_stream_worker/package.json index 629c57c3ba..a7016eb9d0 100644 --- a/services/apps/integration_stream_worker/package.json +++ b/services/apps/integration_stream_worker/package.json @@ -16,7 +16,7 @@ "script:process-webhook": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/process-webhook.ts", "script:process-all-streams": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/process-all-streams.ts", "script:process-all-webhooks": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/process-all-webhooks.ts", - "script:trigger-all-failed-webhooks": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/process-all-failed-webhooks.ts", + "script:trigger-all-failed-webhooks": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/trigger-all-failed-webhooks.ts", "script:trigger-webhook": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/process-webhook.ts", "script:trigger-all-streams": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/trigger-all-streams.ts", "script:trigger-all-streams-for-integration": "SERVICE=script TS_NODE_TRANSPILE_ONLY=true node -r tsconfig-paths/register -r ts-node/register src/bin/trigger-all-streams-for-integration.ts", From 3e54b476ead183928b6d46ab76b2889d23f0915b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Uro=C5=A1=20Marolt?= Date: Thu, 14 Dec 2023 08:16:06 +0100 Subject: [PATCH 028/185] bugfix --- services/apps/webhook_api/src/middleware/emitters.ts | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/services/apps/webhook_api/src/middleware/emitters.ts b/services/apps/webhook_api/src/middleware/emitters.ts index 72b82099df..f6a5e87f7e 100644 --- a/services/apps/webhook_api/src/middleware/emitters.ts +++ b/services/apps/webhook_api/src/middleware/emitters.ts @@ -8,11 +8,14 @@ export interface IEmittersRequest { } export const emittersMiddleware = ( - integrationStreamWorkerEmitter: IntegrationStreamWorkerEmitter, + integrationStreamWorker: IntegrationStreamWorkerEmitter, ): RequestHandler => { return (req: Request, _res: Response, next: NextFunction) 
=> { // eslint-disable-next-line @typescript-eslint/no-explicit-any - ;(req as any).integrationStreamWorker = integrationStreamWorkerEmitter + ;(req as any).emitters = { + integrationStreamWorker, + } + + next() } } From 508765474e24efb70af5aba3165e9a083f771fae Mon Sep 17 00:00:00 2001 From: Igor Kotua <36304232+garrrikkotua@users.noreply.github.com> Date: Thu, 14 Dec 2023 19:19:08 +0300 Subject: [PATCH 029/185] Fix member add handler in discord-ws (#1967) --- backend/package.json | 4 +-- backend/src/bin/discord-ws.ts | 21 +++++++------- pnpm-lock.yaml | 29 ++++++++++++++----- .../src/integrations/discord/api/getMember.ts | 11 +++++++ 4 files changed, 45 insertions(+), 20 deletions(-) diff --git a/backend/package.json b/backend/package.json index 451ef73296..97b814c8ea 100644 --- a/backend/package.json +++ b/backend/package.json @@ -56,6 +56,7 @@ "@aws-sdk/util-format-url": "^3.226.0", "@crowd/alerting": "file:../services/libs/alerting", "@crowd/common": "file:../services/libs/common", + "@crowd/common_services": "file:../services/libs/common_services", "@crowd/cubejs": "file:../services/libs/cubejs", "@crowd/feature-flags": "file:../services/libs/feature-flags", "@crowd/integrations": "file:../services/libs/integrations", @@ -67,7 +68,6 @@ "@crowd/temporal": "file:../services/libs/temporal", "@crowd/tracing": "file:../services/libs/tracing", "@crowd/types": "file:../services/libs/types", - "@crowd/common_services": "file:../services/libs/common_services", "@cubejs-client/core": "^0.30.4", "@google-cloud/storage": "5.3.0", "@octokit/auth-app": "^3.6.1", @@ -99,7 +99,7 @@ "cron-time-generator": "^1.3.0", "crowd-sentiment": "^1.1.7", "crypto-js": "^4.1.1", - "discord.js": "^14.7.1", + "discord.js": "^14.14.1", "dotenv": "8.2.0", "dotenv-expand": "^8.0.3", "emoji-dictionary": "^1.0.11", diff --git a/backend/src/bin/discord-ws.ts b/backend/src/bin/discord-ws.ts index 98cda5c245..ac94e2ebce 100644 --- a/backend/src/bin/discord-ws.ts +++ b/backend/src/bin/discord-ws.ts @@ -140,25 +140,24 @@ async function spawnClient( }) // listen to discord events - client.on(Events.GuildMemberAdd, async (m) => { - const member = m as any + client.on(Events.GuildMemberAdd, async (member) => { + // discord.js is cruel. The member object here is typed, + // but it has custom toString and toJSON methods + // and when you print and JSON.stringify it + // the structure turns out to be different await executeIfNotExists( - `member-${member.userId}`, + `discord-ws-member-${member.user.id}-${member.guild.id}`, cache, async () => { logger.debug( { member: member.displayName, - guildId: member.guildId ?? member.guild.id, - userId: member.userId, + guildId: member.guild.id, + userId: member.user.id, }, 'Member joined guild!', ) - await processPayload( - DiscordWebsocketEvent.MEMBER_ADDED, - member, - member.guildId ?? 
member.guild.id, - ) + await processPayload(DiscordWebsocketEvent.MEMBER_ADDED, member, member.guild.id) }, delayMilliseconds, ) @@ -167,7 +166,7 @@ async function spawnClient( client.on(Events.MessageCreate, async (message) => { if (message.type === MessageType.Default || message.type === MessageType.Reply) { await executeIfNotExists( - `msg-${message.id}`, + `discord-ws-msg-${message.id}`, cache, async () => { logger.debug( diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8c654133e8..7bead25974 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -162,7 +162,7 @@ importers: specifier: ^4.1.1 version: 4.2.0 discord.js: - specifier: ^14.7.1 + specifier: ^14.14.1 version: 14.14.1(bufferutil@4.0.8)(utf-8-validate@5.0.10) dotenv: specifier: 8.2.0 @@ -16657,7 +16657,7 @@ packages: https-proxy-agent: 5.0.1 node-fetch: 2.7.0 stream-events: 1.0.5 - uuid: 8.3.0 + uuid: 8.3.2 transitivePeerDependencies: - encoding - supports-color @@ -17414,11 +17414,6 @@ packages: hasBin: true dev: false - /uuid@8.3.0: - resolution: {integrity: sha512-fX6Z5o4m6XsXBdli9g7DtWgAx+osMsRRZFKma1mIUsLCz6vRvv+pz5VNbyu9UEDzpMWulZfvpgb/cmDXVulYFQ==} - hasBin: true - dev: false - /uuid@8.3.2: resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} hasBin: true @@ -17921,6 +17916,7 @@ packages: resolution: {directory: services/archetypes/consumer, type: directory} id: file:services/archetypes/consumer name: '@crowd/archetype-consumer' + version: 1.0.0 dependencies: '@crowd/archetype-standard': file:services/archetypes/standard(@swc/core@1.3.100)(@types/node@20.10.1)(typescript@5.3.2) '@temporalio/client': 1.8.6 @@ -17943,6 +17939,7 @@ packages: resolution: {directory: services/archetypes/standard, type: directory} id: file:services/archetypes/standard name: '@crowd/archetype-standard' + version: 1.0.0 dependencies: '@crowd/feature-flags': file:services/libs/feature-flags '@crowd/integrations': file:services/libs/integrations @@ -17971,6 +17968,7 @@ packages: resolution: {directory: services/archetypes/worker, type: directory} id: file:services/archetypes/worker name: '@crowd/archetype-worker' + version: 1.0.0 dependencies: '@crowd/archetype-standard': file:services/archetypes/standard(@swc/core@1.3.100)(@types/node@20.10.1)(typescript@5.3.2) '@crowd/database': file:services/libs/database @@ -18000,6 +17998,7 @@ packages: file:services/libs/alerting: resolution: {directory: services/libs/alerting, type: directory} name: '@crowd/alerting' + version: 1.0.0 dependencies: '@types/node': 20.10.1 '@typescript-eslint/eslint-plugin': 5.62.0(@typescript-eslint/parser@5.62.0)(eslint@8.54.0)(typescript@5.3.2) @@ -18018,6 +18017,7 @@ packages: file:services/libs/common: resolution: {directory: services/libs/common, type: directory} name: '@crowd/common' + version: 1.0.0 dependencies: '@crowd/logging': file:services/libs/logging '@crowd/types': file:services/libs/types @@ -18031,6 +18031,7 @@ packages: file:services/libs/common_services: resolution: {directory: services/libs/common_services, type: directory} name: '@crowd/common_services' + version: 1.0.0 dependencies: '@crowd/common': file:services/libs/common '@crowd/database': file:services/libs/database @@ -18050,6 +18051,7 @@ packages: file:services/libs/conversations: resolution: {directory: services/libs/conversations, type: directory} name: '@crowd/conversations' + version: 1.0.0 dependencies: '@crowd/common': file:services/libs/common '@crowd/database': file:services/libs/database @@ -18065,6 +18067,7 @@ packages: 
resolution: {directory: services/libs/cubejs, type: directory} id: file:services/libs/cubejs name: '@crowd/cubejs' + version: 1.0.0 dependencies: '@crowd/common': file:services/libs/common '@crowd/logging': file:services/libs/logging @@ -18082,6 +18085,7 @@ packages: file:services/libs/database: resolution: {directory: services/libs/database, type: directory} name: '@crowd/database' + version: 1.0.0 dependencies: '@crowd/common': file:services/libs/common '@crowd/logging': file:services/libs/logging @@ -18094,6 +18098,7 @@ packages: file:services/libs/feature-flags: resolution: {directory: services/libs/feature-flags, type: directory} name: '@crowd/feature-flags' + version: 1.0.0 dependencies: '@crowd/common': file:services/libs/common '@crowd/logging': file:services/libs/logging @@ -18108,6 +18113,7 @@ packages: file:services/libs/integrations: resolution: {directory: services/libs/integrations, type: directory} name: '@crowd/integrations' + version: 1.0.0 dependencies: '@crowd/common': file:services/libs/common '@crowd/logging': file:services/libs/logging @@ -18130,6 +18136,7 @@ packages: file:services/libs/logging: resolution: {directory: services/libs/logging, type: directory} name: '@crowd/logging' + version: 1.0.0 dependencies: '@crowd/common': file:services/libs/common '@crowd/tracing': file:services/libs/tracing @@ -18142,6 +18149,7 @@ packages: file:services/libs/opensearch: resolution: {directory: services/libs/opensearch, type: directory} name: '@crowd/opensearch' + version: 1.0.0 dependencies: '@crowd/database': file:services/libs/database '@crowd/logging': file:services/libs/logging @@ -18158,6 +18166,7 @@ packages: file:services/libs/redis: resolution: {directory: services/libs/redis, type: directory} name: '@crowd/redis' + version: 1.0.0 dependencies: '@crowd/common': file:services/libs/common '@crowd/logging': file:services/libs/logging @@ -18170,6 +18179,7 @@ packages: file:services/libs/sentiment: resolution: {directory: services/libs/sentiment, type: directory} name: '@crowd/sentiment' + version: 1.0.0 dependencies: '@aws-sdk/client-comprehend': 3.462.0 '@crowd/common': file:services/libs/common @@ -18182,6 +18192,7 @@ packages: file:services/libs/sqs: resolution: {directory: services/libs/sqs, type: directory} name: '@crowd/sqs' + version: 1.0.0 dependencies: '@aws-sdk/client-sqs': 3.462.0 '@aws-sdk/types': 3.460.0 @@ -18198,6 +18209,7 @@ packages: file:services/libs/telemetry: resolution: {directory: services/libs/telemetry, type: directory} name: '@crowd/telemetry' + version: 1.0.0 dependencies: '@crowd/logging': file:services/libs/logging dd-trace: 4.20.0 @@ -18208,6 +18220,7 @@ packages: file:services/libs/temporal: resolution: {directory: services/libs/temporal, type: directory} name: '@crowd/temporal' + version: 1.0.0 dependencies: '@crowd/logging': file:services/libs/logging '@temporalio/client': 1.8.6 @@ -18220,6 +18233,7 @@ packages: file:services/libs/tracing: resolution: {directory: services/libs/tracing, type: directory} name: '@crowd/tracing' + version: 1.0.0 dependencies: '@crowd/common': file:services/libs/common '@opentelemetry/api': 1.6.0 @@ -18243,6 +18257,7 @@ packages: file:services/libs/types: resolution: {directory: services/libs/types, type: directory} name: '@crowd/types' + version: 1.0.0 dev: false github.com/clearbit/needle/84d28b5f2c3916db1e7eb84aeaa9d976cc40054b: diff --git a/services/libs/integrations/src/integrations/discord/api/getMember.ts b/services/libs/integrations/src/integrations/discord/api/getMember.ts index 7af5705ab3..a33cbb7a2e 
100644 --- a/services/libs/integrations/src/integrations/discord/api/getMember.ts +++ b/services/libs/integrations/src/integrations/discord/api/getMember.ts @@ -10,6 +10,17 @@ export const getMember = async ( token: string, ctx: IProcessStreamContext, ): Promise => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof guildId !== 'string' || guildId.trim() === '') { + throw new Error('Invalid guildId') + } + if (typeof userId !== 'string' || userId.trim() === '') { + throw new Error('Invalid userId') + } + if (typeof token !== 'string' || token.trim() === '') { + throw new Error('Invalid token') + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any const config: AxiosRequestConfig = { method: 'get', From 005a79e2b20aa29f322cb594db087717e07fd927 Mon Sep 17 00:00:00 2001 From: Uros Marolt Date: Thu, 14 Dec 2023 21:47:55 +0100 Subject: [PATCH 030/185] fixed ci deploy (#1968) --- .github/actions/deploy-service/action.yaml | 22 +++++++++---------- .../workflows/lf-production-deploy-new.yaml | 4 ++-- .../lf-production-deploy-original.yaml | 10 ++++----- .../workflows/lf-staging-deploy-backend.yaml | 2 +- .../lf-staging-deploy-data-sink-worker.yaml | 2 +- ...taging-deploy-integration-data-worker.yaml | 2 +- ...staging-deploy-integration-run-worker.yaml | 2 +- ...ging-deploy-integration-stream-worker.yaml | 2 +- .../lf-staging-deploy-search-sync-worker.yaml | 2 +- .github/workflows/production-deploy-new.yaml | 2 ++ .../workflows/production-deploy-original.yaml | 5 +++++ .github/workflows/staging-deploy-backend.yaml | 2 +- .../staging-deploy-data-sink-worker.yaml | 2 +- ...taging-deploy-integration-data-worker.yaml | 2 +- ...staging-deploy-integration-run-worker.yaml | 2 +- ...ging-deploy-integration-stream-worker.yaml | 2 +- ...taging-deploy-integration-sync-worker.yaml | 2 +- .../staging-deploy-search-sync-worker.yaml | 2 +- 18 files changed, 38 insertions(+), 31 deletions(-) diff --git a/.github/actions/deploy-service/action.yaml b/.github/actions/deploy-service/action.yaml index bfcd759ada..f21c4e915c 100644 --- a/.github/actions/deploy-service/action.yaml +++ b/.github/actions/deploy-service/action.yaml @@ -19,15 +19,10 @@ inputs: required: false default: 'false' - staging: - description: Is the service prioritized and deployed to staging? + cloud_env: + description: Which cloud environment are we deploying to? required: false - default: 'false' - - lfx: - description: Is the service prioritized and deployed to lfx? 
- required: false - default: 'false' + default: 'default' runs: using: composite @@ -46,7 +41,7 @@ runs: run: kubectl set image deployments/${{ inputs.service }}-dpl ${{ inputs.service }}=${{ inputs.image }} - name: Deploy image (prioritized - production) - if: inputs.prioritized == 'true' && inputs.lfx == 'false' + if: inputs.prioritized == 'true' && inputs.cloud_env == 'prod' shell: bash run: | kubectl set image deployments/${{ inputs.service }}-system-dpl ${{ inputs.service }}-system=${{ inputs.image }} @@ -55,7 +50,7 @@ runs: kubectl set image deployments/${{ inputs.service }}-urgent-dpl ${{ inputs.service }}-urgent=${{ inputs.image }} - name: Deploy image (prioritized - lfx production) - if: inputs.prioritized == 'true' && inputs.lfx == 'true' + if: inputs.prioritized == 'true' && inputs.cloud_env == 'lfx_prod' shell: bash run: | kubectl set image deployments/${{ inputs.service }}-system-dpl ${{ inputs.service }}-system=${{ inputs.image }} @@ -63,7 +58,12 @@ runs: kubectl set image deployments/${{ inputs.service }}-high-dpl ${{ inputs.service }}-high=${{ inputs.image }} - name: Deploy image (prioritized - staging) - if: inputs.prioritized == 'true' && inputs.staging == 'true' + if: inputs.prioritized == 'true' && inputs.cloud_env == 'staging' + shell: bash + run: kubectl set image deployments/${{ inputs.service }}-normal-dpl ${{ inputs.service }}-normal=${{ inputs.image }} + + - name: Deploy image (prioritized - lfx staging) + if: inputs.prioritized == 'true' && inputs.cloud_env == 'lfx_staging' shell: bash run: kubectl set image deployments/${{ inputs.service }}-normal-dpl ${{ inputs.service }}-normal=${{ inputs.image }} diff --git a/.github/workflows/lf-production-deploy-new.yaml b/.github/workflows/lf-production-deploy-new.yaml index 90b2070b0b..649a90d6f5 100644 --- a/.github/workflows/lf-production-deploy-new.yaml +++ b/.github/workflows/lf-production-deploy-new.yaml @@ -208,7 +208,7 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push-search-sync-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - lfx: true + cloud_env: lfx_prod prioritized: true deploy-search-sync-api: @@ -246,7 +246,7 @@ jobs: service: integration-sync-worker image: ${{ needs.build-and-push-integration-sync-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - lfx: true + cloud_env: lfx_prod prioritized: true deploy-webhook-api: diff --git a/.github/workflows/lf-production-deploy-original.yaml b/.github/workflows/lf-production-deploy-original.yaml index 9290c21a6d..6e1b38125a 100644 --- a/.github/workflows/lf-production-deploy-original.yaml +++ b/.github/workflows/lf-production-deploy-original.yaml @@ -237,7 +237,7 @@ jobs: service: nodejs-worker image: ${{ needs.build-and-push-backend.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - lfx: true + cloud_env: lfx_prod prioritized: true deploy-discord-ws: @@ -293,7 +293,7 @@ jobs: service: integration-run-worker image: ${{ needs.build-and-push-integration-run-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - lfx: true + cloud_env: lfx_prod prioritized: true deploy-integration-stream-worker: @@ -313,7 +313,7 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push-integration-stream-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - lfx: true + cloud_env: lfx_prod prioritized: true deploy-integration-data-worker: @@ -333,7 +333,7 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push-integration-data-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - lfx: true + 
cloud_env: lfx_prod prioritized: true deploy-data-sink-worker: @@ -353,7 +353,7 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push-data-sink-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - lfx: true + cloud_env: lfx_prod prioritized: true deploy-frontend: diff --git a/.github/workflows/lf-staging-deploy-backend.yaml b/.github/workflows/lf-staging-deploy-backend.yaml index 05a4e5b953..d39f330fbc 100644 --- a/.github/workflows/lf-staging-deploy-backend.yaml +++ b/.github/workflows/lf-staging-deploy-backend.yaml @@ -76,7 +76,7 @@ jobs: service: nodejs-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: lfx_staging prioritized: true deploy-job-generator: diff --git a/.github/workflows/lf-staging-deploy-data-sink-worker.yaml b/.github/workflows/lf-staging-deploy-data-sink-worker.yaml index da04a6aca1..6dabe492a4 100644 --- a/.github/workflows/lf-staging-deploy-data-sink-worker.yaml +++ b/.github/workflows/lf-staging-deploy-data-sink-worker.yaml @@ -58,5 +58,5 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: lfx_staging prioritized: true diff --git a/.github/workflows/lf-staging-deploy-integration-data-worker.yaml b/.github/workflows/lf-staging-deploy-integration-data-worker.yaml index 0f8c72ee7b..74325f88d6 100644 --- a/.github/workflows/lf-staging-deploy-integration-data-worker.yaml +++ b/.github/workflows/lf-staging-deploy-integration-data-worker.yaml @@ -58,5 +58,5 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: lfx_staging prioritized: true diff --git a/.github/workflows/lf-staging-deploy-integration-run-worker.yaml b/.github/workflows/lf-staging-deploy-integration-run-worker.yaml index 265fb27a51..a18be6d287 100644 --- a/.github/workflows/lf-staging-deploy-integration-run-worker.yaml +++ b/.github/workflows/lf-staging-deploy-integration-run-worker.yaml @@ -58,5 +58,5 @@ jobs: service: integration-run-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: lfx_staging prioritized: true diff --git a/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml b/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml index 4388ddcf9f..1bc1cbb0fc 100644 --- a/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml +++ b/.github/workflows/lf-staging-deploy-integration-stream-worker.yaml @@ -58,5 +58,5 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: lfx_staging prioritized: true diff --git a/.github/workflows/lf-staging-deploy-search-sync-worker.yaml b/.github/workflows/lf-staging-deploy-search-sync-worker.yaml index a30b6861d0..391f0a01ab 100644 --- a/.github/workflows/lf-staging-deploy-search-sync-worker.yaml +++ b/.github/workflows/lf-staging-deploy-search-sync-worker.yaml @@ -58,5 +58,5 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: lfx_staging prioritized: true diff --git a/.github/workflows/production-deploy-new.yaml b/.github/workflows/production-deploy-new.yaml index 421b769bfc..9628bc5e1b 100644 --- a/.github/workflows/production-deploy-new.yaml +++ b/.github/workflows/production-deploy-new.yaml @@ 
-208,6 +208,7 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push-search-sync-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + cloud_env: prod prioritized: true deploy-search-sync-api: @@ -245,6 +246,7 @@ jobs: service: integration-sync-worker image: ${{ needs.build-and-push-integration-sync-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + cloud_env: prod prioritized: true deploy-webhook-api: diff --git a/.github/workflows/production-deploy-original.yaml b/.github/workflows/production-deploy-original.yaml index be5f892bf4..c681cc82ad 100644 --- a/.github/workflows/production-deploy-original.yaml +++ b/.github/workflows/production-deploy-original.yaml @@ -237,6 +237,7 @@ jobs: service: nodejs-worker image: ${{ needs.build-and-push-backend.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + cloud_env: prod prioritized: true deploy-discord-ws: @@ -292,6 +293,7 @@ jobs: service: integration-run-worker image: ${{ needs.build-and-push-integration-run-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + cloud_env: prod prioritized: true deploy-integration-stream-worker: @@ -311,6 +313,7 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push-integration-stream-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + cloud_env: prod prioritized: true deploy-integration-data-worker: @@ -330,6 +333,7 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push-integration-data-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + cloud_env: prod prioritized: true deploy-data-sink-worker: @@ -349,6 +353,7 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push-data-sink-worker.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} + cloud_env: prod prioritized: true deploy-frontend: diff --git a/.github/workflows/staging-deploy-backend.yaml b/.github/workflows/staging-deploy-backend.yaml index 3a40c176c9..a71dde5657 100644 --- a/.github/workflows/staging-deploy-backend.yaml +++ b/.github/workflows/staging-deploy-backend.yaml @@ -77,7 +77,7 @@ jobs: image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} prioritized: true - staging: true + cloud_env: staging deploy-job-generator: needs: build-and-push diff --git a/.github/workflows/staging-deploy-data-sink-worker.yaml b/.github/workflows/staging-deploy-data-sink-worker.yaml index f6cde19ac7..716aa7d184 100644 --- a/.github/workflows/staging-deploy-data-sink-worker.yaml +++ b/.github/workflows/staging-deploy-data-sink-worker.yaml @@ -58,5 +58,5 @@ jobs: service: data-sink-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: staging prioritized: true diff --git a/.github/workflows/staging-deploy-integration-data-worker.yaml b/.github/workflows/staging-deploy-integration-data-worker.yaml index d668ec3c2f..e5a5b5d586 100644 --- a/.github/workflows/staging-deploy-integration-data-worker.yaml +++ b/.github/workflows/staging-deploy-integration-data-worker.yaml @@ -58,5 +58,5 @@ jobs: service: integration-data-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: staging prioritized: true diff --git a/.github/workflows/staging-deploy-integration-run-worker.yaml b/.github/workflows/staging-deploy-integration-run-worker.yaml index bdb55a68b0..b066cf936f 100644 --- a/.github/workflows/staging-deploy-integration-run-worker.yaml +++ b/.github/workflows/staging-deploy-integration-run-worker.yaml @@ -58,5 +58,5 @@ 
jobs: service: integration-run-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: staging prioritized: true diff --git a/.github/workflows/staging-deploy-integration-stream-worker.yaml b/.github/workflows/staging-deploy-integration-stream-worker.yaml index a50fbda010..bb993f8f7f 100644 --- a/.github/workflows/staging-deploy-integration-stream-worker.yaml +++ b/.github/workflows/staging-deploy-integration-stream-worker.yaml @@ -58,5 +58,5 @@ jobs: service: integration-stream-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: staging prioritized: true diff --git a/.github/workflows/staging-deploy-integration-sync-worker.yaml b/.github/workflows/staging-deploy-integration-sync-worker.yaml index 214e53b440..2085a4a796 100644 --- a/.github/workflows/staging-deploy-integration-sync-worker.yaml +++ b/.github/workflows/staging-deploy-integration-sync-worker.yaml @@ -58,5 +58,5 @@ jobs: service: integration-sync-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: staging prioritized: true diff --git a/.github/workflows/staging-deploy-search-sync-worker.yaml b/.github/workflows/staging-deploy-search-sync-worker.yaml index eb10f4e83d..d74c4cbd36 100644 --- a/.github/workflows/staging-deploy-search-sync-worker.yaml +++ b/.github/workflows/staging-deploy-search-sync-worker.yaml @@ -58,5 +58,5 @@ jobs: service: search-sync-worker image: ${{ needs.build-and-push.outputs.image }} cluster: ${{ env.CROWD_CLUSTER }} - staging: true + cloud_env: staging prioritized: true From 32847a9438584454fcc23d586479e873a225c88d Mon Sep 17 00:00:00 2001 From: joanagmaia Date: Fri, 15 Dec 2023 07:58:31 +0000 Subject: [PATCH 031/185] Fix member copy and banner validation (#1966) --- frontend/src/modules/member/components/member-enrichment.vue | 2 +- frontend/src/modules/tenant/store/getters.js | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/frontend/src/modules/member/components/member-enrichment.vue b/frontend/src/modules/member/components/member-enrichment.vue index 4eeb2fa879..f043c84260 100644 --- a/frontend/src/modules/member/components/member-enrichment.vue +++ b/frontend/src/modules/member/components/member-enrichment.vue @@ -37,7 +37,7 @@ class="btn btn--primary btn--full !h-8" :disabled="isEditLockedForSampleData" @click="onEnrichmentClick" - >Enrich member + >Enrich contact ( From c66ceeb3e3e2853d21fc02dfa09c1ac86b2b42a5 Mon Sep 17 00:00:00 2001 From: joanagmaia Date: Fri, 15 Dec 2023 13:25:54 +0000 Subject: [PATCH 032/185] Fix identity display name in merge dialog (#1969) --- .../suggestions/organization-merge-suggestions-details.vue | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/modules/organization/components/suggestions/organization-merge-suggestions-details.vue b/frontend/src/modules/organization/components/suggestions/organization-merge-suggestions-details.vue index 2afb8af7ad..a59103262f 100644 --- a/frontend/src/modules/organization/components/suggestions/organization-merge-suggestions-details.vue +++ b/frontend/src/modules/organization/components/suggestions/organization-merge-suggestions-details.vue @@ -243,6 +243,7 @@ {{ getPlatformDetails(identity.platform)?.organization.handle(identity) + ?? identity.name ?? getPlatformDetails(identity.platform)?.name ?? identity.platform }}
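The one-line hunk above turns the merge-dialog identity label into a nullish-coalescing fallback chain. A minimal sketch of that resolution order, with simplified stand-in types (the `configs` map below is hypothetical; the real lookup goes through `CrowdIntegrations.getConfig`):

```ts
// Sketch of the display-name fallback added in organization-merge-suggestions-details.vue.
interface Identity {
  platform: string;
  name?: string;
  url?: string;
}

interface PlatformConfig {
  name?: string;
  organization?: { handle: (identity: Identity) => string | undefined };
}

// Hypothetical config registry standing in for getPlatformDetails(platform).
const configs: Record<string, PlatformConfig> = {
  github: { name: 'GitHub', organization: { handle: (i) => i.name?.toLowerCase() } },
};

function displayName(identity: Identity): string {
  const config = configs[identity.platform];
  // Same priority as the template hunk: platform-specific handle first,
  // then the raw identity name, then the platform's display name,
  // and finally the platform key itself.
  return (
    config?.organization?.handle(identity)
    ?? identity.name
    ?? config?.name
    ?? identity.platform
  );
}

displayName({ platform: 'github', name: 'CrowdDotDev' }); // "crowddotdev"
displayName({ platform: 'some-custom-source' });          // "some-custom-source"
```

With the added `?? identity.name`, identities from platforms without a configured handle resolver now degrade to the stored name instead of skipping straight to the platform label.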
From 0b4fa7dffd6472c12500ad909b465270b727adbb Mon Sep 17 00:00:00 2001 From: joanagmaia Date: Fri, 15 Dec 2023 13:28:37 +0000 Subject: [PATCH 033/185] Fix pre-selected remote filters (#1972) --- .../modules/activity/config/filters/organizations/config.ts | 4 ++-- .../src/modules/member/config/filters/organizations/config.ts | 4 ++-- .../organization/config/filters/organizations/config.ts | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/frontend/src/modules/activity/config/filters/organizations/config.ts b/frontend/src/modules/activity/config/filters/organizations/config.ts index 6e60f1a9ea..bb48600beb 100644 --- a/frontend/src/modules/activity/config/filters/organizations/config.ts +++ b/frontend/src/modules/activity/config/filters/organizations/config.ts @@ -19,7 +19,7 @@ const organizations: MultiSelectAsyncFilterConfig = { value: organization.id, logo: organization.logo, }))), - remotePopulateItems: (ids: string[]) => OrganizationService.listAutocomplete({ + remotePopulateItems: (ids: string[]) => OrganizationService.query({ filter: { and: [ ...DEFAULT_ORGANIZATION_FILTERS, @@ -28,7 +28,7 @@ const organizations: MultiSelectAsyncFilterConfig = { }, ], }, - orderBy: null, + orderBy: 'displayName_ASC', limit: ids.length, offset: 0, }) diff --git a/frontend/src/modules/member/config/filters/organizations/config.ts b/frontend/src/modules/member/config/filters/organizations/config.ts index f1379061f1..45368963a4 100644 --- a/frontend/src/modules/member/config/filters/organizations/config.ts +++ b/frontend/src/modules/member/config/filters/organizations/config.ts @@ -19,7 +19,7 @@ const organizations: MultiSelectAsyncFilterConfig = { value: organization.id, logo: organization.logo, }))), - remotePopulateItems: (ids: string[]) => OrganizationService.listAutocomplete({ + remotePopulateItems: (ids: string[]) => OrganizationService.query({ filter: { and: [ ...DEFAULT_ORGANIZATION_FILTERS, @@ -28,7 +28,7 @@ const organizations: MultiSelectAsyncFilterConfig = { }, ], }, - orderBy: null, + orderBy: 'displayName_ASC', limit: ids.length, offset: 0, }) diff --git a/frontend/src/modules/organization/config/filters/organizations/config.ts b/frontend/src/modules/organization/config/filters/organizations/config.ts index aa59fa6b5e..0445f203ea 100644 --- a/frontend/src/modules/organization/config/filters/organizations/config.ts +++ b/frontend/src/modules/organization/config/filters/organizations/config.ts @@ -19,7 +19,7 @@ const organizations: MultiSelectAsyncFilterConfig = { value: organization.id, logo: organization.logo, }))), - remotePopulateItems: (ids: string[]) => OrganizationService.listAutocomplete({ + remotePopulateItems: (ids: string[]) => OrganizationService.query({ filter: { and: [ ...DEFAULT_ORGANIZATION_FILTERS, @@ -28,7 +28,7 @@ const organizations: MultiSelectAsyncFilterConfig = { }, ], }, - orderBy: null, + orderBy: 'displayName_ASC', limit: ids.length, offset: 0, }) From 8577e4d780060eb20ef3275dceb0a05df2064e1d Mon Sep 17 00:00:00 2001 From: joanagmaia Date: Mon, 18 Dec 2023 13:51:51 +0000 Subject: [PATCH 034/185] Fix contacts query search in organization profile (#1975) --- .../view/organization-view-members.vue | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/frontend/src/modules/organization/components/view/organization-view-members.vue b/frontend/src/modules/organization/components/view/organization-view-members.vue index f5865195f5..d7e65ce8dc 100644 --- 
a/frontend/src/modules/organization/components/view/organization-view-members.vue +++ b/frontend/src/modules/organization/components/view/organization-view-members.vue @@ -1,5 +1,14 @@ @@ -444,17 +460,24 @@ @@ -482,17 +513,24 @@ @@ -525,17 +571,24 @@ @@ -626,15 +687,14 @@ +
+ + + + + + currentTenant.value?.plan !== Plans.values.essential); + const defaultSort = computed(() => ({ prop: 'lastActive', order: 'descending', @@ -826,6 +911,44 @@ const onActionBtnClick = (member) => { } }; +const setEnrichmentAttributesRef = (el, id) => { + if (el) { + enrichmentRefs.value[id] = el; + } +}; + +const handleCellMouseEnter = (row, column) => { + const validValues = ['reach', 'seniorityLevel', 'programmingLanguages', 'skills']; + + if (validValues.includes(column.property)) { + showEnrichmentPopover.value = true; + selectedEnrichmentAttribute.value = `${row.id}-${column.property}`; + } +}; + +const onColumnHeaderMouseOver = (id) => { + showEnrichmentPopover.value = true; + selectedEnrichmentAttribute.value = id; +}; + +const handleCellMouseLeave = (_row, column) => { + const validValues = ['reach', 'seniorityLevel', 'programmingLanguages', 'skills']; + + if (!validValues.includes(column.property)) { + closeEnrichmentPopover(); + } +}; + +const closeEnrichmentPopover = (ev) => { + if (ev?.toElement?.id !== 'popover-content') { + showEnrichmentPopover.value = false; + + setTimeout(() => { + selectedEnrichmentAttribute.value = null; + }, 100); + } +}; + const closeDropdown = () => { showMemberDropdownPopover.value = false; diff --git a/frontend/src/modules/member/components/view/_aside/_aside-custom-attributes.vue b/frontend/src/modules/member/components/view/_aside/_aside-custom-attributes.vue index 00ce64cdad..5ffd40fa43 100644 --- a/frontend/src/modules/member/components/view/_aside/_aside-custom-attributes.vue +++ b/frontend/src/modules/member/components/view/_aside/_aside-custom-attributes.vue @@ -6,7 +6,7 @@ Attributes -
-

- {{ attribute.label }} + + + + + + +

+
+

+ {{ attribute.label }} + + + +

- + -

- - - -
-
- +
- - + + + +
+

+ {{ + formattedComputedAttributeValue( + member.attributes[attribute.name].default, + ) + }} +

-

- {{ - formattedComputedAttributeValue( - member.attributes[attribute.name].default, - ) - }} -

+ +
+ +
+ currentTenant.value.plan !== Plans.values.essential); + const isEditLockedForSampleData = computed(() => new MemberPermissions( store.getters['auth/currentTenant'], store.getters['auth/currentUser'], ).editLockedForSampleData); -const computedCustomAttributes = computed(() => Object.values(customAttributes.value) - .filter((attribute) => ( - attribute.show - && ![ - 'bio', - 'url', - 'location', - 'emails', - 'jobTitle', - 'workExperiences', // we render them in _aside-work-experience - 'certifications', // we render them in _aside-work-certifications - 'education', // we render them in _aside-work-education - 'awards', // we render them in _aside-work-awards - ].includes(attribute.name) - && props.member.attributes[attribute.name] - )) - .sort((a, b) => { - if (props.member.attributes[a.name].enrich) { - return props.member.attributes[b.name].enrich - ? 0 - : -1; - } - return 1; - })); +const hiddenAttributes = ref([ + { + name: 'seniorityLevel', + label: 'Seniority level', + value: 'Senior', + show: true, + }, + { + name: 'programmingLanguages', + label: 'Programming languages', + value: 'Javascript, Java', + show: true, + }, + { + name: 'skills', + label: 'Skills', + show: true, + value: 'Web development', + }, + { + name: 'reach', + label: 'Reach', + show: false, + value: 150, + }, +]); + +const hiddenAttributeNames = computed(() => hiddenAttributes.value.map((att) => att.name)); + +const computedCustomAttributes = computed(() => { + const attributes = Object.values(customAttributes.value) + .filter((attribute) => ( + attribute.show + && ![ + 'bio', + 'url', + 'location', + 'emails', + 'jobTitle', + 'workExperiences', // we render them in _aside-work-experience + 'certifications', // we render them in _aside-work-certifications + 'education', // we render them in _aside-work-education + 'awards', // we render them in _aside-work-awards + ].includes(attribute.name) + && props.member.attributes[attribute.name] + )) + .sort((a, b) => { + if (props.member.attributes[a.name].enrich) { + return props.member.attributes[b.name].enrich + ? 0 + : -1; + } + return 1; + }); + const attributeNames = attributes.map((att) => att.name); + const staticAttributes = isEnrichmentEnabled.value + ? [] + : hiddenAttributes.value.filter((att) => att.show && !attributeNames.includes(att.name)); + return [ + ...staticAttributes, + ...attributes, + ]; +}); const attributesSameSource = computed(() => { const sources = computedCustomAttributes.value.map((attribute) => getAttributeSourceName(props.member.attributes[attribute.name])); diff --git a/frontend/src/modules/member/components/view/_aside/_aside-enriched.vue b/frontend/src/modules/member/components/view/_aside/_aside-enriched.vue index df58a687e5..5c902406a8 100644 --- a/frontend/src/modules/member/components/view/_aside/_aside-enriched.vue +++ b/frontend/src/modules/member/components/view/_aside/_aside-enriched.vue @@ -37,9 +37,8 @@ -
+
Certifications @@ -74,35 +73,38 @@ -
+
- Awards -
- - - -
- - - + + + +
+ + + +
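The `_aside-custom-attributes.vue` script earlier in this commit merges static teaser attributes into the real attribute list only when enrichment is unavailable on the current plan. A minimal sketch of that merge rule, with simplified attribute shapes (assumptions, not the store's exact types):

```ts
// Sketch of the teaser merge: on plans without enrichment, show placeholder
// rows for enrichable fields the member does not already have.
interface AttributeDef {
  name: string;
  label: string;
  show: boolean;
  value?: string | number;
}

const hiddenAttributes: AttributeDef[] = [
  { name: 'seniorityLevel', label: 'Seniority level', show: true, value: 'Senior' },
  { name: 'reach', label: 'Reach', show: false, value: 150 },
];

function mergedAttributes(
  realAttributes: AttributeDef[],
  isEnrichmentEnabled: boolean,
): AttributeDef[] {
  const names = realAttributes.map((a) => a.name);
  // Paid plans never see teasers; free plans see them only for attributes
  // that are flagged `show` and not already present on the member.
  const teasers = isEnrichmentEnabled
    ? []
    : hiddenAttributes.filter((a) => a.show && !names.includes(a.name));
  return [...teasers, ...realAttributes];
}

mergedAttributes([], false).map((a) => a.name); // ['seniorityLevel']
mergedAttributes([], true);                     // []
```

Keeping the teasers ahead of the real attributes mirrors the `[...staticAttributes, ...attributes]` spread in the component.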
diff --git a/frontend/src/modules/member/components/view/member-view-aside.vue b/frontend/src/modules/member/components/view/member-view-aside.vue index 290f9b9aa0..94bae2b226 100644 --- a/frontend/src/modules/member/components/view/member-view-aside.vue +++ b/frontend/src/modules/member/components/view/member-view-aside.vue @@ -18,7 +18,6 @@ />
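The member and organization list tables in this commit key their enrichment popover per table cell, so only one sneak-peek anchor is active at a time. A minimal sketch of that keying, assuming Vue refs and the member-table column whitelist shown above:

```ts
import { ref } from 'vue';

// Columns that carry enrichment-only data in the member list table.
const ENRICHABLE_COLUMNS = ['reach', 'seniorityLevel', 'programmingLanguages', 'skills'];

const showEnrichmentPopover = ref(false);
const selectedEnrichmentAttribute = ref<string | null>(null);

function handleCellMouseEnter(row: { id: string }, column: { property: string }) {
  if (ENRICHABLE_COLUMNS.includes(column.property)) {
    showEnrichmentPopover.value = true;
    // One key per cell, e.g. "42-reach", used to look up the anchor ref.
    selectedEnrichmentAttribute.value = `${row.id}-${column.property}`;
  }
}

function handleCellMouseLeave(_row: unknown, column: { property: string }) {
  // Leaving a non-enrichable cell means the pointer left the teaser area;
  // the real handler additionally ignores moves onto the popover element itself.
  if (!ENRICHABLE_COLUMNS.includes(column.property)) {
    showEnrichmentPopover.value = false;
    // Clearing the key after a short delay lets the popover close gracefully.
    setTimeout(() => {
      selectedEnrichmentAttribute.value = null;
    }, 100);
  }
}
```

The `el-table` `cell-mouse-enter`/`cell-mouse-leave` events supply the row and column objects consumed here.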
diff --git a/frontend/src/modules/member/components/view/member-view-contributions-cta.vue b/frontend/src/modules/member/components/view/member-view-contributions-cta.vue new file mode 100644 index 0000000000..245e89e65b --- /dev/null +++ b/frontend/src/modules/member/components/view/member-view-contributions-cta.vue @@ -0,0 +1,59 @@ + + + + + diff --git a/frontend/src/modules/member/components/view/member-view-header.vue b/frontend/src/modules/member/components/view/member-view-header.vue index d41b0ba0e8..a0078e3e78 100644 --- a/frontend/src/modules/member/components/view/member-view-header.vue +++ b/frontend/src/modules/member/components/view/member-view-header.vue @@ -68,28 +68,39 @@

-
-

- Reach -

- - - - - - - -
- -

- -

+ + +

@@ -133,6 +144,7 @@ import AppMemberMergeDialog from '@/modules/member/components/member-merge-dialo import AppMemberFindGithubDrawer from '@/modules/member/components/member-find-github-drawer.vue'; import AppSvg from '@/shared/svg/svg.vue'; import { getAttributeSourceName } from '@/shared/helpers/attribute.helpers'; +import CrEnrichmentSneakPeak from '@/shared/modules/enrichment/components/enrichment-sneak-peak.vue'; defineProps({ member: { @@ -151,6 +163,7 @@ const formattedInformation = (value, type) => { value === undefined || value === null || value === -1 + || value === '' // If the timestamp is 1970, we show "-" || (type === 'date' && moment(value).isBefore(moment().subtract(40, 'years'))) ) { diff --git a/frontend/src/modules/member/pages/member-view-page.vue b/frontend/src/modules/member/pages/member-view-page.vue index f84f8d64f6..8d5fbfb417 100644 --- a/frontend/src/modules/member/pages/member-view-page.vue +++ b/frontend/src/modules/member/pages/member-view-page.vue @@ -20,8 +20,11 @@

+ @@ -75,9 +78,11 @@ import AppMemberViewHeader from '@/modules/member/components/view/member-view-he import AppMemberViewAside from '@/modules/member/components/view/member-view-aside.vue'; import AppMemberViewNotes from '@/modules/member/components/view/member-view-notes.vue'; import AppMemberViewContributions from '@/modules/member/components/view/member-view-contributions.vue'; +import AppMemberViewContributionsCta from '@/modules/member/components/view/member-view-contributions-cta.vue'; import AppMemberViewTasks from '@/modules/member/components/view/member-view-tasks.vue'; import { useMemberStore } from '@/modules/member/store/pinia'; import { storeToRefs } from 'pinia'; +import Plans from '@/security/plans'; const store = useStore(); const props = defineProps({ @@ -94,6 +99,7 @@ const { customAttributes } = storeToRefs(memberStore); const { getMemberCustomAttributes } = memberStore; const member = computed(() => store.getters['member/find'](props.id) || {}); +const isEnrichmentEnabled = computed(() => currentTenant.value.plan !== Plans.values.essential); const isTaskLocked = computed( () => new TaskPermissions( diff --git a/frontend/src/modules/organization/components/list/organization-list-table.vue b/frontend/src/modules/organization/components/list/organization-list-table.vue index 368c7772c7..456567477b 100644 --- a/frontend/src/modules/organization/components/list/organization-list-table.vue +++ b/frontend/src/modules/organization/components/list/organization-list-table.vue @@ -88,6 +88,8 @@ :row-class-name="rowClass" @sort-change="doChangeSort" @selection-change="selectedOrganizations = $event" + @cell-mouse-enter="handleCellMouseEnter" + @cell-mouse-leave="handleCellMouseLeave" > @@ -429,24 +443,31 @@ prop="size" > @@ -463,27 +489,35 @@ @@ -505,12 +544,17 @@ sortable > @@ -541,27 +593,35 @@ @@ -578,35 +643,49 @@ @@ -660,15 +739,14 @@
+
+ + + + + + @@ -702,6 +795,9 @@ import revenueRange from '@/modules/organization/config/enrichment/revenueRange' import AppTagList from '@/modules/tag/components/tag-list.vue'; import { ClickOutside as vClickOutside } from 'element-plus'; import AppSvg from '@/shared/svg/svg.vue'; +import CrEnrichmentSneakPeakContent from '@/shared/modules/enrichment/components/enrichment-sneak-peak-content.vue'; +import { mapGetters } from '@/shared/vuex/vuex.helpers'; +import Plans from '@/security/plans'; import AppOrganizationIdentities from '../organization-identities.vue'; import AppOrganizationListToolbar from './organization-list-toolbar.vue'; import AppOrganizationName from '../organization-name.vue'; @@ -744,10 +840,17 @@ const isTableHovered = ref(false); const isCursorDown = ref(false); const showOrganizationDropdownPopover = ref(false); -const OrganizationDropdownPopover = ref(null); const actionBtnRefs = ref({}); const selectedActionOrganization = ref(null); +const showEnrichmentPopover = ref(false); +const enrichmentRefs = ref({}); +const selectedEnrichmentAttribute = ref(null); + +const { currentTenant } = mapGetters('auth'); + +const isEnrichEnabled = computed(() => currentTenant.value?.plan !== Plans.values.essential); + const pagination = computed({ get() { return props.pagination; @@ -814,6 +917,44 @@ const onActionBtnClick = (organization) => { } }; +const setEnrichmentAttributesRef = (el, id) => { + if (el) { + enrichmentRefs.value[id] = el; + } +}; + +const handleCellMouseEnter = (row, column) => { + const validValues = ['industry', 'size', 'revenueRange', 'founded', 'employeeGrowthRate', 'tags']; + + if (validValues.includes(column.property)) { + showEnrichmentPopover.value = true; + selectedEnrichmentAttribute.value = `${row.id}-${column.property}`; + } +}; + +const onColumnHeaderMouseOver = (id) => { + showEnrichmentPopover.value = true; + selectedEnrichmentAttribute.value = id; +}; + +const handleCellMouseLeave = (_row, column) => { + const validValues = ['industry', 'size', 'revenueRange', 'founded', 'employeeGrowthRate', 'tags']; + + if (!validValues.includes(column.property)) { + closeEnrichmentPopover(); + } +}; + +const closeEnrichmentPopover = (ev) => { + if (ev?.toElement?.id !== 'popover-content') { + showEnrichmentPopover.value = false; + + setTimeout(() => { + selectedEnrichmentAttribute.value = null; + }, 100); + } +}; + const closeDropdown = () => { showOrganizationDropdownPopover.value = false; diff --git a/frontend/src/modules/organization/components/view/_aside/_aside-enriched.vue b/frontend/src/modules/organization/components/view/_aside/_aside-enriched.vue index 208214ee5d..d3cecb1763 100644 --- a/frontend/src/modules/organization/components/view/_aside/_aside-enriched.vue +++ b/frontend/src/modules/organization/components/view/_aside/_aside-enriched.vue @@ -3,25 +3,41 @@
-
+ +
+
+
+ {{ attribute.label }} +
+ +
+
+
+ {{ attribute.enrichmentSneakPeakValue }} +
+
+
+
+
{{ attribute.label }}
- +
-
+
@@ -55,6 +72,10 @@ import { computed, defineProps } from 'vue'; import enrichmentAttributes from '@/modules/organization/config/enrichment'; import { AttributeType } from '@/modules/organization/types/Attributes'; import AppSvg from '@/shared/svg/svg.vue'; +import CrEnrichmentSneakPeak from '@/shared/modules/enrichment/components/enrichment-sneak-peak.vue'; +import CrEnrichmentSneakPeakContent from '@/shared/modules/enrichment/components/enrichment-sneak-peak-content.vue'; +import { mapGetters } from '@/shared/vuex/vuex.helpers'; +import Plans from '@/security/plans'; const props = defineProps({ organization: { @@ -63,8 +84,16 @@ const props = defineProps({ }, }); +const { currentTenant } = mapGetters('auth'); +const isEnrichmentEnabled = computed(() => currentTenant.value.plan !== Plans.values.essential); + const visibleAttributes = computed(() => enrichmentAttributes - .filter((a) => ((props.organization[a.name] && a.type !== AttributeType.ARRAY && a.type !== AttributeType.JSON) - || (a.type === AttributeType.ARRAY && props.organization[a.name]?.length) - || (a.type === AttributeType.JSON && props.organization[a.name] && Object.keys(props.organization[a.name]))) && a.showInAttributes)); + .filter((a) => { + if (!isEnrichmentEnabled.value) { + return a.enrichmentSneakPeak && a.showInAttributes; + } + return ((props.organization[a.name] && a.type !== AttributeType.ARRAY && a.type !== AttributeType.JSON) + || (a.type === AttributeType.ARRAY && props.organization[a.name]?.length) + || (a.type === AttributeType.JSON && props.organization[a.name] && Object.keys(props.organization[a.name]))) && a.showInAttributes; + })); diff --git a/frontend/src/modules/organization/components/view/organization-view-aside.vue b/frontend/src/modules/organization/components/view/organization-view-aside.vue index dd027a5622..de1135400c 100644 --- a/frontend/src/modules/organization/components/view/organization-view-aside.vue +++ b/frontend/src/modules/organization/components/view/organization-view-aside.vue @@ -119,6 +119,8 @@ import { withHttp } from '@/utils/string'; import { AttributeType } from '@/modules/organization/types/Attributes'; import { CrowdIntegrations } from '@/integrations/integrations-config'; import AppPlatform from '@/shared/platform/platform.vue'; +import { mapGetters } from '@/shared/vuex/vuex.helpers'; +import Plans from '@/security/plans'; import AppOrganizationAsideEnriched from './_aside/_aside-enriched.vue'; const props = defineProps({ @@ -128,6 +130,8 @@ const props = defineProps({ }, }); +const { currentTenant } = mapGetters('auth'); + const showDivider = computed( () => (!!props.organization.emails?.length || !!props.organization.phoneNumbers?.length) @@ -150,13 +154,18 @@ const noIdentities = computed(() => ( || props.organization.phoneNumbers.length === 0) )); -const shouldShowAttributes = computed(() => enrichmentAttributes.some((a) => { - if (a.type === AttributeType.ARRAY) { - return !!props.organization[a.name]?.length; +const shouldShowAttributes = computed(() => { + if (currentTenant.value.plan === Plans.values.essential) { + return true; } + return enrichmentAttributes.some((a) => { + if (a.type === AttributeType.ARRAY) { + return !!props.organization[a.name]?.length; + } - return !!props.organization[a.name]; -})); + return !!props.organization[a.name]; + }); +}); const getPlatformDetails = (platform) => CrowdIntegrations.getConfig(platform); diff --git a/frontend/src/modules/organization/components/view/organization-view-header.vue 
b/frontend/src/modules/organization/components/view/organization-view-header.vue index ce0cfba027..43cc216864 100644 --- a/frontend/src/modules/organization/components/view/organization-view-header.vue +++ b/frontend/src/modules/organization/components/view/organization-view-header.vue @@ -113,25 +113,34 @@ }}

-
-
-

- Headcount -

- - - -
+ + +

Joined date @@ -145,23 +154,32 @@ }}

-
-
-

- Annual Revenue -

- - - -
-

- {{ - revenueRange.displayValue( - organization.revenueRange, - ) - }} -

-
+ + +

Last active @@ -198,6 +216,7 @@ import AppOrganizationDropdown from '@/modules/organization/components/organizat import AppOrganizationHeadline from '@/modules/organization/components/organization-headline..vue'; import AppOrganizationMergeDialog from '@/modules/organization/components/organization-merge-dialog.vue'; import AppSvg from '@/shared/svg/svg.vue'; +import CrEnrichmentSneakPeak from '@/shared/modules/enrichment/components/enrichment-sneak-peak.vue'; import revenueRange from '../../config/enrichment/revenueRange'; const props = defineProps({ diff --git a/frontend/src/modules/organization/config/enrichment/employeeGrowthRate.ts b/frontend/src/modules/organization/config/enrichment/employeeGrowthRate.ts index 73d18a49b1..33b0b5387f 100644 --- a/frontend/src/modules/organization/config/enrichment/employeeGrowthRate.ts +++ b/frontend/src/modules/organization/config/enrichment/employeeGrowthRate.ts @@ -10,6 +10,8 @@ const employeeGrowthRate: OrganizationEnrichmentConfig = { type: AttributeType.JSON, showInForm: true, showInAttributes: true, + enrichmentSneakPeak: true, + enrichmentSneakPeakValue: '10.25%', component: OrganizationAttributesJSONRenderer, valueParser: formatFloatToPercentage, keyParser: (key) => `${snakeToSentenceCase(key)}s`, diff --git a/frontend/src/modules/organization/config/enrichment/founded.ts b/frontend/src/modules/organization/config/enrichment/founded.ts index d946043579..ff1ce37e19 100644 --- a/frontend/src/modules/organization/config/enrichment/founded.ts +++ b/frontend/src/modules/organization/config/enrichment/founded.ts @@ -7,6 +7,8 @@ const founded: OrganizationEnrichmentConfig = { type: AttributeType.NUMBER, showInForm: true, showInAttributes: true, + enrichmentSneakPeak: true, + enrichmentSneakPeakValue: '2021', displayValue: (value) => value, }; diff --git a/frontend/src/modules/organization/config/enrichment/headcount.ts b/frontend/src/modules/organization/config/enrichment/headcount.ts index fc539882d1..231ba91ddd 100644 --- a/frontend/src/modules/organization/config/enrichment/headcount.ts +++ b/frontend/src/modules/organization/config/enrichment/headcount.ts @@ -8,6 +8,8 @@ const size: OrganizationEnrichmentConfig = { type: AttributeType.STRING, showInForm: true, showInAttributes: false, + enrichmentSneakPeak: true, + enrichmentSneakPeakValue: '11-50', displayValue: (value) => toSentenceCase(value), }; diff --git a/frontend/src/modules/organization/config/enrichment/index.ts b/frontend/src/modules/organization/config/enrichment/index.ts index be344d5ac3..e7225cc053 100644 --- a/frontend/src/modules/organization/config/enrichment/index.ts +++ b/frontend/src/modules/organization/config/enrichment/index.ts @@ -31,6 +31,8 @@ export interface OrganizationEnrichmentConfig { type: AttributeType; // Type of the attribute showInForm: boolean; // Display in Organization Form showInAttributes: boolean; // Display in Organization Profile + enrichmentSneakPeak?: boolean; // Display as a sneak peak attribute + enrichmentSneakPeakValue?: string; // Value to display in sneak peak isLink?: boolean; // If attribute is a url component?: any; // Component that will render attribute in organization profile displayValue?: (value: any) => string; // Formatter for displaying attribute value diff --git a/frontend/src/modules/organization/config/enrichment/industry.ts b/frontend/src/modules/organization/config/enrichment/industry.ts index f3d6895857..779a10e1b8 100644 --- a/frontend/src/modules/organization/config/enrichment/industry.ts +++ 
b/frontend/src/modules/organization/config/enrichment/industry.ts @@ -8,6 +8,8 @@ const industry: OrganizationEnrichmentConfig = { type: AttributeType.STRING, showInForm: true, showInAttributes: true, + enrichmentSneakPeak: true, + enrichmentSneakPeakValue: 'Software', displayValue: (value) => toSentenceCase(value), }; diff --git a/frontend/src/modules/organization/config/enrichment/revenueRange.ts b/frontend/src/modules/organization/config/enrichment/revenueRange.ts index b99077592d..4a7282978f 100644 --- a/frontend/src/modules/organization/config/enrichment/revenueRange.ts +++ b/frontend/src/modules/organization/config/enrichment/revenueRange.ts @@ -27,6 +27,8 @@ const revenueRange: OrganizationEnrichmentConfig = { type: AttributeType.STRING, showInForm: false, showInAttributes: false, + enrichmentSneakPeak: true, + enrichmentSneakPeakValue: '$1M-$10M', displayValue: (value) => { if (!Object.keys(value || {}).length) { return '-'; diff --git a/frontend/src/modules/organization/config/enrichment/tags.ts b/frontend/src/modules/organization/config/enrichment/tags.ts index 0e6d8f4da3..476a397781 100644 --- a/frontend/src/modules/organization/config/enrichment/tags.ts +++ b/frontend/src/modules/organization/config/enrichment/tags.ts @@ -8,6 +8,8 @@ const tags: OrganizationEnrichmentConfig = { type: AttributeType.ARRAY, showInForm: true, showInAttributes: true, + enrichmentSneakPeak: true, + enrichmentSneakPeakValue: 'Software', component: OrganizationAttributesArrayRenderer, }; diff --git a/frontend/src/shared/modules/enrichment/components/enrichment-sneak-peak-content.vue b/frontend/src/shared/modules/enrichment/components/enrichment-sneak-peak-content.vue new file mode 100644 index 0000000000..d260d2c092 --- /dev/null +++ b/frontend/src/shared/modules/enrichment/components/enrichment-sneak-peak-content.vue @@ -0,0 +1,65 @@ + + + + + + + diff --git a/frontend/src/shared/modules/enrichment/components/enrichment-sneak-peak.vue b/frontend/src/shared/modules/enrichment/components/enrichment-sneak-peak.vue new file mode 100644 index 0000000000..b6076f741a --- /dev/null +++ b/frontend/src/shared/modules/enrichment/components/enrichment-sneak-peak.vue @@ -0,0 +1,44 @@ + + + + + + + diff --git a/frontend/src/shared/modules/enrichment/constants/sneak-peak-popover.ts b/frontend/src/shared/modules/enrichment/constants/sneak-peak-popover.ts new file mode 100644 index 0000000000..12307befd8 --- /dev/null +++ b/frontend/src/shared/modules/enrichment/constants/sneak-peak-popover.ts @@ -0,0 +1,19 @@ +import { + EnrichSneakPeakPopoverType, + EnrichSneakPeakPopoverContent, +} from '@/shared/modules/enrichment/types/SneakPeakPopover'; + +export const popoverContent: Record = { + [EnrichSneakPeakPopoverType.CONTACT]: { + title: 'Contact enrichment', + body: 'Get more insights about this contact by enriching it with valuable ' + + 'details such as seniority level, OSS contributions, skills and much more.', + link: 'https://docs.crowd.dev/docs/guides/contacts/contact-enrichment', + }, + [EnrichSneakPeakPopoverType.ORGANIZATION]: { + title: 'Organization enrichment', + body: 'Get more insights about this organization by enriching it with valuable details such ' + + 'as headcount, industry, location, and more...', + link: 'https://docs.crowd.dev/docs/guides/organizations/organization-enrichment', + }, +}; diff --git a/frontend/src/shared/modules/enrichment/types/SneakPeakPopover.ts b/frontend/src/shared/modules/enrichment/types/SneakPeakPopover.ts new file mode 100644 index 0000000000..9a42e22ca2 --- /dev/null +++ 
b/frontend/src/shared/modules/enrichment/types/SneakPeakPopover.ts @@ -0,0 +1,9 @@ +export enum EnrichSneakPeakPopoverType { + CONTACT = 'contact', + ORGANIZATION = 'organization' +} +export interface EnrichSneakPeakPopoverContent { + title: string; + body: string; + link: string; +} diff --git a/scripts/cli b/scripts/cli index 69ae29fb68..457f1d50e6 100755 --- a/scripts/cli +++ b/scripts/cli @@ -524,6 +524,13 @@ do start exit; ;; + clean-start-backend) + declare -a INGORED_SERVICES=("frontend") + CLEAN_START=1 + DEV=1 + start + exit; + ;; start-backend) declare -a INGORED_SERVICES=("frontend") start From 55c819c7173d759c53bb98d78dc4f52fd1dd06a4 Mon Sep 17 00:00:00 2001 From: joanagmaia Date: Thu, 21 Dec 2023 15:10:33 +0000 Subject: [PATCH 045/185] Add async feedback to organizations merge (#1970) --- .../nodejs/org-merge/orgMergeWorker.ts | 21 ++++- backend/src/services/organizationService.ts | 7 +- frontend/src/modules/auth/auth-socket.js | 90 +++++++++++++++++-- .../list/organization-list-toolbar.vue | 19 ++-- .../components/organization-merge-dialog.vue | 25 +++++- .../organization-selection-dropdown.vue | 1 - .../organization-merge-suggestions-page.vue | 27 +++++- .../organization/store/pinia/actions.ts | 9 ++ .../modules/organization/store/pinia/state.ts | 4 + 9 files changed, 179 insertions(+), 24 deletions(-) diff --git a/backend/src/serverless/microservices/nodejs/org-merge/orgMergeWorker.ts b/backend/src/serverless/microservices/nodejs/org-merge/orgMergeWorker.ts index 747927ddd2..7ea6f6c062 100644 --- a/backend/src/serverless/microservices/nodejs/org-merge/orgMergeWorker.ts +++ b/backend/src/serverless/microservices/nodejs/org-merge/orgMergeWorker.ts @@ -4,7 +4,15 @@ import { REDIS_CONFIG } from '../../../../conf' import getUserContext from '../../../../database/utils/getUserContext' import OrganizationService from '../../../../services/organizationService' -async function doNotifyFrontend({ log, success, tenantId, primaryOrgId, secondaryOrgId }) { +async function doNotifyFrontend({ + log, + success, + tenantId, + primaryOrgId, + secondaryOrgId, + original, + toMerge, +}) { const redis = await getRedisClient(REDIS_CONFIG, true) const apiPubSubEmitter = new RedisPubSubEmitter( 'api-pubsub', @@ -24,6 +32,8 @@ async function doNotifyFrontend({ log, success, tenantId, primaryOrgId, secondar tenantId, primaryOrgId, secondaryOrgId, + original, + toMerge, }), undefined, tenantId, @@ -42,8 +52,13 @@ async function orgMergeWorker( const organizationService = new OrganizationService(userContext) let success = true + let original + let toMerge try { - await organizationService.mergeSync(primaryOrgId, secondaryOrgId) + const response = await organizationService.mergeSync(primaryOrgId, secondaryOrgId) + + original = response.original + toMerge = response.toMerge } catch (err) { userContext.log.error(err, 'Error merging orgs') success = false @@ -56,6 +71,8 @@ async function orgMergeWorker( tenantId, primaryOrgId, secondaryOrgId, + original, + toMerge, }) } } diff --git a/backend/src/services/organizationService.ts b/backend/src/services/organizationService.ts index 86a5e9a2aa..7240ae21ca 100644 --- a/backend/src/services/organizationService.ts +++ b/backend/src/services/organizationService.ts @@ -206,7 +206,12 @@ export default class OrganizationService extends LoggerBase { ) this.options.log.info({ originalId, toMergeId }, 'Organizations merged!') - return { status: 200, mergedId: originalId } + return { + status: 200, + mergedId: originalId, + original: original.displayName, + toMerge: 
toMerge.displayName, + } } catch (err) { this.options.log.error(err, 'Error while merging organizations!', { originalId, diff --git a/frontend/src/modules/auth/auth-socket.js b/frontend/src/modules/auth/auth-socket.js index 2fd54e7553..7dabef5f52 100644 --- a/frontend/src/modules/auth/auth-socket.js +++ b/frontend/src/modules/auth/auth-socket.js @@ -1,5 +1,5 @@ import io from 'socket.io-client'; -import { computed } from 'vue'; +import { computed, h } from 'vue'; import pluralize from 'pluralize'; import config from '@/config'; import { store } from '@/store'; @@ -9,9 +9,20 @@ import { getEnrichmentMax, } from '@/modules/member/member-enrichment'; import { useMemberStore } from '@/modules/member/store/pinia'; +import { router } from '@/router'; +import { useOrganizationStore } from '@/modules/organization/store/pinia'; let socketIoClient; +const SocketEvents = { + connect: 'connect', + disconnect: 'disconnect', + integrationCompleted: 'integration-completed', + tenantPlanUpgraded: 'tenant-plan-upgraded', + bulkEnrichment: 'bulk-enrichment', + orgMerge: 'org-merge', +}; + export const connectSocket = (token) => { if (socketIoClient && socketIoClient.connected) { socketIoClient.disconnect(); @@ -36,15 +47,15 @@ export const connectSocket = (token) => { forceNew: true, }); - socketIoClient.on('connect', () => { + socketIoClient.on(SocketEvents.connect, () => { console.info('Socket connected'); }); - socketIoClient.on('disconnect', () => { + socketIoClient.on(SocketEvents.disconnect, () => { console.info('Socket disconnected'); }); - socketIoClient.on('integration-completed', (data) => { + socketIoClient.on(SocketEvents.integrationCompleted, (data) => { console.info('Integration onboarding done', data); store.dispatch( 'integration/doFind', @@ -53,7 +64,7 @@ export const connectSocket = (token) => { }); socketIoClient.on( - 'tenant-plan-upgraded', + SocketEvents.tenantPlanUpgraded, async (data) => { console.info( 'Tenant plan is upgraded. 
Force a hard refresh!', @@ -72,7 +83,7 @@ export const connectSocket = (token) => { }, ); - socketIoClient.on('bulk-enrichment', async (data) => { + socketIoClient.on(SocketEvents.bulkEnrichment, async (data) => { let parsed = data; if (typeof data === 'string') { parsed = JSON.parse(parsed); @@ -116,6 +127,73 @@ export const connectSocket = (token) => { } } }); + + socketIoClient.on(SocketEvents.orgMerge, (payload) => { + const { + success, + tenantId, + primaryOrgId, + secondaryOrgId, + original, + toMerge, + } = JSON.parse(payload); + + if (currentTenant.value.id !== tenantId) { + return; + } + + const { mergedOrganizations, removeMergedOrganizations } = useOrganizationStore(); + + const buttonElement = h( + 'el-button', + { + class: 'btn btn--xs btn--bordered !h-6 !w-fit', + onClick: () => { + router.push({ + name: 'organizationView', + params: { id: primaryOrgId }, + }); + Message.closeAll(); + }, + }, + 'View organization', + ); + + const messageElements = [buttonElement]; + + if (original && toMerge) { + const descriptionElement = h( + 'span', + { + innerHTML: `${toMerge} merged with ${original}.`, + }, + ); + + removeMergedOrganizations(primaryOrgId); + + messageElements.unshift(descriptionElement); + } + + Message.closeAll(); + + if (success) { + Message.success( + h( + 'div', + { + class: 'flex flex-col gap-2', + }, + messageElements, + ), + { + title: + 'Organizations merged successfully', + }, + ); + } else { + Message.error(`There was an error merging ${toMerge} with ${original}`); + } + }); }; export const disconnectSocket = () => { diff --git a/frontend/src/modules/organization/components/list/organization-list-toolbar.vue b/frontend/src/modules/organization/components/list/organization-list-toolbar.vue index 06ca2822fe..5196156b8a 100644 --- a/frontend/src/modules/organization/components/list/organization-list-toolbar.vue +++ b/frontend/src/modules/organization/components/list/organization-list-toolbar.vue @@ -111,6 +111,7 @@ const organizationStore = useOrganizationStore(); const { selectedOrganizations, filters, + mergedOrganizations, } = storeToRefs(organizationStore); const { fetchOrganizations } = organizationStore; @@ -175,17 +176,19 @@ const handleDoDestroyAllWithConfirm = () => ConfirmDialog({ const handleMergeOrganizations = async () => { const [firstOrganization, secondOrganization] = selectedOrganizations.value; - Message.info( - null, - { - title: 'Organizations are being merged', - }, - ); - OrganizationService.mergeOrganizations(firstOrganization.id, secondOrganization.id) .then(() => { Message.closeAll(); - Message.success('Organizations merged successfuly'); + + organizationStore + .addMergedOrganizations(firstOrganization.id, secondOrganization.id); + + const processesRunning = Object.keys(mergedOrganizations.value).length; + + Message.info(null, { + title: 'Organizations merging in progress', + message: processesRunning > 1 ? 
`${processesRunning} processes running` : null, + }); fetchOrganizations({ reload: true }); }) diff --git a/frontend/src/modules/organization/components/organization-merge-dialog.vue b/frontend/src/modules/organization/components/organization-merge-dialog.vue index e0b43bc0dc..594a6c224a 100644 --- a/frontend/src/modules/organization/components/organization-merge-dialog.vue +++ b/frontend/src/modules/organization/components/organization-merge-dialog.vue @@ -72,6 +72,7 @@ import AppOrganizationMergeSuggestionsDetails import { useOrganizationStore } from '@/modules/organization/store/pinia'; import { OrganizationService } from '@/modules/organization/organization-service'; import AppOrganizationSelectionDropdown from '@/modules/organization/components/organization-selection-dropdown.vue'; +import { storeToRefs } from 'pinia'; const props = defineProps({ modelValue: { @@ -85,7 +86,10 @@ const emit = defineEmits(['update:modelValue']); const route = useRoute(); const router = useRouter(); -const { fetchOrganizations, fetchOrganization } = useOrganizationStore(); +const organizationStore = useOrganizationStore(); +const { + mergedOrganizations, +} = storeToRefs(organizationStore); const originalOrganizationPrimary = ref(true); const sendingMerge = ref(false); @@ -119,14 +123,26 @@ const mergeSuggestion = () => { originalOrganizationPrimary.value ? organizationToMerge.value?.id : props.modelValue?.id, ) .then(() => { - Message.success('Organizations merged successfuly'); + const primaryOrganization = originalOrganizationPrimary.value ? props.modelValue : organizationToMerge.value; + const secondaryOrganization = originalOrganizationPrimary.value ? organizationToMerge.value : props.modelValue; + + organizationStore + .addMergedOrganizations(primaryOrganization.id, secondaryOrganization.id); + + const processesRunning = Object.keys(mergedOrganizations.value).length; + + Message.closeAll(); + Message.info(null, { + title: 'Organizations merging in progress', + message: processesRunning > 1 ? `${processesRunning} processes running` : null, + }); emit('update:modelValue', null); if (route.name === 'organizationView') { const { id } = originalOrganizationPrimary.value ? 
props.modelValue : organizationToMerge.value; - fetchOrganization(id).then(() => { + organizationStore.fetchOrganization(id).then(() => { router.replace({ params: { id, @@ -134,12 +150,13 @@ const mergeSuggestion = () => { }); }); } else if (route.name === 'organization') { - fetchOrganizations({ reload: true }); + organizationStore.fetchOrganizations({ reload: true }); } changeOrganization(); }) .catch(() => { + Message.closeAll(); Message.error('There was an error merging organizations'); }) .finally(() => { diff --git a/frontend/src/modules/organization/components/organization-selection-dropdown.vue index 92f2c86627..2b149f67be 100644 --- a/frontend/src/modules/organization/components/organization-selection-dropdown.vue +++ b/frontend/src/modules/organization/components/organization-selection-dropdown.vue @@ -88,7 +88,6 @@ const fetchFn = async (query, limit) => { if (options.length !== filteredOptions.length) { filteredOptions.push({}); } - console.log(filteredOptions); return filteredOptions; }; diff --git a/frontend/src/modules/organization/pages/organization-merge-suggestions-page.vue index 0bfc3332ad..d48b2ff3d0 100644 --- a/frontend/src/modules/organization/pages/organization-merge-suggestions-page.vue +++ b/frontend/src/modules/organization/pages/organization-merge-suggestions-page.vue @@ -139,11 +139,18 @@ import Message from '@/shared/message/message'; import { mapGetters } from '@/shared/vuex/vuex.helpers'; import AppLoading from '@/shared/loading/loading-placeholder.vue'; import AppOrganizationMergeSuggestionsDetails from '@/modules/organization/components/suggestions/organization-merge-suggestions-details.vue'; +import { useOrganizationStore } from '@/modules/organization/store/pinia'; +import { storeToRefs } from 'pinia'; import { OrganizationService } from '../organization-service'; import { OrganizationPermissions } from '../organization-permissions'; const { currentTenant, currentUser } = mapGetters('auth'); +const organizationStore = useOrganizationStore(); +const { + mergedOrganizations, +} = storeToRefs(organizationStore); + const organizationsToMerge = ref([]); const primary = ref(0); const offset = ref(0); @@ -245,16 +252,32 @@ const mergeSuggestion = () => { return; } sendingMerge.value = true; + OrganizationService.mergeOrganizations( organizationsToMerge.value.organizations[primary.value].id, organizationsToMerge.value.organizations[(primary.value + 1) % 2].id, ) .then(() => { + const primaryOrganization = organizationsToMerge.value.organizations[primary.value]; + const secondaryOrganization = organizationsToMerge.value.organizations[(primary.value + 1) % 2]; + + organizationStore + .addMergedOrganizations(primaryOrganization.id, secondaryOrganization.id); + primary.value = 0; - Message.success('Organizations merged successfuly'); + + const processesRunning = Object.keys(mergedOrganizations.value).length; + + Message.closeAll(); + Message.info(null, { + title: 'Organizations merging in progress', + message: processesRunning > 1 ?
`${processesRunning} processes running...` : null, + }); + fetch(); }) - .catch(() => { + .catch((e) => { + Message.closeAll(); Message.error('There was an error merging organizations'); }) .finally(() => { diff --git a/frontend/src/modules/organization/store/pinia/actions.ts b/frontend/src/modules/organization/store/pinia/actions.ts index f3683562d4..daeda22ad1 100644 --- a/frontend/src/modules/organization/store/pinia/actions.ts +++ b/frontend/src/modules/organization/store/pinia/actions.ts @@ -20,6 +20,7 @@ export default { return Promise.reject(err); }); }, + fetchOrganization(this: OrganizationState, id: string): Promise { return OrganizationService.find(id) .then((organization: Organization) => { @@ -27,4 +28,12 @@ export default { return Promise.resolve(organization); }); }, + + addMergedOrganizations(this: OrganizationState, primaryId: string, secondaryId: string) { + this.mergedOrganizations[primaryId] = secondaryId; + }, + + removeMergedOrganizations(this: OrganizationState, primaryId: string) { + delete this.mergedOrganizations[primaryId]; + }, }; diff --git a/frontend/src/modules/organization/store/pinia/state.ts b/frontend/src/modules/organization/store/pinia/state.ts index be327c0a7b..09f14d52d7 100644 --- a/frontend/src/modules/organization/store/pinia/state.ts +++ b/frontend/src/modules/organization/store/pinia/state.ts @@ -9,6 +9,9 @@ export interface OrganizationState { organization: Organization | null; selectedOrganizations: Organization[]; totalOrganizations: number; + mergedOrganizations: { + [key: string]: string; + } } const state: OrganizationState = { @@ -20,6 +23,7 @@ const state: OrganizationState = { organization: null, selectedOrganizations: [], totalOrganizations: 0, + mergedOrganizations: {}, }; export default () => state; From 4f3cd0a709b3a0ac2bb4a76b1fb157e453d5237b Mon Sep 17 00:00:00 2001 From: joanagmaia Date: Thu, 21 Dec 2023 15:31:01 +0000 Subject: [PATCH 046/185] Fix display of attributes in contacts (#1989) --- .../components/view/_aside/_aside-custom-attributes.vue | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/src/modules/member/components/view/_aside/_aside-custom-attributes.vue b/frontend/src/modules/member/components/view/_aside/_aside-custom-attributes.vue index 5ffd40fa43..1ebef3a741 100644 --- a/frontend/src/modules/member/components/view/_aside/_aside-custom-attributes.vue +++ b/frontend/src/modules/member/components/view/_aside/_aside-custom-attributes.vue @@ -51,18 +51,18 @@ class="attribute" > - + - diff --git a/frontend/src/modules/onboard/pages/onboard-plans-page.vue b/frontend/src/modules/onboard/pages/onboard-plans-page.vue new file mode 100644 index 0000000000..dfd3eafee2 --- /dev/null +++ b/frontend/src/modules/onboard/pages/onboard-plans-page.vue @@ -0,0 +1,40 @@ + + + + + diff --git a/frontend/src/modules/settings/components/plans-list.vue b/frontend/src/modules/settings/components/plans-list.vue new file mode 100644 index 0000000000..ea19fce109 --- /dev/null +++ b/frontend/src/modules/settings/components/plans-list.vue @@ -0,0 +1,229 @@ + + + + + + + diff --git a/frontend/src/modules/settings/pages/plans-page.vue b/frontend/src/modules/settings/pages/plans-page.vue index 5f090a7466..7b961902a2 100644 --- a/frontend/src/modules/settings/pages/plans-page.vue +++ b/frontend/src/modules/settings/pages/plans-page.vue @@ -5,9 +5,17 @@

Current plan -
- {{ activePlan }} -
+
+
+ {{ activePlan }} +
+ {{ + getTrialDate(currentTenant) + }} +
Active since {{ moment(currentTenant.createdAt).format('MMMM DD, YYYY') }}
-
-
- Yearly payment -
-
- Monthly payment -
-
-
-
- -
-
-
-
- -
- {{ plan.title }} -
- - {{ getBadge(plan.key).content }} -
- -
- {{ plan.description }} -
- -
- {{ !monthlyPayment ? plan.price : (plan.priceMonthly ?? plan.price) }} - -
- - - {{ plan.ctaLabel[activePlan] }} - -
- -
-
    -
  • - - - {{ value }} - -
    -
    - -
    - -
    -
    -
    -
    -
  • -
-
-
-
-
-
+
store.getters['auth/currentTenant'], ); -const plansList = computed(() => { - if (isCommunityVersion) { - return plans.community; - } - - return plans.crowdHosted; -}); - const activePlan = computed(() => { // Community Versions if (isCommunityVersion) { @@ -232,48 +117,9 @@ onMounted(() => { doRefreshCurrentUser({}); }); -const getBadge = (plan) => { - if (plan === crowdHostedPlans.essential) { - return null; - } - if (plan === crowdHostedPlans.scale && [crowdHostedPlans.essential, crowdHostedPlans.eagleEye].includes(activePlan.value)) { - // Recommended plan - return { - class: 'text-brand-600 bg-brand-50', - content: 'Recommended', - }; - } if (plan === activePlan.value) { - // Active plans - return { - class: 'text-white bg-brand-500', - content: 'Current plan', - }; - } - - return null; -}; - const onManageBillingClick = () => { window.open(config.stripe.customerPortalLink, '_blank'); }; - -const displayCalDialog = () => { - isCalDialogOpen.value = true; -}; - -const handleOnCtaClick = ({ key, ctaAction }) => { - // Send an event with plan request - window.analytics.track('Change Plan Request', { - tenantId: currentTenant.value.id, - tenantName: currentTenant.value.name, - requestedPlan: key, - }); - - ctaAction[activePlan.value]({ - displayCalDialog, - monthlyPayment: monthlyPayment.value, - }); -}; + + + + diff --git a/frontend/src/modules/member/components/member-dropdown-content.vue b/frontend/src/modules/member/components/member-dropdown-content.vue index 93f564a872..18132c6d15 100644 --- a/frontend/src/modules/member/components/member-dropdown-content.vue +++ b/frontend/src/modules/member/components/member-dropdown-content.vue @@ -1,5 +1,6 @@ - - - diff --git a/frontend/src/modules/organization/components/suggestions/organization-merge-suggestions-details.vue b/frontend/src/modules/organization/components/suggestions/organization-merge-suggestions-details.vue index a59103262f..b23664e208 100644 --- a/frontend/src/modules/organization/components/suggestions/organization-merge-suggestions-details.vue +++ b/frontend/src/modules/organization/components/suggestions/organization-merge-suggestions-details.vue @@ -229,64 +229,12 @@
@@ -303,8 +251,8 @@ import { mapGetters } from '@/shared/vuex/vuex.helpers'; import { withHttp } from '@/utils/string'; import { formatDateToTimeAgo } from '@/utils/date'; import revenueRange from '@/modules/organization/config/enrichment/revenueRange'; -import AppPlatform from '@/shared/platform/platform.vue'; -import { CrowdIntegrations } from '@/integrations/integrations-config'; +import AppIdentitiesVerticalListOrganizations from '@/shared/modules/identities/components/identities-vertical-list-organizations.vue'; +import organizationOrder from '@/shared/modules/identities/config/identitiesOrder/organization'; const props = defineProps({ organization: { @@ -346,15 +294,6 @@ const bio = ref(null); const displayShowMore = ref(null); const more = ref(null); -const getPlatformDetails = (platform) => CrowdIntegrations.getConfig(platform); - -const getIdentityLink = (identity) => { - if (identity.url) { - return withHttp(identity.url); - } - return null; -}; - onMounted(() => { setTimeout(() => { if (!bio.value) { diff --git a/frontend/src/modules/organization/components/view/_aside/_aside-identities-extra.vue b/frontend/src/modules/organization/components/view/_aside/_aside-identities-extra.vue new file mode 100644 index 0000000000..9bcd2350e5 --- /dev/null +++ b/frontend/src/modules/organization/components/view/_aside/_aside-identities-extra.vue @@ -0,0 +1,155 @@ + + + diff --git a/frontend/src/modules/organization/components/view/_aside/_aside-identities.vue b/frontend/src/modules/organization/components/view/_aside/_aside-identities.vue new file mode 100644 index 0000000000..1554c9adef --- /dev/null +++ b/frontend/src/modules/organization/components/view/_aside/_aside-identities.vue @@ -0,0 +1,63 @@ + + + diff --git a/frontend/src/modules/organization/components/view/organization-view-aside.vue b/frontend/src/modules/organization/components/view/organization-view-aside.vue index de1135400c..7704853e4b 100644 --- a/frontend/src/modules/organization/components/view/organization-view-aside.vue +++ b/frontend/src/modules/organization/components/view/organization-view-aside.vue @@ -1,104 +1,14 @@ + + diff --git a/frontend/src/shared/modules/identities/components/identities-horizontal-list-organizations.vue b/frontend/src/shared/modules/identities/components/identities-horizontal-list-organizations.vue new file mode 100644 index 0000000000..61684cd604 --- /dev/null +++ b/frontend/src/shared/modules/identities/components/identities-horizontal-list-organizations.vue @@ -0,0 +1,32 @@ + + + + + diff --git a/frontend/src/shared/modules/identities/components/identities-horizontal-list.vue b/frontend/src/shared/modules/identities/components/identities-horizontal-list.vue new file mode 100644 index 0000000000..b94d1c6d3b --- /dev/null +++ b/frontend/src/shared/modules/identities/components/identities-horizontal-list.vue @@ -0,0 +1,112 @@ + + + + + diff --git a/frontend/src/shared/modules/identities/components/identities-vertical-list-members.vue b/frontend/src/shared/modules/identities/components/identities-vertical-list-members.vue new file mode 100644 index 0000000000..f9d401eea4 --- /dev/null +++ b/frontend/src/shared/modules/identities/components/identities-vertical-list-members.vue @@ -0,0 +1,39 @@ + + + + + diff --git a/frontend/src/shared/modules/identities/components/identities-vertical-list-organizations.vue b/frontend/src/shared/modules/identities/components/identities-vertical-list-organizations.vue new file mode 100644 index 0000000000..c8df99e6a4 --- /dev/null +++ 
b/frontend/src/shared/modules/identities/components/identities-vertical-list-organizations.vue @@ -0,0 +1,41 @@ + + + + + diff --git a/frontend/src/shared/modules/identities/components/identities-vertical-list.vue b/frontend/src/shared/modules/identities/components/identities-vertical-list.vue new file mode 100644 index 0000000000..714a756723 --- /dev/null +++ b/frontend/src/shared/modules/identities/components/identities-vertical-list.vue @@ -0,0 +1,116 @@ + + + + + diff --git a/frontend/src/shared/modules/identities/config/identitiesOrder/member/index.ts b/frontend/src/shared/modules/identities/config/identitiesOrder/member/index.ts new file mode 100644 index 0000000000..f02d97fd77 --- /dev/null +++ b/frontend/src/shared/modules/identities/config/identitiesOrder/member/index.ts @@ -0,0 +1,9 @@ +import list from './list'; +import profile from './profile'; +import suggestions from './suggestions'; + +export default { + list, + profile, + suggestions, +}; diff --git a/frontend/src/shared/modules/identities/config/identitiesOrder/member/list.ts b/frontend/src/shared/modules/identities/config/identitiesOrder/member/list.ts new file mode 100644 index 0000000000..83795b6d91 --- /dev/null +++ b/frontend/src/shared/modules/identities/config/identitiesOrder/member/list.ts @@ -0,0 +1,18 @@ +import { Platform } from '@/shared/modules/platform/types/Platform'; + +export default [ + Platform.GITHUB, + Platform.DISCORD, + Platform.HACKER_NEWS, + Platform.LINKEDIN, + Platform.TWITTER, + Platform.SLACK, + Platform.DEVTO, + Platform.REDDIT, + Platform.STACK_OVERFLOW, + Platform.DISCOURSE, + Platform.HUBSPOT, + Platform.GIT, + Platform.GROUPS_IO, + Platform.CUSTOM, +]; diff --git a/frontend/src/shared/modules/identities/config/identitiesOrder/member/profile.ts b/frontend/src/shared/modules/identities/config/identitiesOrder/member/profile.ts new file mode 100644 index 0000000000..83795b6d91 --- /dev/null +++ b/frontend/src/shared/modules/identities/config/identitiesOrder/member/profile.ts @@ -0,0 +1,18 @@ +import { Platform } from '@/shared/modules/platform/types/Platform'; + +export default [ + Platform.GITHUB, + Platform.DISCORD, + Platform.HACKER_NEWS, + Platform.LINKEDIN, + Platform.TWITTER, + Platform.SLACK, + Platform.DEVTO, + Platform.REDDIT, + Platform.STACK_OVERFLOW, + Platform.DISCOURSE, + Platform.HUBSPOT, + Platform.GIT, + Platform.GROUPS_IO, + Platform.CUSTOM, +]; diff --git a/frontend/src/shared/modules/identities/config/identitiesOrder/member/suggestions.ts b/frontend/src/shared/modules/identities/config/identitiesOrder/member/suggestions.ts new file mode 100644 index 0000000000..83795b6d91 --- /dev/null +++ b/frontend/src/shared/modules/identities/config/identitiesOrder/member/suggestions.ts @@ -0,0 +1,18 @@ +import { Platform } from '@/shared/modules/platform/types/Platform'; + +export default [ + Platform.GITHUB, + Platform.DISCORD, + Platform.HACKER_NEWS, + Platform.LINKEDIN, + Platform.TWITTER, + Platform.SLACK, + Platform.DEVTO, + Platform.REDDIT, + Platform.STACK_OVERFLOW, + Platform.DISCOURSE, + Platform.HUBSPOT, + Platform.GIT, + Platform.GROUPS_IO, + Platform.CUSTOM, +]; diff --git a/frontend/src/shared/modules/identities/config/identitiesOrder/organization/index.ts b/frontend/src/shared/modules/identities/config/identitiesOrder/organization/index.ts new file mode 100644 index 0000000000..f02d97fd77 --- /dev/null +++ b/frontend/src/shared/modules/identities/config/identitiesOrder/organization/index.ts @@ -0,0 +1,9 @@ +import list from './list'; +import profile from './profile'; 
+import suggestions from './suggestions';
+
+export default {
+  list,
+  profile,
+  suggestions,
+};
diff --git a/frontend/src/shared/modules/identities/config/identitiesOrder/organization/list.ts b/frontend/src/shared/modules/identities/config/identitiesOrder/organization/list.ts
new file mode 100644
index 0000000000..1aceb5547e
--- /dev/null
+++ b/frontend/src/shared/modules/identities/config/identitiesOrder/organization/list.ts
@@ -0,0 +1,10 @@
+import { Platform } from '@/shared/modules/platform/types/Platform';
+
+export default [
+  Platform.GITHUB,
+  Platform.LINKEDIN,
+  Platform.TWITTER,
+  Platform.CRUNCHBASE,
+  Platform.HUBSPOT,
+  Platform.CUSTOM,
+];
diff --git a/frontend/src/shared/modules/identities/config/identitiesOrder/organization/profile.ts b/frontend/src/shared/modules/identities/config/identitiesOrder/organization/profile.ts
new file mode 100644
index 0000000000..1aceb5547e
--- /dev/null
+++ b/frontend/src/shared/modules/identities/config/identitiesOrder/organization/profile.ts
@@ -0,0 +1,10 @@
+import { Platform } from '@/shared/modules/platform/types/Platform';
+
+export default [
+  Platform.GITHUB,
+  Platform.LINKEDIN,
+  Platform.TWITTER,
+  Platform.CRUNCHBASE,
+  Platform.HUBSPOT,
+  Platform.CUSTOM,
+];
diff --git a/frontend/src/shared/modules/identities/config/identitiesOrder/organization/suggestions.ts b/frontend/src/shared/modules/identities/config/identitiesOrder/organization/suggestions.ts
new file mode 100644
index 0000000000..1aceb5547e
--- /dev/null
+++ b/frontend/src/shared/modules/identities/config/identitiesOrder/organization/suggestions.ts
@@ -0,0 +1,10 @@
+import { Platform } from '@/shared/modules/platform/types/Platform';
+
+export default [
+  Platform.GITHUB,
+  Platform.LINKEDIN,
+  Platform.TWITTER,
+  Platform.CRUNCHBASE,
+  Platform.HUBSPOT,
+  Platform.CUSTOM,
+];
diff --git a/frontend/src/shared/modules/identities/config/useMemberIdentities.ts b/frontend/src/shared/modules/identities/config/useMemberIdentities.ts
new file mode 100644
index 0000000000..decadaee87
--- /dev/null
+++ b/frontend/src/shared/modules/identities/config/useMemberIdentities.ts
@@ -0,0 +1,136 @@
+import { CrowdIntegrations } from '@/integrations/integrations-config';
+import { Member } from '@/modules/member/types/Member';
+import { Platform } from '@/shared/modules/platform/types/Platform';
+
+export default ({
+  member,
+  order,
+}: {
+  member: Partial<Member>;
+  order: Platform[];
+}) => {
+  const { username = {}, attributes = {}, emails = [] } = member || {};
+
+  const getIdentityHandles = (platform: string) => {
+    if (platform === Platform.CUSTOM) {
+      const customPlatforms = Object.keys(username).filter(
+        (p) => (!order.includes(p) || p === Platform.CUSTOM)
+          && p !== Platform.EMAIL
+          && p !== Platform.EMAILS,
+      );
+
+      return customPlatforms.flatMap((p) => username[p].map((u) => ({
+        platform: p,
+        url: null,
+        name: u,
+      })));
+    }
+
+    return username[platform]
+      ? username[platform].map((u) => ({
+        platform,
+        url: null,
+        name: u,
+      }))
+      : [];
+  };
+
+  const getIdentityLink = (identity: {
+    platform: string;
+    url: string;
+    name: string;
+  }, platform: string) => {
+    if (!CrowdIntegrations.getConfig(platform)?.showProfileLink) {
+      return null;
+    }
+
+    return (
+      identity.url
+      ?? CrowdIntegrations.getConfig(platform)?.url({
+        username: identity.name,
+        attributes,
+      })
+      ?? attributes?.url?.[platform]
+    );
+  };
+
+  const getIdentities = (): {
+    [key: string]: {
+      handle: string;
+      link: string;
+    }[];
+  } => order.reduce((acc, platform) => {
+    const handles = getIdentityHandles(platform);
+
+    if (platform === Platform.CUSTOM && handles.length) {
+      const sortedCustomIdentities = handles.sort((a, b) => {
+        const platformComparison = a.platform.localeCompare(b.platform);
+
+        if (platformComparison === 0) {
+          // If platforms are equal, sort by name
+          return a.name.localeCompare(b.name);
+        }
+
+        return platformComparison; // Otherwise, sort by platform
+      });
+
+      sortedCustomIdentities.forEach((identity) => {
+        if (acc[identity.platform]?.length) {
+          acc[identity.platform].push({
+            handle: identity.name,
+            link: getIdentityLink(identity, platform),
+          });
+        } else {
+          acc[identity.platform] = [
+            {
+              handle: identity.name,
+              link: getIdentityLink(identity, platform),
+            },
+          ];
+        }
+      });
+    } else {
+      const platformHandlesValues = handles.map((identity) => ({
+        handle: identity.name,
+        link: getIdentityLink(identity, platform),
+      }));
+
+      if (platformHandlesValues.length) {
+        acc[platform] = platformHandlesValues;
+      }
+    }
+
+    return acc;
+  }, {});
+
+  const getEmails = (): {
+    handle: string;
+    link: string;
+  }[] => {
+    const rootEmails = (emails || []).map((e) => ({
+      link: `mailto:${e}`,
+      handle: e,
+    }));
+
+    const usernameEmail = username.email
+      ? username.email.map((u) => ({
+        link: null,
+        handle: u,
+      }))
+      : [];
+
+    const usernameEmails = username.emails
+      ? username.emails.map((u) => ({
+        link: `mailto:${u}`,
+        handle: u,
+      }))
+      : [];
+
+    return [...rootEmails, ...usernameEmail, ...usernameEmails];
+  };
+
+  return {
+    getIdentities,
+    getEmails,
+  };
+};
diff --git a/frontend/src/shared/modules/identities/config/useOrganizationIdentities.ts b/frontend/src/shared/modules/identities/config/useOrganizationIdentities.ts
new file mode 100644
index 0000000000..1f25cff373
--- /dev/null
+++ b/frontend/src/shared/modules/identities/config/useOrganizationIdentities.ts
@@ -0,0 +1,132 @@
+import { CrowdIntegrations } from '@/integrations/integrations-config';
+import { withHttp } from '@/utils/string';
+import { Organization } from '@/modules/organization/types/Organization';
+import { Platform } from '@/shared/modules/platform/types/Platform';
+
+export default ({
+  organization,
+  order,
+}: {
+  organization: Partial<Organization>;
+  order: Platform[];
+}) => {
+  const {
+    identities = [],
+    emails = [],
+    phoneNumbers = [],
+  } = organization || {};
+
+  const getIdentityHandles = (platform: string) => {
+    const parsedIdentities = identities?.length ? identities : [];
+
+    if (platform === Platform.CUSTOM) {
+      const customPlatforms = parsedIdentities.filter(
+        (i) => (!order.includes(i.platform) || i.platform === Platform.CUSTOM)
+          && i.platform !== Platform.EMAIL
+          && i.platform !== Platform.EMAILS,
+      );
+
+      return customPlatforms;
+    }
+
+    return parsedIdentities.filter((i) => i.platform === platform) || [];
+  };
+
+  const getIdentities = (): {
+    [key: string]: {
+      handle: string;
+      link: string;
+    }[];
+  } => order.reduce((acc, p) => {
+    const handles = getIdentityHandles(p);
+
+    if (
+      (p === Platform.CUSTOM || p === Platform.PHONE_NUMBERS)
+      && handles.length
+    ) {
+      const sortedCustomIdentities = handles.sort((a, b) => {
+        const platformComparison = a.platform.localeCompare(b.platform);
+
+        if (platformComparison === 0) {
+          // If platforms are equal, sort by name
+          return a.name.localeCompare(b.name);
+        }
+
+        return platformComparison; // Otherwise, sort by platform
+      });
+
+      sortedCustomIdentities.forEach((i) => {
+        if (acc[i.platform]?.length) {
+          acc[i.platform].push({
+            handle: i.name,
+            link: i.url ? withHttp(i.url) : null,
+          });
+        } else {
+          acc[i.platform] = [
+            {
+              handle: i.name,
+              link: i.url ? withHttp(i.url) : null,
+            },
+          ];
+        }
+      });
+    } else {
+      const handlesValues = handles.map((i) => ({
+        handle:
+          CrowdIntegrations.getConfig(i.platform)?.organization?.handle(i)
+          ?? i.name
+          ?? CrowdIntegrations.getConfig(i.platform)?.name
+          ?? i.platform,
+        link: i.url ? withHttp(i.url) : null,
+      }));
+
+      if (handlesValues.length) {
+        acc[p] = handlesValues;
+      }
+    }
+
+    return acc;
+  }, {});
+
+  const getEmails = (): {
+    handle: string;
+    link: string;
+  }[] => {
+    const parsedIdentities = identities?.length ? identities : [];
+
+    const rootEmails = (emails || []).map((e) => ({
+      link: `mailto:${e}`,
+      handle: e,
+    }));
+
+    const identitiesEmails = parsedIdentities
+      .filter((i) => i.platform === 'emails')
+      .map((i) => ({
+        link: i.url ? `mailto:${i.url}` : null,
+        handle: i.name,
+      }));
+
+    const identitiesEmail = parsedIdentities
+      .filter((i) => i.platform === 'email')
+      .map((i) => ({
+        link: i.url ? `mailto:${i.url}` : null,
+        handle: i.name,
+      }));
+
+    return [...rootEmails, ...identitiesEmails, ...identitiesEmail];
+  };
+
+  const getPhoneNumbers = (): {
+    handle: string;
+    link: string;
+  }[] => (phoneNumbers || []).map((p) => ({
+    link: `tel:${p}`,
+    handle: p,
+  }));
+
+  return {
+    getIdentities,
+    getEmails,
+    getPhoneNumbers,
+  };
+};
diff --git a/frontend/src/shared/modules/platform/components/platform-icon.vue b/frontend/src/shared/modules/platform/components/platform-icon.vue
new file mode 100644
index 0000000000..d46d93ff03
--- /dev/null
+++ b/frontend/src/shared/modules/platform/components/platform-icon.vue
@@ -0,0 +1,38 @@
+
+
+
+
+
diff --git a/frontend/src/shared/modules/platform/components/platform-img.vue b/frontend/src/shared/modules/platform/components/platform-img.vue
new file mode 100644
index 0000000000..3da8906d77
--- /dev/null
+++ b/frontend/src/shared/modules/platform/components/platform-img.vue
@@ -0,0 +1,33 @@
+
+
+
+
+
diff --git a/frontend/src/shared/modules/platform/components/platform-svg.vue b/frontend/src/shared/modules/platform/components/platform-svg.vue
new file mode 100644
index 0000000000..cd57246bad
--- /dev/null
+++ b/frontend/src/shared/modules/platform/components/platform-svg.vue
@@ -0,0 +1,31 @@
+
+
+
+
+
diff --git a/frontend/src/shared/modules/platform/components/platform.vue b/frontend/src/shared/modules/platform/components/platform.vue
new file mode 100644
index 0000000000..a656b69a9b
--- /dev/null
+++ b/frontend/src/shared/modules/platform/components/platform.vue
@@ -0,0 +1,87 @@
+
+
+
+
+
+
+
diff --git a/frontend/src/shared/modules/platform/types/Platform.ts b/frontend/src/shared/modules/platform/types/Platform.ts
new file mode 100644
index 0000000000..e4469d4012
--- /dev/null
+++ b/frontend/src/shared/modules/platform/types/Platform.ts
@@ -0,0 +1,20 @@
+export enum Platform {
+  GITHUB = 'github',
+  DISCORD = 'discord',
+  HACKER_NEWS = 'hackernews',
+  LINKEDIN = 'linkedin',
+  TWITTER = 'twitter',
+  SLACK = 'slack',
+  DEVTO = 'devto',
+  REDDIT = 'reddit',
+  STACK_OVERFLOW = 'stackOverflow',
+  DISCOURSE = 'discourse',
+  HUBSPOT = 'hubspot',
+  GIT = 'git',
+  GROUPS_IO = 'groupsio',
+  CUSTOM = 'custom',
+  EMAIL = 'email',
+  EMAILS = 'emails',
+  PHONE_NUMBERS = 'phoneNumbers',
+  CRUNCHBASE = 'crunchbase',
+}
diff --git a/frontend/src/shared/platform/platform-list.vue b/frontend/src/shared/platform/platform-list.vue
deleted file mode 100644
index 7a60e0204b..0000000000
--- a/frontend/src/shared/platform/platform-list.vue
+++ /dev/null
@@ -1,87 +0,0 @@
-
-
-
-
-
diff --git a/frontend/src/shared/platform/platform-popover.vue b/frontend/src/shared/platform/platform-popover.vue
deleted file mode 100644
index 3948a90bc3..0000000000
--- a/frontend/src/shared/platform/platform-popover.vue
+++ /dev/null
@@ -1,82 +0,0 @@
-
-
-
-
-
-
-
diff --git a/frontend/src/shared/platform/platform-svg-icon.vue b/frontend/src/shared/platform/platform-svg-icon.vue
deleted file mode 100644
index d61d0e911d..0000000000
--- a/frontend/src/shared/platform/platform-svg-icon.vue
+++ /dev/null
@@ -1,87 +0,0 @@
-
-
-
-
-
diff --git a/frontend/src/shared/platform/platform.vue b/frontend/src/shared/platform/platform.vue
deleted file mode 100644
index 50d96391a4..0000000000
--- a/frontend/src/shared/platform/platform.vue
+++ /dev/null
@@ -1,167 +0,0 @@
-
-
-
-
-
-
-
diff --git a/frontend/src/shared/shared-module.js b/frontend/src/shared/shared-module.js
index efba4575dd..6e3ca84ff1 100644
--- a/frontend/src/shared/shared-module.js
+++ b/frontend/src/shared/shared-module.js
@@ -14,7 +14,6 @@ import InlineSelectInput from '@/shared/form/inline-select-input.vue';
 import Dialog from '@/shared/dialog/dialog.vue';
 import EmptyStateCta from '@/shared/empty-state/empty-state-cta.vue';
 import EmptyState from '@/shared/empty-state/empty-state.vue';
-import Platform from '@/shared/platform/platform.vue';
 import Drawer from '@/shared/drawer/drawer.vue';
 import AppLoader from '@/shared/loading/loader.vue';
 import AppPageWrapper from '@/shared/layout/page-wrapper.vue';
@@ -41,7 +40,6 @@ export default {
   'app-dialog': Dialog,
   'app-empty-state-cta': EmptyStateCta,
   'app-empty-state': EmptyState,
-  'app-platform': Platform,
   'app-drawer': Drawer,
   'app-loader': AppLoader,
   'app-page-wrapper': AppPageWrapper,

From d28b124a1807a95a8169ae590c15de3bc8a0f8d9 Mon Sep 17 00:00:00 2001
From: joanagmaia
Date: Fri, 5 Jan 2024 13:36:02 +0000
Subject: [PATCH 072/185] Fix empty emails and phone numbers (#2028)

---
 .../member/components/list/member-list-table.vue | 10 +++---
 .../components/identities-vertical-list.vue      |  2 +-
 .../identities/config/useMemberIdentities.ts     | 30 +++++++++++--------
 .../config/useOrganizationIdentities.ts          | 10 ++++---
 4 files changed, 30 insertions(+), 22 deletions(-)

diff --git a/frontend/src/modules/member/components/list/member-list-table.vue b/frontend/src/modules/member/components/list/member-list-table.vue
index 89408f3e4a..360639235e 100644
--- a/frontend/src/modules/member/components/list/member-list-table.vue
+++ b/frontend/src/modules/member/components/list/member-list-table.vue
@@ -199,11 +199,11 @@
             class="block"
           >
             <div
-              +{{ scope.row.emails.length - 3 }}
+              +{{ scope.row.emails.filter((e) => !!e).length - 3 }}
             <div
-
+
-
+
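For context on what these hunks guard against, here is a minimal sketch of how the useMemberIdentities composable introduced above is consumed; the sample member object is hypothetical, and only the import paths and the returned functions come from the patches themselves:

import useMemberIdentities from '@/shared/modules/identities/config/useMemberIdentities';
import memberOrder from '@/shared/modules/identities/config/identitiesOrder/member';

// Hypothetical payload: the empty string in `emails` is the kind of value
// the member-list-table change above hides with .filter((e) => !!e).
const member = {
  username: { github: ['octocat'] },
  attributes: {},
  emails: ['cat@example.com', ''],
};

const { getIdentities, getEmails } = useMemberIdentities({
  member,
  order: memberOrder.profile,
});

// Groups handles by platform in the configured order,
// e.g. { github: [{ handle: 'octocat', link: ... }] }.
const identities = getIdentities();

// Maps every address to { handle, link: 'mailto:...' }; as defined in the
// earlier patch it passes empty strings through unchanged, which is why the
// table filters for truthy values before counting.
const allEmails = getEmails();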