From d857ad60145bb8f090d0d191650f151b2e17eb11 Mon Sep 17 00:00:00 2001 From: Henry Fontanier Date: Tue, 14 Jan 2025 13:58:46 +0100 Subject: [PATCH 1/9] feat: add link to maintained document in tracker email (#9944) Co-authored-by: Henry Fontanier --- front/lib/api/tracker.ts | 104 +++++++++++++++------- front/lib/models/doc_tracker.ts | 8 +- front/lib/resources/tracker_resource.ts | 16 ++++ front/scripts/send_tracker_generations.ts | 101 +++++++++++++++++++++ types/src/front/tracker.ts | 16 ++-- 5 files changed, 205 insertions(+), 40 deletions(-) create mode 100644 front/scripts/send_tracker_generations.ts diff --git a/front/lib/api/tracker.ts b/front/lib/api/tracker.ts index 38627bf4f6c6..bddcb86984f9 100644 --- a/front/lib/api/tracker.ts +++ b/front/lib/api/tracker.ts @@ -1,5 +1,5 @@ import type { TrackerGenerationToProcess } from "@dust-tt/types"; -import { concurrentExecutor, CoreAPI } from "@dust-tt/types"; +import { concurrentExecutor, CoreAPI, removeNulls } from "@dust-tt/types"; import _ from "lodash"; import config from "@app/lib/api/config"; @@ -84,8 +84,8 @@ const sendTrackerEmail = async ({ const sendEmail = generations.length > 0 - ? _sendTrackerWithGenerationEmail - : _sendTrackerDefaultEmail; + ? 
sendTrackerWithGenerationEmail + : sendTrackerDefaultEmail; await Promise.all( Array.from(recipients).map((recipient) => @@ -94,7 +94,7 @@ const sendTrackerEmail = async ({ ); }; -const _sendTrackerDefaultEmail = async ({ +const sendTrackerDefaultEmail = async ({ name, recipient, }: { @@ -115,7 +115,7 @@ const _sendTrackerDefaultEmail = async ({ }); }; -const _sendTrackerWithGenerationEmail = async ({ +export const sendTrackerWithGenerationEmail = async ({ name, recipient, generations, @@ -127,15 +127,32 @@ const _sendTrackerWithGenerationEmail = async ({ localLogger: Logger; }): Promise => { const coreAPI = new CoreAPI(config.getCoreAPIConfig(), localLogger); - const generationsByDataSources = _.groupBy(generations, "dataSource.id"); - const documentsById = new Map(); + const dataSourceById = _.keyBy( + removeNulls( + generations.map((g) => [g.dataSource, g.maintainedDataSource]).flat() + ), + "id" + ); + const docsToFetchByDataSourceId = _.mapValues( + _.groupBy( + generations.map((g) => ({ + dataSourceId: g.dataSource.id, + documentIds: removeNulls([g.documentId, g.maintainedDocumentId]), + })), + "dataSourceId" + ), + (docs) => docs.map((d) => d.documentIds).flat() + ); + const documentsByIdentifier = new Map< + string, + { name: string; url: string | null } + >(); // Fetch documents for each data source in parallel. 
await concurrentExecutor( - Object.entries(generationsByDataSources), - async ([, generations]) => { - const dataSource = generations[0].dataSource; - const documentIds = [...new Set(generations.map((g) => g.documentId))]; + Object.entries(docsToFetchByDataSourceId), + async ([dataSourceId, documentIds]) => { + const dataSource = dataSourceById[dataSourceId]; const docsResult = await coreAPI.getDataSourceDocuments({ projectId: dataSource.dustAPIProjectId, @@ -156,7 +173,7 @@ const _sendTrackerWithGenerationEmail = async ({ } docsResult.value.documents.forEach((doc) => { - documentsById.set(doc.document_id, { + documentsByIdentifier.set(`${dataSource.id}__${doc.document_id}`, { name: doc.title ?? "Unknown document", url: doc.source_url ?? null, }); @@ -165,31 +182,56 @@ const _sendTrackerWithGenerationEmail = async ({ { concurrency: 5 } ); - const generationBody = generations.map((generation) => { - const doc = documentsById.get(generation.documentId) ?? { - name: "Unknown document", - url: null, - }; - - const title = doc.url - ? `${doc.name}` - : `[${doc.name}]`; - - return [ - `Changes in document ${title} from ${generation.dataSource.name}:`, - generation.thinking && ``, - `

${generation.content}.

`, - ] - .filter(Boolean) - .join(""); - }); + const generationBody = await Promise.all( + generations.map((g) => { + const doc = documentsByIdentifier.get( + `${g.dataSource.id}__${g.documentId}` + ) ?? { + name: "Unknown document", + url: null, + }; + const maintainedDoc = g.maintainedDataSource + ? documentsByIdentifier.get( + `${g.maintainedDataSource.id}__${g.maintainedDocumentId}` + ) ?? null + : null; + + const title = doc.url + ? `${doc.name}` + : `[${doc.name}]`; + + let maintainedTitle: string | null = null; + if (maintainedDoc) { + maintainedTitle = maintainedDoc.url + ? `${maintainedDoc.name}` + : `[${maintainedDoc.name}]`; + } + + let body = `Changes in document ${title} from ${g.dataSource.name}`; + if (maintainedTitle && g.maintainedDataSource) { + body += ` might affect ${maintainedTitle} from ${g.maintainedDataSource.name}`; + } + body += `:`; + + if (g.thinking) { + body += ` +
+ View thinking +

${g.thinking.replace(/\n/g, "
")}

+
`; + } + + body += `

${g.content.replace(/\n/g, "
")}.

`; + return body; + }) + ); const body = `

We have new suggestions for your tracker ${name}:

${generations.length} recommendations were generated due to changes in watched documents.



-${generationBody.join("
")} +${generationBody.join("
")} `; await sendEmailWithTemplate({ diff --git a/front/lib/models/doc_tracker.ts b/front/lib/models/doc_tracker.ts index 3b9af1c38b21..5e40b00c0265 100644 --- a/front/lib/models/doc_tracker.ts +++ b/front/lib/models/doc_tracker.ts @@ -237,14 +237,16 @@ export class TrackerGenerationModel extends SoftDeletableModel; declare dataSourceId: ForeignKey; declare documentId: string; - declare maintainedDocumentDataSourceId: ForeignKey; - declare maintainedDocumentId: string; + declare maintainedDocumentDataSourceId: ForeignKey< + DataSourceModel["id"] + > | null; + declare maintainedDocumentId: string | null; declare consumedAt: Date | null; declare trackerConfiguration: NonAttribute; declare dataSource: NonAttribute; - declare maintainedDocumentDataSource: NonAttribute; + declare maintainedDocumentDataSource: NonAttribute | null; } TrackerGenerationModel.init( diff --git a/front/lib/resources/tracker_resource.ts b/front/lib/resources/tracker_resource.ts index 7910e389db46..3a197b57c6c9 100644 --- a/front/lib/resources/tracker_resource.ts +++ b/front/lib/resources/tracker_resource.ts @@ -437,6 +437,11 @@ export class TrackerConfigurationResource extends ResourceWithSpace { + try { + // Validate email + if (!isEmailValid(email)) { + throw new Error("Invalid email address"); + } + + // Parse and validate generation IDs + const ids = generationIds.map((id) => parseInt(id)); + if (ids.some((id) => isNaN(id))) { + throw new Error("Invalid generation IDs - must be numbers"); + } + + if (execute) { + // Fetch generations with their data sources + const generations = await TrackerGenerationModel.findAll({ + where: { + id: ids, + }, + include: [ + { + model: DataSourceModel, + required: true, + }, + { + model: DataSourceModel, + as: "maintainedDocumentDataSource", + required: false, + }, + ], + }); + + if (generations.length === 0) { + throw new Error("No generations found with the provided IDs"); + } + + // Convert to TrackerGenerationToProcess format + const 
generationsToProcess = generations.map((g) => ({ + id: g.id, + content: g.content, + thinking: g.thinking, + documentId: g.documentId, + dataSource: { + id: g.dataSource.id, + name: g.dataSource.name, + dustAPIProjectId: g.dataSource.dustAPIProjectId, + dustAPIDataSourceId: g.dataSource.dustAPIDataSourceId, + }, + maintainedDocumentId: g.maintainedDocumentId, + maintainedDataSource: g.maintainedDocumentDataSource + ? { + id: g.maintainedDocumentDataSource.id, + name: g.maintainedDocumentDataSource.name, + dustAPIProjectId: + g.maintainedDocumentDataSource.dustAPIProjectId, + dustAPIDataSourceId: + g.maintainedDocumentDataSource.dustAPIDataSourceId, + } + : null, + })); + + // Send email + await sendTrackerWithGenerationEmail({ + name: "Manual Generation Email", + recipient: email, + generations: generationsToProcess, + localLogger: logger, + }); + + logger.info({}, "Email sent successfully"); + } else { + logger.info( + { generationIds: ids, email }, + "Dry run - would send email with these parameters" + ); + } + } finally { + await frontSequelize.close(); + } + } +); diff --git a/types/src/front/tracker.ts b/types/src/front/tracker.ts index 9eb0e275b0a1..74f960c4e6fe 100644 --- a/types/src/front/tracker.ts +++ b/types/src/front/tracker.ts @@ -68,15 +68,19 @@ export type TrackerIdWorkspaceId = { workspaceId: string; }; +export type TrackerDataSource = { + id: ModelId; + name: string; + dustAPIProjectId: string; + dustAPIDataSourceId: string; +}; + export type TrackerGenerationToProcess = { id: ModelId; content: string; thinking: string | null; documentId: string; - dataSource: { - id: ModelId; - name: string; - dustAPIProjectId: string; - dustAPIDataSourceId: string; - }; + dataSource: TrackerDataSource; + maintainedDataSource: TrackerDataSource | null; + maintainedDocumentId: string | null; }; From cab4e8b44206ad0699a5c682321aea9bff01afe7 Mon Sep 17 00:00:00 2001 From: Aubin <60398825+aubin-tchoi@users.noreply.github.com> Date: Tue, 14 Jan 2025 14:17:39 +0100 
Subject: [PATCH 2/9] [front] Replace parents splice/unshift logic with validation in `lib/api/data_sources.ts` (#9948) * remove the ability of upsertTable and upsertDocument to mutate parents, replace with validation * fix a call site * use the tableId in upsertArgs if provided (contains the content node ID if already existing) * fix the check * move the validation up in the execution flow * rename name into document_id in UpsertDocumentArgs * patch the types * invert an && statement to optimize for the most common case --- .../data_source/MultipleDocumentsUpload.tsx | 2 +- front/lib/api/data_sources.ts | 108 +++++++++++------- front/lib/api/files/upsert.ts | 7 +- front/lib/error.ts | 1 + .../api/w/[wId]/data_sources/[dsId]/files.ts | 16 +-- .../[dsId]/documents/[documentId]/index.ts | 2 +- .../data_sources/[dsId]/documents/index.ts | 2 +- .../data_sources/[dsId]/tables/index.ts | 3 + 8 files changed, 81 insertions(+), 60 deletions(-) diff --git a/front/components/data_source/MultipleDocumentsUpload.tsx b/front/components/data_source/MultipleDocumentsUpload.tsx index f71760fb5109..118e11698381 100644 --- a/front/components/data_source/MultipleDocumentsUpload.tsx +++ b/front/components/data_source/MultipleDocumentsUpload.tsx @@ -122,7 +122,7 @@ export const MultipleDocumentsUpload = ({ // Have to use the filename to avoid fileId becoming apparent in the UI. 
upsertArgs: { title: blob.filename, - name: blob.filename, + document_id: blob.filename, }, }); diff --git a/front/lib/api/data_sources.ts b/front/lib/api/data_sources.ts index dba6cd1c18ee..a03478d79d7b 100644 --- a/front/lib/api/data_sources.ts +++ b/front/lib/api/data_sources.ts @@ -226,8 +226,9 @@ export async function augmentDataSourceWithConnectorDetails( fetchConnectorErrorMessage, }; } -export type UpsertDocumentArgs = { - name: string; + +export interface UpsertDocumentArgs { + document_id: string; source_url?: string | null; text?: string | null; section?: FrontDataSourceDocumentSectionType | null; @@ -240,9 +241,10 @@ export type UpsertDocumentArgs = { auth: Authenticator; mime_type: string; title: string; -}; +} + export async function upsertDocument({ - name, + document_id, source_url, text, section, @@ -268,6 +270,33 @@ export async function upsertDocument({ DustError > > { + // enforcing validation on the parents and parent_id + const documentId = document_id; + const documentParents = parents || [documentId]; + const documentParentId = parent_id ?? 
null; + + // parents must comply to the invariant parents[0] === document_id + if (documentParents[0] !== documentId) { + return new Err( + new DustError( + "invalid_parents", + "Invalid request body, parents[0] and document_id should be equal" + ) + ); + } + // parents and parentId must comply to the invariant parents[1] === parentId || (parentId === null && parents.length < 2) + if ( + (documentParents.length >= 2 || documentParentId !== null) && + documentParents[1] !== documentParentId + ) { + return new Err( + new DustError( + "invalid_parent_id", + "Invalid request body, parents[1] and parent_id should be equal" + ) + ); + } + let sourceUrl: string | null = null; if (source_url) { const { valid: isSourceUrlValid, standardized: standardizedSourceUrl } = @@ -317,15 +346,6 @@ export async function upsertDocument({ ); } - if (parent_id && parents && parents[1] !== parent_id) { - return new Err( - new DustError( - "invalid_parent_id", - "Invalid request body, parents[1] and parent_id should be equal" - ) - ); - } - const fullText = sectionFullText(generatedSection); const coreAPI = new CoreAPI(apiConfig.getCoreAPIConfig(), logger); @@ -384,24 +404,13 @@ export async function upsertDocument({ // Data source operations are performed with our credentials. const credentials = dustManagedCredentials(); - const documentId = name; - const documentParents = parents || []; - - // Ensure that the documentId is included in the parents as the first item. - // remove it if it's already present and add it as the first item. - const indexOfDocumentId = documentParents.indexOf(documentId); - if (indexOfDocumentId !== -1) { - documentParents.splice(indexOfDocumentId, 1); - } - documentParents.unshift(documentId); - // Create document with the Dust internal API. 
const upsertRes = await coreAPI.upsertDataSourceDocument({ projectId: dataSource.dustAPIProjectId, dataSourceId: dataSource.dustAPIDataSourceId, - documentId: documentId, + documentId, tags: nonNullTags, - parentId: parent_id ?? null, + parentId: documentParentId, parents: documentParents, sourceUrl, // TEMPORARY -- need to unstuck a specific entry @@ -426,8 +435,8 @@ export async function upsertDocument({ return new Ok(upsertRes.value); } -export type UpsertTableArgs = { - tableId?: string | null; +export interface UpsertTableArgs { + tableId: string; name: string; description: string; truncate: boolean; @@ -442,7 +451,8 @@ export type UpsertTableArgs = { useAppForHeaderDetection?: boolean; title: string; mimeType: string; -}; +} + export async function upsertTable({ tableId, name, @@ -460,16 +470,30 @@ export async function upsertTable({ title, mimeType, }: UpsertTableArgs) { - const nonNullTableId = tableId ?? generateRandomModelSId(); - const tableParents: string[] = parents ?? []; - - // Ensure that the nonNullTableId is included in the parents as the first item. - // remove it if it's already present and add it as the first item. - const indexOfTableId = tableParents.indexOf(nonNullTableId); - if (indexOfTableId !== -1) { - tableParents.splice(indexOfTableId, 1); + const tableParents = parents ?? [tableId]; + const tableParentId = parentId ?? 
null; + + // parents must comply to the invariant parents[0] === document_id + if (tableParents[0] !== tableId) { + return new Err( + new DustError( + "invalid_parents", + "Invalid request body, parents[0] and table_id should be equal" + ) + ); + } + // parents and parentId must comply to the invariant parents[1] === parentId + if ( + (tableParents.length >= 2 || tableParentId !== null) && + tableParents[1] !== tableParentId + ) { + return new Err( + new DustError( + "invalid_parent_id", + "Invalid request body, parents[1] and parent_id should be equal" + ) + ); } - tableParents.unshift(nonNullTableId); const flags = await getFeatureFlags(auth.getNonNullableWorkspace()); @@ -496,12 +520,12 @@ export async function upsertTable({ upsertTable: { workspaceId: auth.getNonNullableWorkspace().sId, dataSourceId: dataSource.sId, - tableId: nonNullTableId, + tableId, tableName: name, tableDescription: description, tableTimestamp: timestamp ?? null, tableTags: tags ?? [], - tableParentId: parentId ?? null, + tableParentId, tableParents, csv: csv ?? null, truncate, @@ -521,12 +545,12 @@ export async function upsertTable({ const tableRes = await upsertTableFromCsv({ auth, dataSource: dataSource, - tableId: nonNullTableId, + tableId, tableName: name, tableDescription: description, tableTimestamp: timestamp ?? null, tableTags: tags || [], - tableParentId: parentId ?? null, + tableParentId, tableParents, csv: csv ?? null, truncate, diff --git a/front/lib/api/files/upsert.ts b/front/lib/api/files/upsert.ts index 3295870b13d2..6f94fe30d4cf 100644 --- a/front/lib/api/files/upsert.ts +++ b/front/lib/api/files/upsert.ts @@ -207,14 +207,13 @@ const upsertDocumentToDatasource: ProcessingFunction = async ({ upsertArgs, }) => { // Use the file id as the document id to make it easy to track the document back to the file. 
- const documentId = file.sId; const sourceUrl = file.getPrivateUrl(auth); const upsertDocumentRes = await upsertDocument({ - name: documentId, + document_id: file.sId, source_url: sourceUrl, text: content, - parents: [documentId], + parents: [file.sId], tags: [`title:${file.fileName}`, `fileId:${file.sId}`], light_document_output: true, dataSource, @@ -245,7 +244,7 @@ const upsertTableToDatasource: ProcessingFunction = async ({ dataSource, upsertArgs, }) => { - const tableId = file.sId; // Use the file sId as the table id to make it easy to track the table back to the file. + const tableId = upsertArgs?.tableId ?? file.sId; // Use the file sId as a fallback for the table_id to make it easy to track the table back to the file. const upsertTableRes = await upsertTable({ tableId, name: slugify(file.fileName), diff --git a/front/lib/error.ts b/front/lib/error.ts index b41bfc5e7fb3..abf36afc2cc7 100644 --- a/front/lib/error.ts +++ b/front/lib/error.ts @@ -10,6 +10,7 @@ export type DustErrorCode = | "data_source_quota_error" | "text_or_section_required" | "invalid_url" + | "invalid_parents" | "invalid_parent_id" // Table | "missing_csv" diff --git a/front/pages/api/w/[wId]/data_sources/[dsId]/files.ts b/front/pages/api/w/[wId]/data_sources/[dsId]/files.ts index 78a49f135504..b45d4898b84d 100644 --- a/front/pages/api/w/[wId]/data_sources/[dsId]/files.ts +++ b/front/pages/api/w/[wId]/data_sources/[dsId]/files.ts @@ -1,6 +1,5 @@ import type { FileType, WithAPIErrorResponse } from "@dust-tt/types"; -import type { NextApiRequest } from "next"; -import type { NextApiResponse } from "next"; +import type { NextApiRequest, NextApiResponse } from "next"; import { withSessionAuthenticationForWorkspace } from "@app/lib/api/auth_wrappers"; import type { @@ -17,16 +16,11 @@ import { apiError } from "@app/logger/withlogging"; export interface UpsertFileToDataSourceRequestBody { fileId: string; upsertArgs?: - | Pick - | Pick< + | Pick + | (Pick< UpsertTableArgs, - | "name" - | "title" 
- | "description" - | "tableId" - | "tags" - | "useAppForHeaderDetection" - >; + "name" | "title" | "description" | "tags" | "useAppForHeaderDetection" + > & { tableId: string | undefined }); // we actually don't always have a tableId, this is very dirty, but the refactoring should be done at the level of the whole upsertArgs mechanic } export interface UpsertFileToDataSourceResponseBody { diff --git a/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/documents/[documentId]/index.ts b/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/documents/[documentId]/index.ts index bff299183488..2ffb8f9fd7d0 100644 --- a/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/documents/[documentId]/index.ts +++ b/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/documents/[documentId]/index.ts @@ -118,7 +118,7 @@ async function handler( } = bodyValidation.right; const upsertResult = await upsertDocument({ - name: documentId, + document_id: documentId, source_url, text, section, diff --git a/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/documents/index.ts b/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/documents/index.ts index 4d7635cf23ec..a7dafafaede5 100644 --- a/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/documents/index.ts +++ b/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/documents/index.ts @@ -111,7 +111,7 @@ async function handler( } = bodyValidation.right; const upsertResult = await upsertDocument({ - name, + document_id: name, // using the name as the document_id since we don't have one here source_url, text, section, diff --git a/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/tables/index.ts b/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/tables/index.ts index a207d2170d90..08bba7a8e192 100644 --- a/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/tables/index.ts +++ 
b/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/tables/index.ts @@ -10,6 +10,7 @@ import { withResourceFetchingFromRoute } from "@app/lib/api/resource_wrappers"; import type { Authenticator } from "@app/lib/auth"; import { DataSourceResource } from "@app/lib/resources/data_source_resource"; import type { SpaceResource } from "@app/lib/resources/space_resource"; +import { generateRandomModelSId } from "@app/lib/resources/string_ids"; import { apiError } from "@app/logger/withlogging"; export const config = { @@ -93,11 +94,13 @@ async function handler( }); } + const tableId = generateRandomModelSId(); const upsertRes = await upsertTable({ ...bodyValidation.right, async: bodyValidation.right.async ?? false, dataSource, auth, + tableId, }); if (upsertRes.isErr()) { From 1e206b1d3a71490700b781590e437a717fb9889e Mon Sep 17 00:00:00 2001 From: Thomas Draier Date: Tue, 14 Jan 2025 15:08:59 +0100 Subject: [PATCH 3/9] [front] Import/export apps api (#9870) * Import/export apps * use system keys * more flexible schema * Add read permission check * Skip app run if no dataset * extract method to lib * Use poke for triggering sync * swagger * missing files * missing files * more logging/error info * Add possibility to display previous specification stored in core * Update front/lib/api/poke/plugins/spaces/sync_apps.ts Co-authored-by: Flavien David * Update front/lib/api/poke/plugins/spaces/sync_apps.ts Co-authored-by: Flavien David * Update front/lib/utils/apps.ts Co-authored-by: Flavien David * review comments * split function * doc * replaced promise.all * lint * Add column with dust-app status * review comments * Track and return app run errors * cleaning results * Add app checks script * check endpoint * Update front/components/spaces/SpaceAppsList.tsx Co-authored-by: Flavien David * review comments * format --------- Co-authored-by: Flavien David --- front/admin/cli.ts | 62 ++- front/components/spaces/SpaceAppsList.tsx | 109 ++++- front/lib/api/config.ts | 20 
+ front/lib/api/poke/plugins/index.ts | 1 + front/lib/api/poke/plugins/spaces/index.ts | 1 + .../lib/api/poke/plugins/spaces/sync_apps.ts | 40 ++ front/lib/utils/apps.ts | 416 ++++++++++++++++++ .../v1/w/[wId]/spaces/[spaceId]/apps/check.ts | 102 +++++ .../w/[wId]/spaces/[spaceId]/apps/export.ts | 87 ++++ .../poke/[wId]/spaces/[spaceId]/index.tsx | 8 + .../[spaceId]/apps/[aId]/specification.tsx | 38 +- front/public/swagger.json | 4 +- sdks/js/src/index.ts | 32 ++ sdks/js/src/types.ts | 52 ++- 14 files changed, 948 insertions(+), 24 deletions(-) create mode 100644 front/lib/api/poke/plugins/spaces/index.ts create mode 100644 front/lib/api/poke/plugins/spaces/sync_apps.ts create mode 100644 front/lib/utils/apps.ts create mode 100644 front/pages/api/v1/w/[wId]/spaces/[spaceId]/apps/check.ts create mode 100644 front/pages/api/v1/w/[wId]/spaces/[spaceId]/apps/export.ts diff --git a/front/admin/cli.ts b/front/admin/cli.ts index 7fd4b2c23933..0f290d8ddbc1 100644 --- a/front/admin/cli.ts +++ b/front/admin/cli.ts @@ -1,18 +1,7 @@ -import { - assertNever, - ConnectorsAPI, - removeNulls, - SUPPORTED_MODEL_CONFIGS, -} from "@dust-tt/types"; -import parseArgs from "minimist"; - import { getConversation } from "@app/lib/api/assistant/conversation"; import { renderConversationForModel } from "@app/lib/api/assistant/generation"; -import { - getTextContentFromMessage, - getTextRepresentationFromMessages, -} from "@app/lib/api/assistant/utils"; -import config from "@app/lib/api/config"; +import { getTextRepresentationFromMessages } from "@app/lib/api/assistant/utils"; +import { default as config } from "@app/lib/api/config"; import { getDataSources } from "@app/lib/api/data_sources"; import { garbageCollectGoogleDriveDocument } from "@app/lib/api/poke/plugins/data_sources/garbage_collect_google_drive_document"; import { Authenticator } from "@app/lib/auth"; @@ -38,6 +27,14 @@ import { stopRetrieveTranscriptsWorkflow, } from "@app/temporal/labs/client"; import { REGISTERED_CHECKS } 
from "@app/temporal/production_checks/activities"; +import { DustAPI } from "@dust-tt/client"; +import { + assertNever, + ConnectorsAPI, + removeNulls, + SUPPORTED_MODEL_CONFIGS, +} from "@dust-tt/types"; +import parseArgs from "minimist"; // `cli` takes an object type and a command as first two arguments and then a list of arguments. const workspace = async (command: string, args: parseArgs.ParsedArgs) => { @@ -493,6 +490,45 @@ const productionCheck = async (command: string, args: parseArgs.ParsedArgs) => { ); return; } + case "check-apps": { + if (!args.url) { + throw new Error("Missing --url argument"); + } + if (!args.wId) { + throw new Error("Missing --wId argument"); + } + if (!args.spaceId) { + throw new Error("Missing --spaceId argument"); + } + const api = new DustAPI( + config.getDustAPIConfig(), + { apiKey: args.apiKey, workspaceId: args.wId }, + logger, + args.url + ); + + const actions = Object.values(DustProdActionRegistry); + + const res = await api.checkApps( + { + apps: actions.map((action) => ({ + appId: action.app.appId, + appHash: action.app.appHash, + })), + }, + args.spaceId + ); + if (res.isErr()) { + throw new Error(res.error.message); + } + const notDeployedApps = res.value.filter((a) => !a.deployed); + if (notDeployedApps.length > 0) { + throw new Error( + "Missing apps: " + notDeployedApps.map((a) => a.appId).join(", ") + ); + } + console.log("All apps are deployed"); + } } }; diff --git a/front/components/spaces/SpaceAppsList.tsx b/front/components/spaces/SpaceAppsList.tsx index d1683f51b5e1..600e9b851c10 100644 --- a/front/components/spaces/SpaceAppsList.tsx +++ b/front/components/spaces/SpaceAppsList.tsx @@ -8,19 +8,32 @@ import { Spinner, usePaginationFromUrl, } from "@dust-tt/sparkle"; -import type { ConnectorType, SpaceType, WorkspaceType } from "@dust-tt/types"; +import type { + AppType, + ConnectorType, + LightWorkspaceType, + SpaceType, + WorkspaceType, +} from "@dust-tt/types"; import type { CellContext } from 
"@tanstack/react-table"; import { sortBy } from "lodash"; +import Link from "next/link"; import { useRouter } from "next/router"; import type { ComponentType } from "react"; -import { useRef } from "react"; -import { useState } from "react"; import * as React from "react"; +import { useRef, useState } from "react"; import { SpaceCreateAppModal } from "@app/components/spaces/SpaceCreateAppModal"; -import { useApps } from "@app/lib/swr/apps"; +import type { Action } from "@app/lib/registry"; +import { + DustProdActionRegistry, + PRODUCTION_DUST_APPS_SPACE_ID, + PRODUCTION_DUST_APPS_WORKSPACE_ID, +} from "@app/lib/registry"; +import { useApps, useSavedRunStatus } from "@app/lib/swr/apps"; type RowData = { + app: AppType; category: string; name: string; icon: ComponentType; @@ -44,10 +57,85 @@ const getTableColumns = () => { ]; }; +const getDustAppsColumns = (owner: WorkspaceType) => ({ + id: "hash", + cell: (info: CellContext) => { + const { app } = info.row.original; + const registryApp = Object.values(DustProdActionRegistry).find( + (action) => action.app.appId === app.sId + ); + if (!registryApp) { + return ( + + No registry app + + ); + } + return ( + + + + ); + }, + accessorFn: (row: RowData) => row.name, +}); + +type AppHashCheckerProps = { + owner: LightWorkspaceType; + app: AppType; + registryApp: Action["app"]; +}; + +const AppHashChecker = ({ owner, app, registryApp }: AppHashCheckerProps) => { + const { run, isRunError } = useSavedRunStatus(owner, app, (data) => { + switch (data?.run?.status?.run) { + case "running": + return 100; + default: + return 0; + } + }); + + if ( + registryApp.appHash && + run?.app_hash && + registryApp.appHash !== run.app_hash + ) { + return ( + + Inconsistent hashes,{" "} + { + e.stopPropagation(); + }} + > + compare + + + ); + } + + if (isRunError) { + return Error: {isRunError.error?.message}; + } + + if (!run) { + return No run found; + } + + if (run?.status.run === "errored") { + return Run failed; + } + + return ""; +}; + 
interface SpaceAppsListProps { canWriteInSpace: boolean; onSelect: (sId: string) => void; - owner: WorkspaceType; + owner: LightWorkspaceType; space: SpaceType; } @@ -73,6 +161,7 @@ export const SpaceAppsList = ({ const rows: RowData[] = React.useMemo( () => sortBy(apps, "name").map((app) => ({ + app, sId: app.sId, category: "apps", name: app.name, @@ -91,6 +180,14 @@ export const SpaceAppsList = ({ ); } + const columns = getTableColumns(); + if ( + owner.sId === PRODUCTION_DUST_APPS_WORKSPACE_ID && + space.sId === PRODUCTION_DUST_APPS_SPACE_ID + ) { + columns.push(getDustAppsColumns(owner)); + } + return ( <> {rows.length === 0 ? ( @@ -140,7 +237,7 @@ export const SpaceAppsList = ({ { return EnvironmentConfig.getEnvVariable("STATUS_PAGE_API_TOKEN"); }, + getDustAppsSyncEnabled: (): boolean => { + return ( + EnvironmentConfig.getOptionalEnvVariable("DUST_APPS_SYNC_ENABLED") === + "true" + ); + }, + getDustAppsSyncMasterApiUrl: (): string => { + return EnvironmentConfig.getEnvVariable("DUST_APPS_SYNC_MASTER_API_URL"); + }, + getDustAppsSyncMasterWorkspaceId: (): string => { + return EnvironmentConfig.getEnvVariable( + "DUST_APPS_SYNC_MASTER_WORKSPACE_ID" + ); + }, + getDustAppsSyncMasterSpaceId: (): string => { + return EnvironmentConfig.getEnvVariable("DUST_APPS_SYNC_MASTER_SPACE_ID"); + }, + getDustAppsSyncMasterApiKey: (): string => { + return EnvironmentConfig.getEnvVariable("DUST_APPS_SYNC_MASTER_API_KEY"); + }, }; export default config; diff --git a/front/lib/api/poke/plugins/index.ts b/front/lib/api/poke/plugins/index.ts index c4a160d94c4b..a32eefdb54a8 100644 --- a/front/lib/api/poke/plugins/index.ts +++ b/front/lib/api/poke/plugins/index.ts @@ -1,4 +1,5 @@ export * from "./data_source_views"; export * from "./data_sources"; export * from "./global"; +export * from "./spaces"; export * from "./workspaces"; diff --git a/front/lib/api/poke/plugins/spaces/index.ts b/front/lib/api/poke/plugins/spaces/index.ts new file mode 100644 index 000000000000..aefade5833dd 
--- /dev/null +++ b/front/lib/api/poke/plugins/spaces/index.ts @@ -0,0 +1 @@ +export * from "./sync_apps"; diff --git a/front/lib/api/poke/plugins/spaces/sync_apps.ts b/front/lib/api/poke/plugins/spaces/sync_apps.ts new file mode 100644 index 000000000000..4bd9dd83cf8b --- /dev/null +++ b/front/lib/api/poke/plugins/spaces/sync_apps.ts @@ -0,0 +1,40 @@ +import { Err, Ok } from "@dust-tt/types"; + +import { createPlugin } from "@app/lib/api/poke/types"; +import { SpaceResource } from "@app/lib/resources/space_resource"; +import { synchronizeDustApps } from "@app/lib/utils/apps"; + +export const syncAppsPlugin = createPlugin( + { + id: "sync-apps", + name: "Sync dust-apps", + description: "Synchronize dust-apps from production", + resourceTypes: ["spaces"], + args: {}, + }, + async (auth, spaceId) => { + if (!spaceId) { + return new Err(new Error("No space specified")); + } + + const space = await SpaceResource.fetchById(auth, spaceId); + if (!space) { + return new Err(new Error("Space not found")); + } + const result = await synchronizeDustApps(auth, space); + if (result.isErr()) { + return new Err(new Error(`Error when syncing: ${result.error.message}`)); + } + if (!result.value) { + return new Ok({ + display: "text", + value: "Sync not enabled.", + }); + } + + return new Ok({ + display: "json", + value: { importedApp: result.value }, + }); + } +); diff --git a/front/lib/utils/apps.ts b/front/lib/utils/apps.ts new file mode 100644 index 000000000000..c767a3afcb6b --- /dev/null +++ b/front/lib/utils/apps.ts @@ -0,0 +1,416 @@ +import type { ApiAppType } from "@dust-tt/client"; +import { DustAPI } from "@dust-tt/client"; +import type { CoreAPIError, Result, TraceType } from "@dust-tt/types"; +import { + CoreAPI, + credentialsFromProviders, + Err, + Ok, + removeNulls, +} from "@dust-tt/types"; +import { createParser } from "eventsource-parser"; +import _ from "lodash"; + +import { default as apiConfig, default as config } from "@app/lib/api/config"; +import { 
getDustAppSecrets } from "@app/lib/api/dust_app_secrets"; +import type { Authenticator } from "@app/lib/auth"; +import { AppResource } from "@app/lib/resources/app_resource"; +import { RunResource } from "@app/lib/resources/run_resource"; +import type { SpaceResource } from "@app/lib/resources/space_resource"; +import { Dataset, Provider } from "@app/lib/resources/storage/models/apps"; +import { dumpSpecification } from "@app/lib/specification"; +import logger from "@app/logger/logger"; + +async function updateOrCreateApp( + auth: Authenticator, + { + appToImport, + space, + }: { + appToImport: ApiAppType; + space: SpaceResource; + } +): Promise< + Result<{ app: AppResource; updated: boolean }, Error | CoreAPIError> +> { + const existingApps = await AppResource.listBySpace(auth, space, { + includeDeleted: true, + }); + const existingApp = existingApps.find((a) => a.sId === appToImport.sId); + if (existingApp) { + // Check if existing app was deleted + if (existingApp.deletedAt) { + return new Err( + new Error("App has been deleted, it can't be reimported.") + ); + } + + // Now update if name/descriptions have been modified + if ( + existingApp.name !== appToImport.name || + existingApp.description !== appToImport.description + ) { + await existingApp.updateSettings(auth, { + name: appToImport.name, + description: appToImport.description, + }); + return new Ok({ app: existingApp, updated: true }); + } + return new Ok({ app: existingApp, updated: false }); + } else { + // An app with this sId exist, check workspace and space first to see if it matches + const existingApp = await AppResource.fetchById(auth, appToImport.sId); + if (existingApp) { + return new Err( + new Error("App with this sId already exists in another space.") + ); + } + + // App does not exist, create a new app + const coreAPI = new CoreAPI(config.getCoreAPIConfig(), logger); + const p = await coreAPI.createProject(); + + if (p.isErr()) { + return p; + } + const dustAPIProject = p.value.project; + + 
const owner = auth.getNonNullableWorkspace(); + const newApp = await AppResource.makeNew( + { + sId: appToImport.sId, + name: appToImport.name, + description: appToImport.description, + visibility: "private", + dustAPIProjectId: dustAPIProject.project_id.toString(), + workspaceId: owner.id, + }, + space + ); + + return new Ok({ app: newApp, updated: true }); + } +} + +async function updateDatasets( + auth: Authenticator, + { + app, + datasetsToImport, + }: { + app: AppResource; + datasetsToImport: ApiAppType["datasets"]; + } +): Promise> { + if (datasetsToImport) { + const owner = auth.getNonNullableWorkspace(); + const coreAPI = new CoreAPI(config.getCoreAPIConfig(), logger); + + // Getting all existing datasets for this app + const existingDatasets = await Dataset.findAll({ + where: { + workspaceId: owner.id, + appId: app.id, + }, + }); + + for (const datasetToImport of datasetsToImport) { + // First, create or update the dataset in core + const coreDataset = await coreAPI.createDataset({ + projectId: app.dustAPIProjectId, + datasetId: datasetToImport.name, + data: datasetToImport.data || [], + }); + if (coreDataset.isErr()) { + return coreDataset; + } + + // Now update the dataset in front if it exists, or create one + const dataset = existingDatasets.find( + (d) => d.name === datasetToImport.name + ); + if (dataset) { + if ( + !_.isEqual(dataset.schema, datasetToImport.schema) || + dataset.description !== datasetToImport.description + ) { + await dataset.update({ + description: datasetToImport.description, + schema: datasetToImport.schema, + }); + } + } else { + await Dataset.create({ + name: datasetToImport.name, + description: datasetToImport.description, + appId: app.id, + workspaceId: owner.id, + schema: datasetToImport.schema, + }); + } + } + } + return new Ok(true); +} + +async function updateAppSpecifications( + auth: Authenticator, + { + app, + savedSpecification, + savedConfig, + }: { + app: AppResource; + savedSpecification: string; + savedConfig: 
string; + } +): Promise> { + // Specification and config have been modified and need to be imported + if ( + savedSpecification !== app.savedSpecification && + savedConfig !== app.savedConfig + ) { + // Fetch all datasets from core for this app + const coreAPI = new CoreAPI(config.getCoreAPIConfig(), logger); + const coreDatasets = await coreAPI.getDatasets({ + projectId: app.dustAPIProjectId, + }); + if (coreDatasets.isErr()) { + return coreDatasets; + } + + const latestDatasets: { [key: string]: string } = {}; + for (const d in coreDatasets.value.datasets) { + latestDatasets[d] = coreDatasets.value.datasets[d][0].hash; + } + + const [datasetId] = Object.keys(latestDatasets); + if (datasetId) { + const owner = auth.getNonNullableWorkspace(); + // Fetch providers and secrets + const [providers, secrets] = await Promise.all([ + Provider.findAll({ + where: { + workspaceId: owner.id, + }, + }), + getDustAppSecrets(auth, true), + ]); + + // Create a new run to save specifications and configs + const dustRun = await coreAPI.createRunStream(owner, auth.groups(), { + projectId: app.dustAPIProjectId, + runType: "local", + specification: dumpSpecification( + JSON.parse(savedSpecification), + latestDatasets + ), + config: { blocks: JSON.parse(savedConfig) }, + credentials: credentialsFromProviders(providers), + datasetId, + secrets, + storeBlocksResults: true, + }); + + if (dustRun.isErr()) { + logger.error(app, "Failed to create run for app"); + return dustRun; + } + + let error = undefined; + try { + // Intercept block_execution events to store token usages. 
+ const parser = createParser((event) => { + if (event.type === "event") { + if (event.data) { + const data = JSON.parse(event.data); + if (data.type === "block_execution") { + const traces: TraceType[][] = data.content.execution; + const errs = traces.flatMap((trace) => + removeNulls(trace.map((t) => t.error)) + ); + if (errs.length > 0) { + throw new Error(errs[0]); + } + } + } + } + }); + + for await (const chunk of dustRun.value.chunkStream) { + parser.feed(new TextDecoder().decode(chunk)); + } + } catch (err) { + if (err instanceof Error) { + error = err.message; + } else { + error = String(err); + } + } + + const dustRunId = await dustRun.value.dustRunId; + + // Update app state + await Promise.all([ + RunResource.makeNew({ + dustRunId, + appId: app.id, + runType: "local", + workspaceId: owner.id, + }), + + app.updateState(auth, { + savedSpecification, + savedConfig, + savedRun: dustRunId, + }), + ]); + + if (error) { + return new Err(new Error(error)); + } + + return new Ok(true); + } + } + return new Ok(false); +} + +export async function importApp( + auth: Authenticator, + space: SpaceResource, + appToImport: ApiAppType +): Promise< + Result<{ app: AppResource; updated: boolean }, CoreAPIError | Error> +> { + const appRes = await updateOrCreateApp(auth, { + appToImport, + space, + }); + if (appRes.isErr()) { + logger.error( + { sId: appToImport.sId, name: appToImport.name, error: appRes.error }, + "Error when importing app config" + ); + return appRes; + } + + const { app, updated } = appRes.value; + + const datasetsRes = await updateDatasets(auth, { + app, + datasetsToImport: appToImport.datasets, + }); + if (datasetsRes.isErr()) { + logger.error( + { + sId: app.sId, + name: app.name, + error: datasetsRes.error, + }, + "Error when importing app datasets" + ); + return datasetsRes; + } + + if (appToImport.savedSpecification && appToImport.savedConfig) { + const updateSpecificationsRes = await updateAppSpecifications(auth, { + app, + savedSpecification: 
appToImport.savedSpecification, + savedConfig: appToImport.savedConfig, + }); + if (updateSpecificationsRes.isErr()) { + logger.error( + { + sId: app.sId, + name: app.name, + error: updateSpecificationsRes.error, + }, + "Error when importing app specifications" + ); + return updateSpecificationsRes; + } + + const specUpdated = updateSpecificationsRes.value; + if (updated || specUpdated) { + logger.info( + { sId: app.sId, appName: app.name }, + "App imported successfully" + ); + } + + return new Ok({ app, updated: updated || specUpdated }); + } + + if (updated) { + logger.info( + { sId: app.sId, appName: app.name }, + "App imported successfully" + ); + } + return new Ok({ app, hash: undefined, updated }); +} + +interface ImportRes { + sId: string; + name: string; + error?: string; +} + +export async function importApps( + auth: Authenticator, + space: SpaceResource, + appsToImport: ApiAppType[] +): Promise { + const apps: ImportRes[] = []; + + for (const appToImport of appsToImport) { + const res = await importApp(auth, space, appToImport); + if (res.isErr()) { + apps.push({ + sId: appToImport.sId, + name: appToImport.name, + error: res.error.message, + }); + } else { + const { app, updated } = res.value; + if (updated) { + apps.push({ sId: app.sId, name: app.name }); + } + } + } + + return apps; +} + +export async function synchronizeDustApps( + auth: Authenticator, + space: SpaceResource +): Promise> { + if (!apiConfig.getDustAppsSyncEnabled()) { + return new Ok([]); + } + + const syncMasterApi = new DustAPI( + apiConfig.getDustAPIConfig(), + { + apiKey: apiConfig.getDustAppsSyncMasterApiKey(), + workspaceId: apiConfig.getDustAppsSyncMasterWorkspaceId(), + }, + logger, + apiConfig.getDustAppsSyncMasterApiUrl() + ); + + const exportRes = await syncMasterApi.exportApps({ + appSpaceId: apiConfig.getDustAppsSyncMasterSpaceId(), + }); + + if (exportRes.isErr()) { + const e = exportRes.error; + return new Err(new Error(`Cannot export: ${e.message}`)); + } + + const 
importRes = await importApps(auth, space, exportRes.value); + logger.info({ importedApp: importRes }, "Apps imported"); + return new Ok(importRes); +} diff --git a/front/pages/api/v1/w/[wId]/spaces/[spaceId]/apps/check.ts b/front/pages/api/v1/w/[wId]/spaces/[spaceId]/apps/check.ts new file mode 100644 index 000000000000..1a92d00c4215 --- /dev/null +++ b/front/pages/api/v1/w/[wId]/spaces/[spaceId]/apps/check.ts @@ -0,0 +1,102 @@ +import type { AppsCheckResponseType } from "@dust-tt/client"; +import { AppsCheckRequestSchema } from "@dust-tt/client"; +import type { WithAPIErrorResponse } from "@dust-tt/types"; +import { concurrentExecutor, CoreAPI } from "@dust-tt/types"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { fromError } from "zod-validation-error"; + +import { withPublicAPIAuthentication } from "@app/lib/api/auth_wrappers"; +import config from "@app/lib/api/config"; +import { withResourceFetchingFromRoute } from "@app/lib/api/resource_wrappers"; +import type { Authenticator } from "@app/lib/auth"; +import { AppResource } from "@app/lib/resources/app_resource"; +import type { SpaceResource } from "@app/lib/resources/space_resource"; +import logger from "@app/logger/logger"; +import { apiError } from "@app/logger/withlogging"; + +/** + * @ignoreswagger + * System API key only endpoint. Undocumented. 
+ */ +async function handler( + req: NextApiRequest, + res: NextApiResponse>, + auth: Authenticator, + { space }: { space: SpaceResource } +): Promise { + if (!auth.isSystemKey()) { + return apiError(req, res, { + status_code: 403, + api_error: { + type: "invalid_oauth_token_error", + message: "Only system keys are allowed to use this endpoint.", + }, + }); + } + + if (!space.canRead(auth)) { + return apiError(req, res, { + status_code: 404, + api_error: { + type: "space_not_found", + message: "The space you requested was not found.", + }, + }); + } + + switch (req.method) { + case "POST": + const r = AppsCheckRequestSchema.safeParse(req.body); + + if (r.error) { + return apiError(req, res, { + status_code: 400, + api_error: { + type: "invalid_request_error", + message: fromError(r.error).toString(), + }, + }); + } + + const coreAPI = new CoreAPI(config.getCoreAPIConfig(), logger); + const apps = await concurrentExecutor( + r.data.apps, + async (appRequest) => { + const app = await AppResource.fetchById(auth, appRequest.appId); + if (!app) { + return { ...appRequest, deployed: false }; + } + const coreSpec = await coreAPI.getSpecification({ + projectId: app.dustAPIProjectId, + specificationHash: appRequest.appHash, + }); + if (coreSpec.isErr()) { + return { ...appRequest, deployed: false }; + } + + return { ...appRequest, deployed: true }; + }, + { concurrency: 5 } + ); + + res.status(200).json({ + apps, + }); + return; + + default: + return apiError(req, res, { + status_code: 405, + api_error: { + type: "method_not_supported_error", + message: "The method passed is not supported, GET is expected.", + }, + }); + } +} + +export default withPublicAPIAuthentication( + withResourceFetchingFromRoute(handler, { + space: { requireCanRead: true }, + }) +); diff --git a/front/pages/api/v1/w/[wId]/spaces/[spaceId]/apps/export.ts b/front/pages/api/v1/w/[wId]/spaces/[spaceId]/apps/export.ts new file mode 100644 index 000000000000..7a416617ce66 --- /dev/null +++ 
b/front/pages/api/v1/w/[wId]/spaces/[spaceId]/apps/export.ts @@ -0,0 +1,87 @@ +import type { GetAppsResponseType } from "@dust-tt/client"; +import type { WithAPIErrorResponse } from "@dust-tt/types"; +import { concurrentExecutor } from "@dust-tt/types"; +import type { NextApiRequest, NextApiResponse } from "next"; + +import { withPublicAPIAuthentication } from "@app/lib/api/auth_wrappers"; +import { getDatasetHash, getDatasets } from "@app/lib/api/datasets"; +import { withResourceFetchingFromRoute } from "@app/lib/api/resource_wrappers"; +import type { Authenticator } from "@app/lib/auth"; +import { AppResource } from "@app/lib/resources/app_resource"; +import type { SpaceResource } from "@app/lib/resources/space_resource"; +import { apiError } from "@app/logger/withlogging"; + +/** + * @ignoreswagger + * System API key only endpoint. Undocumented. + */ +async function handler( + req: NextApiRequest, + res: NextApiResponse>, + auth: Authenticator, + { space }: { space: SpaceResource } +): Promise { + if (!auth.isSystemKey()) { + return apiError(req, res, { + status_code: 403, + api_error: { + type: "invalid_oauth_token_error", + message: "Only system keys are allowed to use this endpoint.", + }, + }); + } + + if (!space.canRead(auth)) { + return apiError(req, res, { + status_code: 404, + api_error: { + type: "space_not_found", + message: "The space you requested was not found.", + }, + }); + } + + switch (req.method) { + case "GET": + const apps = await AppResource.listBySpace(auth, space); + + const enhancedApps = await concurrentExecutor( + apps.filter((app) => app.canRead(auth)), + async (app) => { + const datasetsFromFront = await getDatasets(auth, app.toJSON()); + const datasets = []; + for (const dataset of datasetsFromFront) { + const fromCore = await getDatasetHash( + auth, + app, + dataset.name, + "latest" + ); + datasets.push(fromCore ?? 
dataset); + } + return { ...app.toJSON(), datasets }; + }, + { concurrency: 5 } + ); + + res.status(200).json({ + apps: enhancedApps, + }); + return; + + default: + return apiError(req, res, { + status_code: 405, + api_error: { + type: "method_not_supported_error", + message: "The method passed is not supported, GET is expected.", + }, + }); + } +} + +export default withPublicAPIAuthentication( + withResourceFetchingFromRoute(handler, { + space: { requireCanRead: true }, + }) +); diff --git a/front/pages/poke/[wId]/spaces/[spaceId]/index.tsx b/front/pages/poke/[wId]/spaces/[spaceId]/index.tsx index a1ad2f0c56fb..175886d58c03 100644 --- a/front/pages/poke/[wId]/spaces/[spaceId]/index.tsx +++ b/front/pages/poke/[wId]/spaces/[spaceId]/index.tsx @@ -8,6 +8,7 @@ import type { ReactElement } from "react"; import { DataSourceViewsDataTable } from "@app/components/poke/data_source_views/table"; import { MembersDataTable } from "@app/components/poke/members/table"; +import { PluginList } from "@app/components/poke/plugins/PluginList"; import { ViewSpaceViewTable } from "@app/components/poke/spaces/view"; import { getMembers } from "@app/lib/api/workspace"; import { withSuperUserAuthRequirements } from "@app/lib/iam/session"; @@ -78,6 +79,13 @@ export default function SpacePage({
+
diff --git a/front/pages/w/[wId]/spaces/[spaceId]/apps/[aId]/specification.tsx b/front/pages/w/[wId]/spaces/[spaceId]/apps/[aId]/specification.tsx index 688d5f8f45d9..e88bdd73bf0a 100644 --- a/front/pages/w/[wId]/spaces/[spaceId]/apps/[aId]/specification.tsx +++ b/front/pages/w/[wId]/spaces/[spaceId]/apps/[aId]/specification.tsx @@ -20,6 +20,7 @@ export const getServerSideProps = withDefaultUserAuthRequirements<{ readOnly: boolean; app: AppType; specification: string; + specificationFromCore: { created: number; data: string; hash: string } | null; }>(async (context, auth) => { const owner = auth.workspace(); const subscription = auth.subscription(); @@ -50,6 +51,26 @@ export const getServerSideProps = withDefaultUserAuthRequirements<{ }; } + let specificationFromCore = null; + const specificationFromCoreHash = context.query?.hash; + + if ( + specificationFromCoreHash && + typeof specificationFromCoreHash === "string" + ) { + const coreSpec = await coreAPI.getSpecification({ + projectId: app.dustAPIProjectId, + specificationHash: specificationFromCoreHash, + }); + + if (coreSpec.isOk()) { + specificationFromCore = { + ...coreSpec.value.specification, + hash: specificationFromCoreHash, + }; + } + } + const latestDatasets = {} as { [key: string]: string }; for (const d in datasets.value.datasets) { latestDatasets[d] = datasets.value.datasets[d][0].hash; @@ -67,6 +88,7 @@ export const getServerSideProps = withDefaultUserAuthRequirements<{ readOnly, app: app.toJSON(), specification: spec, + specificationFromCore, }, }; }); @@ -76,6 +98,7 @@ export default function Specification({ subscription, app, specification, + specificationFromCore, }: InferGetServerSidePropsType) { const router = useRouter(); @@ -113,8 +136,19 @@ export default function Specification({ )} -
- {specification} +
+

Current specifications:

+
+ {specification} +
+ {specificationFromCore && ( + <> +

Saved specifications {specificationFromCore.hash}:

+
+ {specificationFromCore.data} +
+ + )}
diff --git a/front/public/swagger.json b/front/public/swagger.json index 7477c469ded6..85dfb622fde6 100644 --- a/front/public/swagger.json +++ b/front/public/swagger.json @@ -3296,8 +3296,8 @@ }, "/api/v1/w/{wId}/spaces": { "get": { - "summary": "List Workspace Spaces", - "description": "Retrieves a list of spaces for the authenticated workspace.", + "summary": "List Spaces accessible.", + "description": "Retrieves a list of accessible spaces for the authenticated workspace.", "tags": [ "Spaces" ], diff --git a/sdks/js/src/index.ts b/sdks/js/src/index.ts index 31bc68e0d7a9..f8e38d37881a 100644 --- a/sdks/js/src/index.ts +++ b/sdks/js/src/index.ts @@ -7,6 +7,7 @@ import type { AgentErrorEvent, AgentMessageSuccessEvent, APIError, + AppsCheckRequestType, CancelMessageGenerationRequestType, ConversationPublicType, DataSourceViewType, @@ -32,6 +33,7 @@ import type { } from "./types"; import { APIErrorSchema, + AppsCheckResponseSchema, CancelMessageGenerationResponseSchema, CreateConversationResponseSchema, DataSourceViewResponseSchema, @@ -41,6 +43,7 @@ import { FileUploadUrlRequestSchema, GetActiveMemberEmailsInWorkspaceResponseSchema, GetAgentConfigurationsResponseSchema, + GetAppsResponseSchema, GetConversationResponseSchema, GetConversationsResponseSchema, GetDataSourcesResponseSchema, @@ -1038,6 +1041,35 @@ export class DustAPI { return new Ok(r.value.dataSourceView); } + async exportApps({ appSpaceId }: { appSpaceId: string }) { + const res = await this.request({ + method: "GET", + path: `spaces/${appSpaceId}/apps/export`, + }); + + const r = await this._resultFromResponse(GetAppsResponseSchema, res); + + if (r.isErr()) { + return r; + } + return new Ok(r.value.apps); + } + + async checkApps(apps: AppsCheckRequestType, appSpaceId: string) { + const res = await this.request({ + method: "POST", + path: `spaces/${appSpaceId}/apps/check`, + body: apps, + }); + + const r = await this._resultFromResponse(AppsCheckResponseSchema, res); + + if (r.isErr()) { + return r; + 
} + return new Ok(r.value.apps); + } + private async _fetchWithError( url: string, config?: AxiosRequestConfig diff --git a/sdks/js/src/types.ts b/sdks/js/src/types.ts index c9a9928b6090..1b83aa6ae914 100644 --- a/sdks/js/src/types.ts +++ b/sdks/js/src/types.ts @@ -1462,6 +1462,26 @@ const SpaceTypeSchema = z.object({ updatedAt: z.number(), }); +const DatasetSchemaEntryType = FlexibleEnumSchema< + "string" | "number" | "boolean" | "json" +>(); + +const DatasetSchema = z.object({ + name: z.string(), + description: z.string().nullable(), + data: z.array(z.record(z.any())).nullable().optional(), + schema: z + .array( + z.object({ + key: z.string(), + type: DatasetSchemaEntryType, + description: z.string().nullable(), + }) + ) + .nullable() + .optional(), +}); + const AppTypeSchema = z.object({ id: ModelIdSchema, sId: z.string(), @@ -1472,8 +1492,11 @@ const AppTypeSchema = z.object({ savedRun: z.string().nullable(), dustAPIProjectId: z.string(), space: SpaceTypeSchema, + datasets: z.array(DatasetSchema).optional(), }); +export type ApiAppType = z.infer; + export const RunAppResponseSchema = z.object({ run: RunTypeSchema, }); @@ -1757,7 +1780,11 @@ export type ValidateMemberResponseType = z.infer< typeof ValidateMemberResponseSchema >; -const GetAppsResponseSchema = z.object({ +export const GetAppsResponseSchema = z.object({ + apps: AppTypeSchema.array(), +}); + +export const PostAppsRequestSchema = z.object({ apps: AppTypeSchema.array(), }); @@ -2321,3 +2348,26 @@ export function getTitleFromRetrievedDocument( return document.documentId; } + +export const AppsCheckRequestSchema = z.object({ + apps: z.array( + z.object({ + appId: z.string(), + appHash: z.string(), + }) + ), +}); + +export type AppsCheckRequestType = z.infer; + +export const AppsCheckResponseSchema = z.object({ + apps: z.array( + z.object({ + appId: z.string(), + appHash: z.string(), + deployed: z.boolean(), + }) + ), +}); + +export type AppsCheckResponseType = z.infer; From 
41b82744aaaeb4c103b57b9dbca7e647a2796819 Mon Sep 17 00:00:00 2001 From: Edouard Wautier <4435185+Duncid@users.noreply.github.com> Date: Tue, 14 Jan 2025 15:13:56 +0100 Subject: [PATCH 4/9] Fixing input align (Sidebar) (#9959) * Fixing alignement issues * Patching --- front/tailwind.config.js | 6 +++--- sparkle/package-lock.json | 4 ++-- sparkle/package.json | 2 +- sparkle/src/components/Input.tsx | 4 ++-- sparkle/src/components/SearchInput.tsx | 2 +- sparkle/tailwind.config.js | 14 +++++++------- 6 files changed, 16 insertions(+), 16 deletions(-) diff --git a/front/tailwind.config.js b/front/tailwind.config.js index 695072d73175..b2e35715da97 100644 --- a/front/tailwind.config.js +++ b/front/tailwind.config.js @@ -233,9 +233,9 @@ module.exports = { primary: { DEFAULT: colors.slate[800], light: { DEFAULT: colors.slate[700], dark: colors.slate[300] }, - dark: { DEFAULT: colors.slate[950], dark: colors.slate[50] }, + dark: { DEFAULT: colors.slate[950], dark: "#F6F8FB" }, muted: { DEFAULT: colors.slate[400], dark: colors.slate[600] }, - 950: { DEFAULT: colors.slate[950], dark: colors.slate[50] }, + 950: { DEFAULT: colors.slate[950], dark: "#F6F8FB" }, 900: { DEFAULT: colors.slate[900], dark: colors.slate[100] }, 800: { DEFAULT: colors.slate[800], dark: colors.slate[200] }, 700: { DEFAULT: colors.slate[700], dark: colors.slate[300] }, @@ -304,7 +304,7 @@ module.exports = { 300: { DEFAULT: colors.slate[300], dark: colors.slate[600] }, }, element: { - 950: { DEFAULT: colors.slate[950], dark: colors.slate[50] }, + 950: { DEFAULT: colors.slate[950], dark: "#F6F8FB" }, 900: { DEFAULT: colors.slate[900], dark: colors.slate[100] }, 800: { DEFAULT: colors.slate[700], dark: colors.slate[200] }, 700: { DEFAULT: colors.slate[500], dark: colors.slate[300] }, diff --git a/sparkle/package-lock.json b/sparkle/package-lock.json index 5ee3879a3bd6..33dccfff2dc6 100644 --- a/sparkle/package-lock.json +++ b/sparkle/package-lock.json @@ -1,12 +1,12 @@ { "name": "@dust-tt/sparkle", - 
"version": "0.2.363", + "version": "0.2.364", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@dust-tt/sparkle", - "version": "0.2.363", + "version": "0.2.364", "license": "ISC", "dependencies": { "@emoji-mart/data": "^1.1.2", diff --git a/sparkle/package.json b/sparkle/package.json index 19cb50bd0fe6..3b1cb5a1e708 100644 --- a/sparkle/package.json +++ b/sparkle/package.json @@ -1,6 +1,6 @@ { "name": "@dust-tt/sparkle", - "version": "0.2.363", + "version": "0.2.364", "scripts": { "build": "rm -rf dist && npm run tailwind && npm run build:esm && npm run build:cjs", "tailwind": "tailwindcss -i ./src/styles/tailwind.css -o dist/sparkle.css", diff --git a/sparkle/src/components/Input.tsx b/sparkle/src/components/Input.tsx index 3001d481110d..6624079d0d16 100644 --- a/sparkle/src/components/Input.tsx +++ b/sparkle/src/components/Input.tsx @@ -33,7 +33,7 @@ const messageVariantStyles: Record = { const stateVariantStyles: Record = { default: - "s-border-border-dark/50 s-ring-highlight/0 focus-visible:s-border-border-focus focus-visible:s-outline-none focus-visible:s-ring-highlight/10", + "s-border-border-dark s-ring-highlight/0 focus-visible:s-border-border-focus focus-visible:s-outline-none focus-visible:s-ring-highlight/10", disabled: "disabled:s-cursor-not-allowed disabled:s-text-muted-foreground", error: "s-border-border-warning/30 s-ring-warning/0 focus-visible:s-border-border-warning focus-visible:s-outline-none focus-visible:s-ring-warning/10", @@ -50,7 +50,7 @@ const messageVariant = cva("", { const inputStyleClasses = cva( cn( - "s-text-sm s-bg-background s-rounded-xl s-bg-muted-background s-flex s-h-9 s-w-full s-px-3 s-py-1.5 ", + "s-text-sm s-rounded-xl s-bg-muted-background s-flex s-h-9 s-w-full s-px-3 s-py-1.5 ", "s-border focus-visible:s-ring", "file:s-border-0 file:s-bg-transparent file:s-text-sm file:s-font-medium file:s-text-foreground", "placeholder:s-text-muted-foreground" diff --git a/sparkle/src/components/SearchInput.tsx 
b/sparkle/src/components/SearchInput.tsx index 09d9429308a4..3effb2282b64 100644 --- a/sparkle/src/components/SearchInput.tsx +++ b/sparkle/src/components/SearchInput.tsx @@ -32,7 +32,7 @@ export const SearchInput = forwardRef( }; return ( -
+
Date: Tue, 14 Jan 2025 15:15:10 +0100 Subject: [PATCH 5/9] use the upsertArgs documentId if passed (#9962) --- front/lib/api/files/upsert.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/front/lib/api/files/upsert.ts b/front/lib/api/files/upsert.ts index 6f94fe30d4cf..96419ca881a3 100644 --- a/front/lib/api/files/upsert.ts +++ b/front/lib/api/files/upsert.ts @@ -208,12 +208,12 @@ const upsertDocumentToDatasource: ProcessingFunction = async ({ }) => { // Use the file id as the document id to make it easy to track the document back to the file. const sourceUrl = file.getPrivateUrl(auth); - + const documentId = upsertArgs?.document_id ?? file.sId; // Use the file sId as a fallback to make it easy to track the table back to the file. const upsertDocumentRes = await upsertDocument({ - document_id: file.sId, + document_id: documentId, source_url: sourceUrl, text: content, - parents: [file.sId], + parents: [documentId], tags: [`title:${file.fileName}`, `fileId:${file.sId}`], light_document_output: true, dataSource, From 76c86e50de7c1a1d541bb239399e40f62ee56f88 Mon Sep 17 00:00:00 2001 From: Henry Fontanier Date: Tue, 14 Jan 2025 15:16:32 +0100 Subject: [PATCH 6/9] fix> tracker builder (#9963) Co-authored-by: Henry Fontanier --- .../pages/api/w/[wId]/spaces/[spaceId]/trackers/[tId]/index.ts | 2 +- front/pages/api/w/[wId]/spaces/[spaceId]/trackers/index.ts | 2 +- front/pages/w/[wId]/assistant/labs/trackers/[tId]/index.tsx | 2 +- front/pages/w/[wId]/assistant/labs/trackers/index.tsx | 2 +- front/pages/w/[wId]/assistant/labs/trackers/new.tsx | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/front/pages/api/w/[wId]/spaces/[spaceId]/trackers/[tId]/index.ts b/front/pages/api/w/[wId]/spaces/[spaceId]/trackers/[tId]/index.ts index 32711f1484a8..12e0b34e7eff 100644 --- a/front/pages/api/w/[wId]/spaces/[spaceId]/trackers/[tId]/index.ts +++ b/front/pages/api/w/[wId]/spaces/[spaceId]/trackers/[tId]/index.ts @@ -35,7 +35,7 @@ async function 
handler( const flags = await getFeatureFlags(owner); if ( !flags.includes("labs_trackers") || - !auth.isAdmin() || + !auth.isBuilder() || !space.canRead(auth) ) { return apiError(req, res, { diff --git a/front/pages/api/w/[wId]/spaces/[spaceId]/trackers/index.ts b/front/pages/api/w/[wId]/spaces/[spaceId]/trackers/index.ts index ec4d930e5726..519c376b4a06 100644 --- a/front/pages/api/w/[wId]/spaces/[spaceId]/trackers/index.ts +++ b/front/pages/api/w/[wId]/spaces/[spaceId]/trackers/index.ts @@ -72,7 +72,7 @@ async function handler( const flags = await getFeatureFlags(owner); if ( !flags.includes("labs_trackers") || - !auth.isAdmin() || + !auth.isBuilder() || !space.canRead(auth) ) { return apiError(req, res, { diff --git a/front/pages/w/[wId]/assistant/labs/trackers/[tId]/index.tsx b/front/pages/w/[wId]/assistant/labs/trackers/[tId]/index.tsx index a5c5ff6975e9..9c28bcd64ac5 100644 --- a/front/pages/w/[wId]/assistant/labs/trackers/[tId]/index.tsx +++ b/front/pages/w/[wId]/assistant/labs/trackers/[tId]/index.tsx @@ -52,7 +52,7 @@ export const getServerSideProps = withDefaultUserAuthRequirements<{ } const flags = await getFeatureFlags(owner); - if (!flags.includes("labs_trackers") || !auth.isAdmin()) { + if (!flags.includes("labs_trackers") || !auth.isBuilder()) { return { notFound: true, }; diff --git a/front/pages/w/[wId]/assistant/labs/trackers/index.tsx b/front/pages/w/[wId]/assistant/labs/trackers/index.tsx index 40c55fa03e7f..06dd8be915d1 100644 --- a/front/pages/w/[wId]/assistant/labs/trackers/index.tsx +++ b/front/pages/w/[wId]/assistant/labs/trackers/index.tsx @@ -55,7 +55,7 @@ export const getServerSideProps = withDefaultUserAuthRequirements<{ } const flags = await getFeatureFlags(owner); - if (!flags.includes("labs_trackers") || !auth.isAdmin()) { + if (!flags.includes("labs_trackers") || !auth.isBuilder()) { return { notFound: true, }; diff --git a/front/pages/w/[wId]/assistant/labs/trackers/new.tsx b/front/pages/w/[wId]/assistant/labs/trackers/new.tsx 
index 65903c2119b9..6f6bbfb5de24 100644 --- a/front/pages/w/[wId]/assistant/labs/trackers/new.tsx +++ b/front/pages/w/[wId]/assistant/labs/trackers/new.tsx @@ -35,7 +35,7 @@ export const getServerSideProps = withDefaultUserAuthRequirements<{ } const flags = await getFeatureFlags(owner); - if (!flags.includes("labs_trackers") || !auth.isAdmin()) { + if (!flags.includes("labs_trackers") || !auth.isBuilder()) { return { notFound: true, }; From d5654bc13fd409821fbab1a857a94615215d3424 Mon Sep 17 00:00:00 2001 From: Lucas Massemin Date: Tue, 14 Jan 2025 15:41:23 +0100 Subject: [PATCH 7/9] Lucas/fix file upload parents (#9957) * Fixed wrong tableId in parents when updating a table * slightly improved typing, still not good enough * Got rid of unused endpoint and function * Removed types for endpoint * turned name into document_id in upsertArgs * linter spread fix * more linter fixes --- front/lib/api/files/upsert.ts | 49 +++++-- front/lib/swr/data_source_view_tables.ts | 52 +------ .../data_sources/[dsId]/tables/index.ts | 134 ------------------ .../front/api_handlers/public/data_sources.ts | 12 -- 4 files changed, 42 insertions(+), 205 deletions(-) delete mode 100644 front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/tables/index.ts diff --git a/front/lib/api/files/upsert.ts b/front/lib/api/files/upsert.ts index 96419ca881a3..3b51839cd053 100644 --- a/front/lib/api/files/upsert.ts +++ b/front/lib/api/files/upsert.ts @@ -20,6 +20,10 @@ import { pipeline } from "stream/promises"; import { runAction } from "@app/lib/actions/server"; import config from "@app/lib/api/config"; +import type { + UpsertDocumentArgs, + UpsertTableArgs, +} from "@app/lib/api/data_sources"; import { upsertDocument, upsertTable } from "@app/lib/api/data_sources"; import type { Authenticator } from "@app/lib/auth"; import type { DustError } from "@app/lib/error"; @@ -208,7 +212,11 @@ const upsertDocumentToDatasource: ProcessingFunction = async ({ }) => { // Use the file id as the document 
id to make it easy to track the document back to the file. const sourceUrl = file.getPrivateUrl(auth); - const documentId = upsertArgs?.document_id ?? file.sId; // Use the file sId as a fallback to make it easy to track the table back to the file. + let documentId = file.sId; + if (upsertArgs && "document_id" in upsertArgs) { + documentId = upsertArgs.document_id; + } + const { title: upsertTitle, ...restArgs } = upsertArgs ?? {}; const upsertDocumentRes = await upsertDocument({ document_id: documentId, source_url: sourceUrl, @@ -219,10 +227,10 @@ const upsertDocumentToDatasource: ProcessingFunction = async ({ dataSource, auth, mime_type: file.contentType, - title: file.fileName, + title: upsertTitle ?? file.fileName, // Used to override defaults. - ...(upsertArgs ?? {}), + ...restArgs, }); if (upsertDocumentRes.isErr()) { @@ -244,7 +252,12 @@ const upsertTableToDatasource: ProcessingFunction = async ({ dataSource, upsertArgs, }) => { - const tableId = upsertArgs?.tableId ?? file.sId; // Use the file sId as a fallback for the table_id to make it easy to track the table back to the file. + // Use the file sId as the table id to make it easy to track the table back to the file. + let tableId = file.sId; + if (upsertArgs && "tableId" in upsertArgs) { + tableId = upsertArgs.tableId ?? tableId; + } + const { title: upsertTitle, ...restArgs } = upsertArgs ?? {}; const upsertTableRes = await upsertTable({ tableId, name: slugify(file.fileName), @@ -257,11 +270,11 @@ const upsertTableToDatasource: ProcessingFunction = async ({ dataSource, auth, useAppForHeaderDetection: true, - title: file.fileName, + title: upsertTitle ?? file.fileName, mimeType: file.contentType, // Used to override defaults, for manual file uploads where some fields are user-defined. - ...(upsertArgs ?? 
{}), + ...restArgs, }); if (upsertTableRes.isErr()) { @@ -287,7 +300,17 @@ type ProcessingFunction = ({ file: FileResource; content: string; dataSource: DataSourceResource; - upsertArgs?: Record; + upsertArgs?: + | Pick + | Pick< + UpsertTableArgs, + | "name" + | "title" + | "description" + | "tableId" + | "tags" + | "useAppForHeaderDetection" + >; }) => Promise>; const getProcessingFunction = ({ @@ -403,7 +426,17 @@ export async function processAndUpsertToDataSource( }: { file: FileResource; optionalContent?: string; - upsertArgs?: Record; + upsertArgs?: + | Pick + | Pick< + UpsertTableArgs, + | "name" + | "title" + | "description" + | "tableId" + | "tags" + | "useAppForHeaderDetection" + >; } ): Promise< Result< diff --git a/front/lib/swr/data_source_view_tables.ts b/front/lib/swr/data_source_view_tables.ts index 1f6f857d1c87..5664fdc9ca16 100644 --- a/front/lib/swr/data_source_view_tables.ts +++ b/front/lib/swr/data_source_view_tables.ts @@ -1,9 +1,6 @@ import { useSendNotification } from "@dust-tt/sparkle"; import type { DataSourceViewType, LightWorkspaceType } from "@dust-tt/types"; -import type { - PatchDataSourceTableRequestBody, - PostDataSourceTableRequestBody, -} from "@dust-tt/types"; +import type { PatchDataSourceTableRequestBody } from "@dust-tt/types"; import { useMemo } from "react"; import type { Fetcher } from "swr"; @@ -15,7 +12,6 @@ import { } from "@app/lib/swr/swr"; import type { ListTablesResponseBody } from "@app/pages/api/w/[wId]/spaces/[spaceId]/data_source_views/[dsvId]/tables"; import type { GetDataSourceViewTableResponseBody } from "@app/pages/api/w/[wId]/spaces/[spaceId]/data_source_views/[dsvId]/tables/[tableId]"; -import type { PostTableResponseBody } from "@app/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/tables"; import type { PatchTableResponseBody } from "@app/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/tables/[tableId]"; export function useDataSourceViewTable({ @@ -133,49 +129,3 @@ export function 
useUpdateDataSourceViewTable( return doUpdate; } - -export function useCreateDataSourceTable( - owner: LightWorkspaceType, - dataSourceView: DataSourceViewType -) { - const { mutateRegardlessOfQueryParams: mutateContentNodes } = - useDataSourceViewContentNodes({ - owner, - dataSourceView, - disabled: true, // Needed just to mutate - }); - const sendNotification = useSendNotification(); - - const doCreate = async (body: PostDataSourceTableRequestBody) => { - const tableUrl = `/api/w/${owner.sId}/spaces/${dataSourceView.spaceId}/data_sources/${dataSourceView.dataSource.sId}/tables`; - const res = await fetch(tableUrl, { - method: "POST", - body: JSON.stringify(body), - headers: { - "Content-Type": "application/json", - }, - }); - if (!res.ok) { - const errorData = await getErrorFromResponse(res); - sendNotification({ - type: "error", - title: "Error creating table", - description: `Error: ${errorData.message}`, - }); - return null; - } else { - void mutateContentNodes(); - - sendNotification({ - type: "success", - title: "Table created", - description: "Table has been created", - }); - - const response: PostTableResponseBody = await res.json(); - return response.table; - } - }; - - return doCreate; -} diff --git a/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/tables/index.ts b/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/tables/index.ts deleted file mode 100644 index 08bba7a8e192..000000000000 --- a/front/pages/api/w/[wId]/spaces/[spaceId]/data_sources/[dsId]/tables/index.ts +++ /dev/null @@ -1,134 +0,0 @@ -import type { CoreAPITable, WithAPIErrorResponse } from "@dust-tt/types"; -import { PostDataSourceTableRequestBodySchema } from "@dust-tt/types"; -import { isLeft } from "fp-ts/lib/Either"; -import * as reporter from "io-ts-reporters"; -import type { NextApiRequest, NextApiResponse } from "next"; - -import { withSessionAuthenticationForWorkspace } from "@app/lib/api/auth_wrappers"; -import { upsertTable } from 
"@app/lib/api/data_sources"; -import { withResourceFetchingFromRoute } from "@app/lib/api/resource_wrappers"; -import type { Authenticator } from "@app/lib/auth"; -import { DataSourceResource } from "@app/lib/resources/data_source_resource"; -import type { SpaceResource } from "@app/lib/resources/space_resource"; -import { generateRandomModelSId } from "@app/lib/resources/string_ids"; -import { apiError } from "@app/logger/withlogging"; - -export const config = { - api: { - bodyParser: { - sizeLimit: "50mb", - }, - }, -}; - -export type PostTableResponseBody = { - table?: CoreAPITable; -}; - -async function handler( - req: NextApiRequest, - res: NextApiResponse>, - auth: Authenticator, - { space }: { space: SpaceResource } -): Promise { - const { dsId } = req.query; - if (typeof dsId !== "string") { - return apiError(req, res, { - status_code: 400, - api_error: { - type: "invalid_request_error", - message: "Invalid path parameters.", - }, - }); - } - - const dataSource = await DataSourceResource.fetchByNameOrId(auth, dsId); - if ( - !dataSource || - !dataSource.canRead(auth) || - dataSource.space.sId !== space.sId - ) { - return apiError(req, res, { - status_code: 404, - api_error: { - type: "data_source_not_found", - message: "The data source you requested was not found.", - }, - }); - } - - switch (req.method) { - case "POST": - if (!dataSource.canWrite(auth)) { - return apiError(req, res, { - status_code: 403, - api_error: { - type: "data_source_auth_error", - message: "You are not allowed to update data in this data source.", - }, - }); - } - - if (dataSource.connectorId) { - return apiError(req, res, { - status_code: 403, - api_error: { - type: "data_source_auth_error", - message: "You cannot upsert a document on a managed data source.", - }, - }); - } - - const bodyValidation = PostDataSourceTableRequestBodySchema.decode( - req.body - ); - - if (isLeft(bodyValidation)) { - const pathError = reporter.formatValidationErrors(bodyValidation.left); - return 
apiError(req, res, { - status_code: 400, - api_error: { - type: "invalid_request_error", - message: `Invalid request body: ${pathError}`, - }, - }); - } - - const tableId = generateRandomModelSId(); - const upsertRes = await upsertTable({ - ...bodyValidation.right, - async: bodyValidation.right.async ?? false, - dataSource, - auth, - tableId, - }); - - if (upsertRes.isErr()) { - return apiError(req, res, { - status_code: 500, - api_error: { - type: "internal_server_error", - message: "There was an error upserting the document.", - }, - }); - } - - res.status(201).json({ - table: upsertRes.value?.table, - }); - return; - default: - return apiError(req, res, { - status_code: 405, - api_error: { - type: "method_not_supported_error", - message: - "The method passed is not supported, GET, POST or DELETE is expected.", - }, - }); - } -} - -export default withSessionAuthenticationForWorkspace( - withResourceFetchingFromRoute(handler, { space: { requireCanWrite: true } }) -); diff --git a/types/src/front/api_handlers/public/data_sources.ts b/types/src/front/api_handlers/public/data_sources.ts index 515265aa072c..7c5c4abf3a69 100644 --- a/types/src/front/api_handlers/public/data_sources.ts +++ b/types/src/front/api_handlers/public/data_sources.ts @@ -81,18 +81,6 @@ export type PatchDataSourceTableRequestBody = t.TypeOf< typeof PatchDataSourceTableRequestBodySchema >; -// Post and Patch require the same request body -export type PostDataSourceTableRequestBody = t.TypeOf< - typeof PatchDataSourceTableRequestBodySchema ->; - -export const PostDataSourceTableRequestBodySchema = t.intersection([ - PatchDataSourceTableRequestBodySchema, - t.type({ - csv: t.string, - }), -]); - export const UpsertTableFromCsvRequestSchema = t.intersection([ t.type({ name: t.string, From 0af896f27d1d2cbab78489a95b8616525d5b5a2c Mon Sep 17 00:00:00 2001 From: Alban Dumouilla Date: Tue, 14 Jan 2025 15:57:30 +0100 Subject: [PATCH 8/9] Change scope for Google Transcripts to drive.meet.readonly (#7230) 
* Add useCase scope change in oauth for Google Drive * lint * Almost there but API responds not found * Re,ove tooshort * Adding back history record check --- front/lib/api/oauth.ts | 20 +++++++++++++------- front/temporal/labs/activities.ts | 5 +++++ front/temporal/labs/utils/google.ts | 10 ++++++++++ 3 files changed, 28 insertions(+), 7 deletions(-) diff --git a/front/lib/api/oauth.ts b/front/lib/api/oauth.ts index 724b0a90e048..a30f00fc4187 100644 --- a/front/lib/api/oauth.ts +++ b/front/lib/api/oauth.ts @@ -36,7 +36,10 @@ function finalizeUriForProvider(provider: OAuthProvider): string { const PROVIDER_STRATEGIES: Record< OAuthProvider, { - setupUri: (connection: OAuthConnectionType) => string; + setupUri: ( + connection: OAuthConnectionType, + useCase?: OAuthUseCase + ) => string; codeFromQuery: (query: ParsedUrlQuery) => string | null; connectionIdFromQuery: (query: ParsedUrlQuery) => string | null; isExtraConfigValid: (extraConfig: Record) => boolean; @@ -67,11 +70,14 @@ const PROVIDER_STRATEGIES: Record< }, }, google_drive: { - setupUri: (connection) => { - const scopes = [ - "https://www.googleapis.com/auth/drive.metadata.readonly", - "https://www.googleapis.com/auth/drive.readonly", - ]; + setupUri: (connection, useCase?) => { + const scopes = + useCase === "labs_transcripts" + ? 
["https://www.googleapis.com/auth/drive.meet.readonly"] + : [ + "https://www.googleapis.com/auth/drive.metadata.readonly", + "https://www.googleapis.com/auth/drive.readonly", + ]; const qs = querystring.stringify({ response_type: "code", client_id: config.getOAuthGoogleDriveClientId(), @@ -341,7 +347,7 @@ export async function createConnectionAndGetSetupUrl( const connection = cRes.value.connection; - return new Ok(PROVIDER_STRATEGIES[provider].setupUri(connection)); + return new Ok(PROVIDER_STRATEGIES[provider].setupUri(connection, useCase)); } export async function finalizeConnection( diff --git a/front/temporal/labs/activities.ts b/front/temporal/labs/activities.ts index b1dceb707d65..8b325950896c 100644 --- a/front/temporal/labs/activities.ts +++ b/front/temporal/labs/activities.ts @@ -207,6 +207,11 @@ export async function processTranscriptActivity( let transcriptContent = ""; let userParticipated = true; + localLogger.info( + {}, + "[processTranscriptActivity] No history found. Proceeding." + ); + switch (transcriptsConfiguration.provider) { case "google_drive": const googleResult = await retrieveGoogleTranscriptContent( diff --git a/front/temporal/labs/utils/google.ts b/front/temporal/labs/utils/google.ts index a664f6369480..dbc2e4cc4adb 100644 --- a/front/temporal/labs/utils/google.ts +++ b/front/temporal/labs/utils/google.ts @@ -42,6 +42,11 @@ export async function retrieveRecentGoogleTranscripts( fields: "files(id, name)", }); + logger.info( + { files: files.data.files }, + "[retrieveRecentGoogleTranscripts] Retrieved files." + ); + const { files: filesData } = files.data; if (!filesData || filesData.length === 0) { logger.info({}, "[retrieveRecentGoogleTranscripts] No new files found."); @@ -107,6 +112,11 @@ export async function retrieveGoogleTranscriptContent( fields: "name", }); + localLogger.info( + { fileId, metadataRes: metadataRes.data }, + "Retrieved metadata for Google document." 
+ ); + try { const contentRes = await drive.files.export({ fileId: fileId, From 06c378eda7aad344df73c8cfa4ad05f0d119589b Mon Sep 17 00:00:00 2001 From: Philippe Rolet Date: Tue, 14 Jan 2025 16:45:53 +0100 Subject: [PATCH 9/9] Fix: upsert source_url also in upsert_data_source_node (#9970) Description --- Fixes an omission of PR #9883 Risks --- low Deploy --- core --- core/src/stores/postgres.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/core/src/stores/postgres.rs b/core/src/stores/postgres.rs index 7b2e56fa58ad..c1f93d19e433 100644 --- a/core/src/stores/postgres.rs +++ b/core/src/stores/postgres.rs @@ -166,14 +166,14 @@ impl PostgresStore { let stmt = tx .prepare( "INSERT INTO data_sources_nodes \ - (id, data_source, created, node_id, timestamp, title, mime_type, parents, \ + (id, data_source, created, node_id, timestamp, title, mime_type, parents, source_url, \ document, \"table\", folder) \ - VALUES (DEFAULT, $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) \ + VALUES (DEFAULT, $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) \ ON CONFLICT (data_source, node_id) DO UPDATE \ SET timestamp = EXCLUDED.timestamp, title = EXCLUDED.title, \ mime_type = EXCLUDED.mime_type, parents = EXCLUDED.parents, \ document = EXCLUDED.document, \"table\" = EXCLUDED.\"table\", \ - folder = EXCLUDED.folder \ + folder = EXCLUDED.folder, source_url = EXCLUDED.source_url \ RETURNING id", ) .await?; @@ -189,6 +189,7 @@ impl PostgresStore { &upsert_params.title, &upsert_params.mime_type, &upsert_params.parents, + &upsert_params.source_url, &document_row_id, &table_row_id, &folder_row_id,