diff --git a/x-pack/packages/kbn-ai-assistant/src/chat/welcome_message_knowledge_base_setup_error_panel.tsx b/x-pack/packages/kbn-ai-assistant/src/chat/welcome_message_knowledge_base_setup_error_panel.tsx
index eeff9c8afd7f3..f7267d6d21855 100644
--- a/x-pack/packages/kbn-ai-assistant/src/chat/welcome_message_knowledge_base_setup_error_panel.tsx
+++ b/x-pack/packages/kbn-ai-assistant/src/chat/welcome_message_knowledge_base_setup_error_panel.tsx
@@ -37,7 +37,9 @@ export function WelcomeMessageKnowledgeBaseSetupErrorPanel({
}) {
const { http } = useKibana().services;
- const modelName = knowledgeBase.status.value?.model_name;
+ const modelId = knowledgeBase.status.value?.endpoint?.service_settings?.model_id;
+ const deploymentState = knowledgeBase.status.value?.model_stats?.deployment_state;
+ const allocationState = knowledgeBase.status.value?.model_stats?.allocation_state;
return (
- {!knowledgeBase.status.value?.deployment_state ? (
+ {!deploymentState ? (
-
{' '}
{modelName},
+ modelId: {modelId},
}}
/>
) : null}
- {knowledgeBase.status.value?.deployment_state &&
- knowledgeBase.status.value.deployment_state !== 'started' ? (
+ {deploymentState && deploymentState !== 'started' ? (
-
{' '}
{modelName},
- deploymentState: (
- {knowledgeBase.status.value?.deployment_state}
- ),
+ modelId: {modelId},
+ deploymentState: {deploymentState},
}}
/>
) : null}
- {knowledgeBase.status.value?.allocation_state &&
- knowledgeBase.status.value.allocation_state !== 'fully_allocated' ? (
+ {allocationState && allocationState !== 'fully_allocated' ? (
-
{' '}
{modelName},
- allocationState: (
- {knowledgeBase.status.value?.allocation_state}
- ),
+ modelId: {modelId},
+ allocationState: {allocationState},
}}
/>
@@ -114,9 +110,9 @@ export function WelcomeMessageKnowledgeBaseSetupErrorPanel({
;
+ status: AbortableAsyncState>;
isInstalling: boolean;
installError?: Error;
install: () => Promise;
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/config.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/config.ts
index 4d0b9fef3f2f4..4df8891bd06fc 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/config.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/config.ts
@@ -9,7 +9,7 @@ import { schema, type TypeOf } from '@kbn/config-schema';
export const config = schema.object({
enabled: schema.boolean({ defaultValue: true }),
- modelId: schema.maybe(schema.string()),
+ modelId: schema.maybe(schema.string()), // TODO: Remove
scope: schema.maybe(schema.oneOf([schema.literal('observability'), schema.literal('search')])),
enableKnowledgeBase: schema.boolean({ defaultValue: true }),
});
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/functions/context.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/functions/context.ts
index fd57968617187..80ddf3cbc0a0d 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/functions/context.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/functions/context.ts
@@ -24,8 +24,8 @@ export function registerContextFunction({
client,
functions,
resources,
- isKnowledgeBaseAvailable,
-}: FunctionRegistrationParameters & { isKnowledgeBaseAvailable: boolean }) {
+ isKnowledgeBaseReady,
+}: FunctionRegistrationParameters & { isKnowledgeBaseReady: boolean }) {
functions.registerFunction(
{
name: CONTEXT_FUNCTION_NAME,
@@ -54,7 +54,7 @@ export function registerContextFunction({
...(dataWithinTokenLimit.length ? { data_on_screen: dataWithinTokenLimit } : {}),
};
- if (!isKnowledgeBaseAvailable) {
+ if (!isKnowledgeBaseReady) {
return { content };
}
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/functions/index.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/functions/index.ts
index 0313d29d3b209..ce4c8e59fbae2 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/functions/index.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/functions/index.ts
@@ -87,7 +87,7 @@ export const registerFunctions: RegistrationCallback = async ({
);
}
- const { ready: isReady } = await client.getKnowledgeBaseStatus();
+ const { ready: isKnowledgeBaseReady } = await client.getKnowledgeBaseStatus();
functions.registerInstruction(({ availableFunctionNames }) => {
const instructions: string[] = [];
@@ -109,7 +109,7 @@ export const registerFunctions: RegistrationCallback = async ({
Data that is compact enough automatically gets included in the response for the "${CONTEXT_FUNCTION_NAME}" function.`);
}
- if (isReady) {
+ if (isKnowledgeBaseReady) {
if (availableFunctionNames.includes(SUMMARIZE_FUNCTION_NAME)) {
instructions.push(`You can use the "${SUMMARIZE_FUNCTION_NAME}" function to store new information you have learned in a knowledge database.
Only use this function when the user asks for it.
@@ -129,11 +129,11 @@ export const registerFunctions: RegistrationCallback = async ({
return instructions.map((instruction) => dedent(instruction));
});
- if (isReady) {
+ if (isKnowledgeBaseReady) {
registerSummarizationFunction(registrationParameters);
}
- registerContextFunction({ ...registrationParameters, isKnowledgeBaseAvailable: isReady });
+ registerContextFunction({ ...registrationParameters, isKnowledgeBaseReady });
registerElasticsearchFunction(registrationParameters);
const request = registrationParameters.resources.request;
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/plugin.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/plugin.ts
index 3bdff9eb17606..98a6232563054 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/plugin.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/plugin.ts
@@ -36,6 +36,7 @@ import { registerFunctions } from './functions';
import { recallRankingEvent } from './analytics/recall_ranking';
import { initLangtrace } from './service/client/instrumentation/init_langtrace';
import { aiAssistantCapabilities } from '../common/capabilities';
+import { registerMigrateKnowledgeBaseEntriesTask } from './service/task_manager_definitions/register_migrate_knowledge_base_entries_task';
export class ObservabilityAIAssistantPlugin
implements
@@ -114,7 +115,8 @@ export class ObservabilityAIAssistantPlugin
}) as ObservabilityAIAssistantRouteHandlerResources['plugins'];
// Using once to make sure the same model ID is used during service init and Knowledge base setup
- const getModelId = once(async () => {
+ const getSearchConnectorModelId = once(async () => {
+ // TODO: Remove this once the modelId is removed from the config
const configModelId = this.config.modelId;
if (configModelId) {
return configModelId;
@@ -156,11 +158,18 @@ export class ObservabilityAIAssistantPlugin
const service = (this.service = new ObservabilityAIAssistantService({
logger: this.logger.get('service'),
core,
- taskManager: plugins.taskManager,
- getModelId,
+ getSearchConnectorModelId,
enableKnowledgeBase: this.config.enableKnowledgeBase,
}));
+ registerMigrateKnowledgeBaseEntriesTask({
+ core,
+ taskManager: plugins.taskManager,
+ logger: this.logger,
+ }).catch((error) => {
+ this.logger.error(`Failed to register migrate knowledge base entries task: ${error}`);
+ });
+
service.register(registerFunctions);
registerServerRoutes({
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/knowledge_base/route.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/knowledge_base/route.ts
index 0f1852c0e396c..50ce85e3578e9 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/knowledge_base/route.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/knowledge_base/route.ts
@@ -5,14 +5,16 @@
* 2.0.
*/
-import type {
- MlDeploymentAllocationState,
- MlDeploymentState,
-} from '@elastic/elasticsearch/lib/api/types';
import pLimit from 'p-limit';
import { notImplemented } from '@hapi/boom';
import { nonEmptyStringRt, toBooleanRt } from '@kbn/io-ts-utils';
import * as t from 'io-ts';
+import {
+ InferenceInferenceEndpointInfo,
+ MlDeploymentAllocationState,
+ MlDeploymentState,
+} from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
+import moment from 'moment';
import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route';
import { Instruction, KnowledgeBaseEntry, KnowledgeBaseEntryRole } from '../../../common/types';
@@ -21,44 +23,86 @@ const getKnowledgeBaseStatus = createObservabilityAIAssistantServerRoute({
options: {
tags: ['access:ai_assistant'],
},
- handler: async (
- resources
- ): Promise<{
- enabled: boolean;
+ handler: async ({
+ service,
+ request,
+ }): Promise<{
+ errorMessage?: string;
ready: boolean;
- error?: any;
- deployment_state?: MlDeploymentState;
- allocation_state?: MlDeploymentAllocationState;
- model_name?: string;
+ enabled: boolean;
+ endpoint?: Partial<InferenceInferenceEndpointInfo>;
+ model_stats?: {
+ deployment_state: MlDeploymentState | undefined;
+ allocation_state: MlDeploymentAllocationState | undefined;
+ };
}> => {
- const client = await resources.service.getClient({ request: resources.request });
+ const client = await service.getClient({ request });
if (!client) {
throw notImplemented();
}
- return await client.getKnowledgeBaseStatus();
+ return client.getKnowledgeBaseStatus();
},
});
const setupKnowledgeBase = createObservabilityAIAssistantServerRoute({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: t.partial({
+ query: t.partial({
+ model_id: t.string,
+ }),
+ }),
options: {
tags: ['access:ai_assistant'],
timeout: {
- idleSocket: 20 * 60 * 1000, // 20 minutes
+ idleSocket: moment.duration(20, 'minutes').asMilliseconds(),
},
},
- handler: async (resources): Promise<{}> => {
+ handler: async (resources): Promise<InferenceInferenceEndpointInfo> => {
const client = await resources.service.getClient({ request: resources.request });
if (!client) {
throw notImplemented();
}
- await client.setupKnowledgeBase();
+ const { model_id: modelId } = resources.params?.query ?? {};
+
+ return await client.setupKnowledgeBase(modelId);
+ },
+});
+
+const resetKnowledgeBase = createObservabilityAIAssistantServerRoute({
+ endpoint: 'POST /internal/observability_ai_assistant/kb/reset',
+ options: {
+ tags: ['access:ai_assistant'],
+ },
+ handler: async (resources): Promise<{ result: string }> => {
+ const client = await resources.service.getClient({ request: resources.request });
+
+ if (!client) {
+ throw notImplemented();
+ }
+
+ await client.resetKnowledgeBase();
+
+ return { result: 'success' };
+ },
+});
+
+const semanticTextMigrationKnowledgeBase = createObservabilityAIAssistantServerRoute({
+ endpoint: 'POST /internal/observability_ai_assistant/kb/semantic_text_migration',
+ options: {
+ tags: ['access:ai_assistant'],
+ },
+ handler: async (resources): Promise<void> => {
+ const client = await resources.service.getClient({ request: resources.request });
+
+ if (!client) {
+ throw notImplemented();
+ }
- return {};
+ return client.migrateKnowledgeBaseToSemanticText();
},
});
@@ -225,8 +269,8 @@ const importKnowledgeBaseEntries = createObservabilityAIAssistantServerRoute({
throw notImplemented();
}
- const status = await client.getKnowledgeBaseStatus();
- if (!status.ready) {
+ const { ready } = await client.getKnowledgeBaseStatus();
+ if (!ready) {
throw new Error('Knowledge base is not ready');
}
@@ -252,7 +296,9 @@ const importKnowledgeBaseEntries = createObservabilityAIAssistantServerRoute({
});
export const knowledgeBaseRoutes = {
+ ...semanticTextMigrationKnowledgeBase,
...setupKnowledgeBase,
+ ...resetKnowledgeBase,
...getKnowledgeBaseStatus,
...getKnowledgeBaseEntries,
...saveKnowledgeBaseUserInstruction,
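
For context, the setup route now accepts an optional `model_id` query parameter. A minimal sketch of how it is exercised, mirroring the API integration tests later in this diff (`observabilityAIAssistantAPIClient`, the `admin` helper and `TINY_ELSER` come from those tests; when `model_id` is omitted the endpoint falls back to `.elser_model_2`, see `inference_endpoint.ts` below):

// Sketch only: usage as in knowledge_base.spec.ts further down, not part of this route file.
await observabilityAIAssistantAPIClient
  .admin({
    endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
    params: {
      query: {
        model_id: TINY_ELSER.id, // optional; omit to use the default ELSER model
      },
    },
  })
  .expect(200);
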
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/register_routes.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/register_routes.ts
index cd1bccdda6734..1a6140968c925 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/register_routes.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/register_routes.ts
@@ -9,13 +9,14 @@ import type { Logger } from '@kbn/logging';
import { registerRoutes } from '@kbn/server-route-repository';
import { getGlobalObservabilityAIAssistantServerRouteRepository } from './get_global_observability_ai_assistant_route_repository';
import type { ObservabilityAIAssistantRouteHandlerResources } from './types';
+import { ObservabilityAIAssistantPluginStartDependencies } from '../types';
export function registerServerRoutes({
core,
logger,
dependencies,
}: {
- core: CoreSetup;
+ core: CoreSetup;
logger: Logger;
dependencies: Omit<
ObservabilityAIAssistantRouteHandlerResources,
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/types.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/types.ts
index c0b37b7142a83..b817328d22c64 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/types.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/routes/types.ts
@@ -68,6 +68,7 @@ export interface ObservabilityAIAssistantRouteHandlerResources {
export interface ObservabilityAIAssistantRouteCreateOptions {
options: {
timeout?: {
+ payload?: number;
idleSocket?: number;
};
tags: Array<'access:ai_assistant'>;
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/client/index.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/client/index.ts
index 048bbd2d362c2..2bd2fdcf22462 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/client/index.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/client/index.ts
@@ -80,6 +80,7 @@ import {
LangtraceServiceProvider,
withLangtraceChatCompleteSpan,
} from './operators/with_langtrace_chat_complete_span';
+import { runSemanticTextKnowledgeBaseMigration } from '../task_manager_definitions/register_migrate_knowledge_base_entries_task';
const MAX_FUNCTION_CALLS = 8;
@@ -721,11 +722,24 @@ export class ObservabilityAIAssistantClient {
};
getKnowledgeBaseStatus = () => {
- return this.dependencies.knowledgeBaseService.status();
+ return this.dependencies.knowledgeBaseService.getStatus();
};
- setupKnowledgeBase = () => {
- return this.dependencies.knowledgeBaseService.setup();
+ setupKnowledgeBase = (modelId: string | undefined) => {
+ const { esClient } = this.dependencies;
+ return this.dependencies.knowledgeBaseService.setup(esClient, modelId);
+ };
+
+ resetKnowledgeBase = () => {
+ const { esClient } = this.dependencies;
+ return this.dependencies.knowledgeBaseService.reset(esClient);
+ };
+
+ migrateKnowledgeBaseToSemanticText = () => {
+ return runSemanticTextKnowledgeBaseMigration({
+ esClient: this.dependencies.esClient,
+ logger: this.dependencies.logger,
+ });
};
addUserInstruction = async ({
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/index.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/index.ts
index eb7eab19340ce..6dcfbf1796501 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/index.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/index.ts
@@ -10,7 +10,6 @@ import { createConcreteWriteIndex, getDataStreamAdapter } from '@kbn/alerting-pl
import type { CoreSetup, CoreStart, KibanaRequest, Logger } from '@kbn/core/server';
import type { SecurityPluginStart } from '@kbn/security-plugin/server';
import { getSpaceIdFromPath } from '@kbn/spaces-plugin/common';
-import type { TaskManagerSetupContract } from '@kbn/task-manager-plugin/server';
import { once } from 'lodash';
import type { AssistantScope } from '@kbn/ai-assistant-common';
import { ObservabilityAIAssistantScreenContextRequest } from '../../common/types';
@@ -43,16 +42,13 @@ export const resourceNames = {
conversations: getResourceName('index-template-conversations'),
kb: getResourceName('index-template-kb'),
},
- pipelines: {
- kb: getResourceName('kb-ingest-pipeline'),
- },
};
export class ObservabilityAIAssistantService {
private readonly core: CoreSetup;
private readonly logger: Logger;
- private readonly getModelId: () => Promise;
- public kbService?: KnowledgeBaseService;
+ private readonly getSearchConnectorModelId: () => Promise;
+ private kbService?: KnowledgeBaseService;
private enableKnowledgeBase: boolean;
private readonly registrations: RegistrationCallback[] = [];
@@ -60,36 +56,28 @@ export class ObservabilityAIAssistantService {
constructor({
logger,
core,
- taskManager,
- getModelId,
+ getSearchConnectorModelId,
enableKnowledgeBase,
}: {
logger: Logger;
core: CoreSetup;
- taskManager: TaskManagerSetupContract;
- getModelId: () => Promise;
+ getSearchConnectorModelId: () => Promise;
enableKnowledgeBase: boolean;
}) {
this.core = core;
this.logger = logger;
- this.getModelId = getModelId;
+ this.getSearchConnectorModelId = getSearchConnectorModelId;
this.enableKnowledgeBase = enableKnowledgeBase;
- this.allowInit();
- }
-
- getKnowledgeBaseStatus() {
- return this.init().then(() => {
- return this.kbService!.status();
- });
+ this.resetInit();
}
init = async () => {};
- private allowInit = () => {
+ private resetInit = () => {
this.init = once(async () => {
return this.doInit().catch((error) => {
- this.allowInit();
+ this.resetInit(); // reset the once flag if an error occurs
throw error;
});
});
@@ -97,18 +85,18 @@ export class ObservabilityAIAssistantService {
private doInit = async () => {
try {
- const [coreStart, pluginsStart] = await this.core.getStartServices();
+ this.logger.debug('Setting up index assets');
+ const [coreStart] = await this.core.getStartServices();
- const elserModelId = await this.getModelId();
+ const { asInternalUser } = coreStart.elasticsearch.client;
- const esClient = coreStart.elasticsearch.client;
- await esClient.asInternalUser.cluster.putComponentTemplate({
+ await asInternalUser.cluster.putComponentTemplate({
create: false,
name: resourceNames.componentTemplate.conversations,
template: conversationComponentTemplate,
});
- await esClient.asInternalUser.indices.putIndexTemplate({
+ await asInternalUser.indices.putIndexTemplate({
name: resourceNames.indexTemplate.conversations,
composed_of: [resourceNames.componentTemplate.conversations],
create: false,
@@ -119,18 +107,13 @@ export class ObservabilityAIAssistantService {
auto_expand_replicas: '0-1',
hidden: true,
},
- mappings: {
- _meta: {
- model: elserModelId,
- },
- },
},
});
const conversationAliasName = resourceNames.aliases.conversations;
await createConcreteWriteIndex({
- esClient: esClient.asInternalUser,
+ esClient: asInternalUser,
logger: this.logger,
totalFieldsLimit: 10000,
indexPatterns: {
@@ -143,34 +126,15 @@ export class ObservabilityAIAssistantService {
dataStreamAdapter: getDataStreamAdapter({ useDataStreamForAlerts: false }),
});
- await esClient.asInternalUser.cluster.putComponentTemplate({
+ // Knowledge base: component template
+ await asInternalUser.cluster.putComponentTemplate({
create: false,
name: resourceNames.componentTemplate.kb,
template: kbComponentTemplate,
});
- await esClient.asInternalUser.ingest.putPipeline({
- id: resourceNames.pipelines.kb,
- processors: [
- {
- inference: {
- model_id: elserModelId,
- target_field: 'ml',
- field_map: {
- text: 'text_field',
- },
- inference_config: {
- // @ts-expect-error
- text_expansion: {
- results_field: 'tokens',
- },
- },
- },
- },
- ],
- });
-
- await esClient.asInternalUser.indices.putIndexTemplate({
+ // Knowledge base: index template
+ await asInternalUser.indices.putIndexTemplate({
name: resourceNames.indexTemplate.kb,
composed_of: [resourceNames.componentTemplate.kb],
create: false,
@@ -186,8 +150,9 @@ export class ObservabilityAIAssistantService {
const kbAliasName = resourceNames.aliases.kb;
+ // Knowledge base: write index
await createConcreteWriteIndex({
- esClient: esClient.asInternalUser,
+ esClient: asInternalUser,
logger: this.logger,
totalFieldsLimit: 10000,
indexPatterns: {
@@ -202,15 +167,16 @@ export class ObservabilityAIAssistantService {
this.kbService = new KnowledgeBaseService({
logger: this.logger.get('kb'),
- esClient,
- taskManagerStart: pluginsStart.taskManager,
- getModelId: this.getModelId,
+ esClient: {
+ asInternalUser,
+ },
+ getSearchConnectorModelId: this.getSearchConnectorModelId,
enabled: this.enableKnowledgeBase,
});
this.logger.info('Successfully set up index assets');
} catch (error) {
- this.logger.error(`Failed to initialize service: ${error.message}`);
+ this.logger.error(`Failed setting up index assets: ${error.message}`);
this.logger.debug(error);
throw error;
}
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/inference_endpoint.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/inference_endpoint.ts
new file mode 100644
index 0000000000000..1d09311dbd6ea
--- /dev/null
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/inference_endpoint.ts
@@ -0,0 +1,104 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { errors } from '@elastic/elasticsearch';
+import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
+import { Logger } from '@kbn/logging';
+import moment from 'moment';
+
+export const AI_ASSISTANT_KB_INFERENCE_ID = 'ai_assistant_kb_inference';
+
+export async function createInferenceEndpoint({
+ esClient,
+ logger,
+ modelId = '.elser_model_2',
+}: {
+ esClient: {
+ asCurrentUser: ElasticsearchClient;
+ };
+ logger: Logger;
+ modelId: string | undefined;
+}) {
+ try {
+ logger.debug(`Creating inference endpoint "${AI_ASSISTANT_KB_INFERENCE_ID}"`);
+
+ return await esClient.asCurrentUser.inference.put(
+ {
+ inference_id: AI_ASSISTANT_KB_INFERENCE_ID,
+ task_type: 'sparse_embedding',
+ inference_config: {
+ service: 'elasticsearch',
+ service_settings: {
+ model_id: modelId,
+ adaptive_allocations: { enabled: true },
+ num_threads: 1,
+ },
+ task_settings: {},
+ },
+ },
+ {
+ requestTimeout: moment.duration(2, 'minutes').asMilliseconds(),
+ }
+ );
+ } catch (e) {
+ logger.error(
+ `Failed to create inference endpoint "${AI_ASSISTANT_KB_INFERENCE_ID}": ${e.message}`
+ );
+ throw e;
+ }
+}
+
+export async function deleteInferenceEndpoint({
+ esClient,
+ logger,
+}: {
+ esClient: {
+ asCurrentUser: ElasticsearchClient;
+ };
+ logger: Logger;
+}) {
+ try {
+ const response = await esClient.asCurrentUser.inference.delete({
+ inference_id: AI_ASSISTANT_KB_INFERENCE_ID,
+ force: true,
+ });
+
+ return response;
+ } catch (e) {
+ logger.error(`Failed to delete inference endpoint: ${e.message}`);
+ throw e;
+ }
+}
+
+export async function getInferenceEndpoint({
+ esClient,
+ logger,
+}: {
+ esClient: { asInternalUser: ElasticsearchClient };
+ logger: Logger;
+}) {
+ try {
+ const response = await esClient.asInternalUser.inference.get({
+ inference_id: AI_ASSISTANT_KB_INFERENCE_ID,
+ });
+
+ if (response.endpoints.length > 0) {
+ return response.endpoints[0];
+ }
+ } catch (e) {
+ logger.error(`Failed to fetch inference endpoint: ${e.message}`);
+ throw e;
+ }
+}
+
+export function isInferenceEndpointMissingOrUnavailable(error: Error) {
+ return (
+ error instanceof errors.ResponseError &&
+ (error.body?.error?.type === 'resource_not_found_exception' ||
+ error.body?.error?.type === 'status_exception')
+ );
+}
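
A condensed sketch of how the knowledge base service (changed further down in this diff) composes these helpers; the scoped client shape is assumed from that service code and the sketch is not the actual class:

// Sketch: setup deletes any stale endpoint and recreates it;
// reset deletes it and treats "already missing" as success.
async function setupKbInference(
  esClient: { asCurrentUser: ElasticsearchClient },
  logger: Logger,
  modelId?: string
) {
  await deleteInferenceEndpoint({ esClient, logger }).catch(() => {}); // may not exist yet
  return createInferenceEndpoint({ esClient, logger, modelId });
}

async function resetKbInference(
  esClient: { asCurrentUser: ElasticsearchClient },
  logger: Logger
) {
  try {
    await deleteInferenceEndpoint({ esClient, logger });
  } catch (error) {
    if (isInferenceEndpointMissingOrUnavailable(error)) {
      return; // endpoint was already gone
    }
    throw error;
  }
}
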
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/kb_component_template.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/kb_component_template.ts
index b1b2d3293a234..6cf89b0c9e22d 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/kb_component_template.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/kb_component_template.ts
@@ -6,6 +6,7 @@
*/
import { ClusterComponentTemplate } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
+import { AI_ASSISTANT_KB_INFERENCE_ID } from './inference_endpoint';
const keyword = {
type: 'keyword' as const,
@@ -58,6 +59,14 @@ export const kbComponentTemplate: ClusterComponentTemplate['component_template']
},
namespace: keyword,
text,
+ semantic_text: {
+ type: 'semantic_text',
+ inference_id: AI_ASSISTANT_KB_INFERENCE_ID,
+ // @ts-expect-error: @elastic/elasticsearch does not have this type yet
+ model_settings: {
+ task_type: 'sparse_embedding',
+ },
+ },
'ml.tokens': {
type: 'rank_features',
},
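
For context, once an entry is indexed with `semantic_text` set (the knowledge base service below writes `semantic_text: doc.text`), its `_source` ends up in roughly this shape. The sketch is based on the assertions in the 8.15 migration test at the end of this diff; values are illustrative:

// Illustrative _source of a migrated entry (shape taken from knowledge_base_migration.spec.ts).
const migratedEntrySource = {
  text: "The user's favourite color is blue.",
  semantic_text: {
    text: "The user's favourite color is blue.",
    inference: {
      inference_id: AI_ASSISTANT_KB_INFERENCE_ID, // 'ai_assistant_kb_inference'
      chunks: [{ text: "The user's favourite color is blue.", embeddings: { /* sparse tokens */ } }],
    },
  },
};
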
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/knowledge_base_service/index.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/knowledge_base_service/index.ts
index 92ce3a4a7e03b..66a49cdc29bee 100644
--- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/knowledge_base_service/index.ts
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/knowledge_base_service/index.ts
@@ -4,15 +4,12 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
-import { errors } from '@elastic/elasticsearch';
-import { serverUnavailable, gatewayTimeout, badRequest } from '@hapi/boom';
+
+import { serverUnavailable } from '@hapi/boom';
import type { ElasticsearchClient, IUiSettingsClient } from '@kbn/core/server';
import type { Logger } from '@kbn/logging';
-import type { TaskManagerStartContract } from '@kbn/task-manager-plugin/server';
-import pRetry from 'p-retry';
import { orderBy } from 'lodash';
import { encode } from 'gpt-tokenizer';
-import { MlTrainedModelDeploymentNodesStats } from '@elastic/elasticsearch/lib/api/types';
import { resourceNames } from '..';
import {
Instruction,
@@ -22,13 +19,21 @@ import {
} from '../../../common/types';
import { getAccessQuery } from '../util/get_access_query';
import { getCategoryQuery } from '../util/get_category_query';
+import {
+ AI_ASSISTANT_KB_INFERENCE_ID,
+ createInferenceEndpoint,
+ deleteInferenceEndpoint,
+ getInferenceEndpoint,
+ isInferenceEndpointMissingOrUnavailable,
+} from '../inference_endpoint';
import { recallFromConnectors } from './recall_from_connectors';
interface Dependencies {
- esClient: { asInternalUser: ElasticsearchClient };
+ esClient: {
+ asInternalUser: ElasticsearchClient;
+ };
logger: Logger;
- taskManagerStart: TaskManagerStartContract;
- getModelId: () => Promise;
+ getSearchConnectorModelId: () => Promise;
enabled: boolean;
}
@@ -40,20 +45,6 @@ export interface RecalledEntry {
labels?: Record;
}
-function isModelMissingOrUnavailableError(error: Error) {
- return (
- error instanceof errors.ResponseError &&
- (error.body?.error?.type === 'resource_not_found_exception' ||
- error.body?.error?.type === 'status_exception')
- );
-}
-function isCreateModelValidationError(error: Error) {
- return (
- error instanceof errors.ResponseError &&
- error.statusCode === 400 &&
- error.body?.error?.type === 'action_request_validation_exception'
- );
-}
function throwKnowledgeBaseNotReady(body: any) {
throw serverUnavailable(`Knowledge base is not ready yet`, body);
}
@@ -61,202 +52,64 @@ function throwKnowledgeBaseNotReady(body: any) {
export class KnowledgeBaseService {
constructor(private readonly dependencies: Dependencies) {}
- setup = async () => {
- this.dependencies.logger.debug('Setting up knowledge base');
- if (!this.dependencies.enabled) {
- return;
- }
- const elserModelId = await this.dependencies.getModelId();
-
- const retryOptions = { factor: 1, minTimeout: 10000, retries: 12 };
- const getModelInfo = async () => {
- return await this.dependencies.esClient.asInternalUser.ml.getTrainedModels({
- model_id: elserModelId,
- include: 'definition_status',
- });
- };
-
- const isModelInstalledAndReady = async () => {
- try {
- const getResponse = await getModelInfo();
- this.dependencies.logger.debug(
- () => 'Model definition status:\n' + JSON.stringify(getResponse.trained_model_configs[0])
- );
-
- return Boolean(getResponse.trained_model_configs[0]?.fully_defined);
- } catch (error) {
- if (isModelMissingOrUnavailableError(error)) {
- return false;
- }
-
- throw error;
- }
- };
-
- const installModelIfDoesNotExist = async () => {
- const modelInstalledAndReady = await isModelInstalledAndReady();
- if (!modelInstalledAndReady) {
- await installModel();
- }
- };
-
- const installModel = async () => {
- this.dependencies.logger.info(`Installing ${elserModelId} model`);
- try {
- await this.dependencies.esClient.asInternalUser.ml.putTrainedModel(
- {
- model_id: elserModelId,
- input: {
- field_names: ['text_field'],
- },
- wait_for_completion: true,
- },
- { requestTimeout: '20m' }
- );
- } catch (error) {
- if (isCreateModelValidationError(error)) {
- throw badRequest(error);
- } else {
- throw error;
- }
- }
- this.dependencies.logger.info(`Finished installing ${elserModelId} model`);
- };
-
- const pollForModelInstallCompleted = async () => {
- await pRetry(async () => {
- this.dependencies.logger.info(`Polling installation of ${elserModelId} model`);
- const modelInstalledAndReady = await isModelInstalledAndReady();
- if (!modelInstalledAndReady) {
- throwKnowledgeBaseNotReady({
- message: 'Model is not fully defined',
- });
- }
- }, retryOptions);
- };
- await installModelIfDoesNotExist();
- await pollForModelInstallCompleted();
+ async setup(
+ esClient: {
+ asCurrentUser: ElasticsearchClient;
+ asInternalUser: ElasticsearchClient;
+ },
+ modelId: string | undefined
+ ) {
+ await deleteInferenceEndpoint({ esClient, logger: this.dependencies.logger }).catch((e) => {}); // ensure existing inference endpoint is deleted
+ return createInferenceEndpoint({ esClient, logger: this.dependencies.logger, modelId });
+ }
+ async reset(esClient: { asCurrentUser: ElasticsearchClient }) {
try {
- await this.dependencies.esClient.asInternalUser.ml.startTrainedModelDeployment({
- model_id: elserModelId,
- wait_for: 'fully_allocated',
- });
+ await deleteInferenceEndpoint({ esClient, logger: this.dependencies.logger });
} catch (error) {
- this.dependencies.logger.debug(`Error starting ${elserModelId} model deployment`);
- this.dependencies.logger.debug(error);
- if (!isModelMissingOrUnavailableError(error)) {
- throw error;
- }
- }
-
- await pRetry(async () => {
- const response = await this.dependencies.esClient.asInternalUser.ml.getTrainedModelsStats({
- model_id: elserModelId,
- });
-
- const isReady = response.trained_model_stats.some((stats) =>
- (stats.deployment_stats?.nodes as unknown as MlTrainedModelDeploymentNodesStats[]).some(
- (node) => node.routing_state.routing_state === 'started'
- )
- );
-
- if (isReady) {
+ if (isInferenceEndpointMissingOrUnavailable(error)) {
return;
}
-
- this.dependencies.logger.debug(`${elserModelId} model is not allocated yet`);
- this.dependencies.logger.debug(() => JSON.stringify(response));
-
- throw gatewayTimeout();
- }, retryOptions);
-
- this.dependencies.logger.info(`${elserModelId} model is ready`);
- };
-
- status = async () => {
- this.dependencies.logger.debug('Checking model status');
- if (!this.dependencies.enabled) {
- return { ready: false, enabled: false };
- }
- const elserModelId = await this.dependencies.getModelId();
-
- try {
- const modelStats = await this.dependencies.esClient.asInternalUser.ml.getTrainedModelsStats({
- model_id: elserModelId,
- });
- const elserModelStats = modelStats.trained_model_stats[0];
- const deploymentState = elserModelStats.deployment_stats?.state;
- const allocationState = elserModelStats.deployment_stats?.allocation_status.state;
- const ready = deploymentState === 'started' && allocationState === 'fully_allocated';
-
- this.dependencies.logger.debug(
- `Model deployment state: ${deploymentState}, allocation state: ${allocationState}, ready: ${ready}`
- );
-
- return {
- ready,
- deployment_state: deploymentState,
- allocation_state: allocationState,
- model_name: elserModelId,
- enabled: true,
- };
- } catch (error) {
- this.dependencies.logger.debug(
- `Failed to get status for model "${elserModelId}" due to ${error.message}`
- );
-
- return {
- error: error instanceof errors.ResponseError ? error.body.error : String(error),
- ready: false,
- enabled: true,
- model_name: elserModelId,
- };
+ throw error;
}
- };
+ }
private async recallFromKnowledgeBase({
queries,
categories,
namespace,
user,
- modelId,
}: {
queries: Array<{ text: string; boost?: number }>;
categories?: string[];
namespace: string;
user?: { name: string };
- modelId: string;
}): Promise {
- const esQuery = {
- bool: {
- should: queries.map(({ text, boost = 1 }) => ({
- text_expansion: {
- 'ml.tokens': {
- model_text: text,
- model_id: modelId,
- boost,
- },
- },
- })),
- filter: [
- ...getAccessQuery({
- user,
- namespace,
- }),
- ...getCategoryQuery({ categories }),
-
- // exclude user instructions
- { bool: { must_not: { term: { type: KnowledgeBaseType.UserInstruction } } } },
- ],
- },
- };
-
const response = await this.dependencies.esClient.asInternalUser.search<
Pick & { doc_id?: string }
>({
index: [resourceNames.aliases.kb],
- query: esQuery,
+ query: {
+ bool: {
+ should: queries.map(({ text, boost = 1 }) => ({
+ semantic: {
+ field: 'semantic_text',
+ query: text,
+ boost,
+ },
+ })),
+ filter: [
+ ...getAccessQuery({
+ user,
+ namespace,
+ }),
+ ...getCategoryQuery({ categories }),
+
+ // exclude user instructions
+ { bool: { must_not: { term: { type: KnowledgeBaseType.UserInstruction } } } },
+ ],
+ },
+ },
size: 20,
_source: {
includes: ['text', 'is_correction', 'labels', 'doc_id', 'title'],
@@ -295,7 +148,7 @@ export class KnowledgeBaseService {
this.dependencies.logger.debug(
() => `Recalling entries from KB for queries: "${JSON.stringify(queries)}"`
);
- const modelId = await this.dependencies.getModelId();
+ const modelId = await this.dependencies.getSearchConnectorModelId();
const [documentsFromKb, documentsFromConnectors] = await Promise.all([
this.recallFromKnowledgeBase({
@@ -303,9 +156,8 @@ export class KnowledgeBaseService {
queries,
categories,
namespace,
- modelId,
}).catch((error) => {
- if (isModelMissingOrUnavailableError(error)) {
+ if (isInferenceEndpointMissingOrUnavailable(error)) {
throwKnowledgeBaseNotReady(error.body);
}
throw error;
@@ -462,7 +314,7 @@ export class KnowledgeBaseService {
})),
};
} catch (error) {
- if (isModelMissingOrUnavailableError(error)) {
+ if (isInferenceEndpointMissingOrUnavailable(error)) {
throwKnowledgeBaseNotReady(error.body);
}
throw error;
@@ -552,14 +404,14 @@ export class KnowledgeBaseService {
document: {
'@timestamp': new Date().toISOString(),
...doc,
+ semantic_text: doc.text,
user,
namespace,
},
- pipeline: resourceNames.pipelines.kb,
refresh: 'wait_for',
});
} catch (error) {
- if (error instanceof errors.ResponseError && error.body.error.type === 'status_exception') {
+ if (isInferenceEndpointMissingOrUnavailable(error)) {
throwKnowledgeBaseNotReady(error.body);
}
throw error;
@@ -576,10 +428,66 @@ export class KnowledgeBaseService {
return Promise.resolve();
} catch (error) {
- if (isModelMissingOrUnavailableError(error)) {
+ if (isInferenceEndpointMissingOrUnavailable(error)) {
throwKnowledgeBaseNotReady(error.body);
}
throw error;
}
};
+
+ getStatus = async () => {
+ let errorMessage = '';
+ const endpoint = await getInferenceEndpoint({
+ esClient: this.dependencies.esClient,
+ logger: this.dependencies.logger,
+ }).catch((error) => {
+ if (!isInferenceEndpointMissingOrUnavailable(error)) {
+ throw error;
+ }
+ this.dependencies.logger.error(`Failed to get inference endpoint: ${error.message}`);
+ errorMessage = error.message;
+ });
+
+ const enabled = this.dependencies.enabled;
+ if (!endpoint) {
+ return { ready: false, enabled, errorMessage };
+ }
+
+ const modelId = endpoint.service_settings?.model_id;
+ const modelStats = await this.dependencies.esClient.asInternalUser.ml
+ .getTrainedModelsStats({ model_id: modelId })
+ .catch((error) => {
+ this.dependencies.logger.error(`Failed to get model stats: ${error.message}`);
+ errorMessage = error.message;
+ });
+
+ if (!modelStats) {
+ return { ready: false, enabled, errorMessage };
+ }
+
+ const elserModelStats = modelStats.trained_model_stats.find(
+ (stats) => stats.deployment_stats?.deployment_id === AI_ASSISTANT_KB_INFERENCE_ID
+ );
+ const deploymentState = elserModelStats?.deployment_stats?.state;
+ const allocationState = elserModelStats?.deployment_stats?.allocation_status.state;
+ const allocationCount =
+ elserModelStats?.deployment_stats?.allocation_status.allocation_count ?? 0;
+ const ready =
+ deploymentState === 'started' && allocationState === 'fully_allocated' && allocationCount > 0;
+
+ this.dependencies.logger.debug(
+ `Model deployment state: ${deploymentState}, allocation state: ${allocationState}, ready: ${ready}`
+ );
+
+ return {
+ endpoint,
+ ready,
+ enabled,
+ model_stats: {
+ allocation_count: allocationCount,
+ deployment_state: deploymentState,
+ allocation_state: allocationState,
+ },
+ };
+ };
}
diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/task_manager_definitions/register_migrate_knowledge_base_entries_task.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/task_manager_definitions/register_migrate_knowledge_base_entries_task.ts
new file mode 100644
index 0000000000000..3df125ab2ba2d
--- /dev/null
+++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/task_manager_definitions/register_migrate_knowledge_base_entries_task.ts
@@ -0,0 +1,149 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
+import pLimit from 'p-limit';
+import { TaskManagerSetupContract } from '@kbn/task-manager-plugin/server';
+import type { CoreSetup, Logger } from '@kbn/core/server';
+import pRetry from 'p-retry';
+import { KnowledgeBaseEntry } from '../../../common';
+import { resourceNames } from '..';
+import { getInferenceEndpoint } from '../inference_endpoint';
+import { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
+
+const TASK_ID = 'obs-ai-assistant:knowledge-base-migration-task-id';
+const TASK_TYPE = 'obs-ai-assistant:knowledge-base-migration';
+
+// This task re-indexes all knowledge base entries that are missing the `semantic_text` field
+// so that the field is populated with the correct embeddings.
+// After the migration the `ml.tokens` field is no longer needed.
+export async function registerMigrateKnowledgeBaseEntriesTask({
+ taskManager,
+ logger,
+ core,
+}: {
+ taskManager: TaskManagerSetupContract;
+ logger: Logger;
+ core: CoreSetup;
+}) {
+ logger.debug(`Register task "${TASK_TYPE}"`);
+
+ const [coreStart, pluginsStart] = await core.getStartServices();
+
+ taskManager.registerTaskDefinitions({
+ [TASK_TYPE]: {
+ title: 'Migrate AI Assistant Knowledge Base',
+ description: `Migrates AI Assistant knowledge base entries`,
+ timeout: '1h',
+ maxAttempts: 5,
+ createTaskRunner() {
+ return {
+ async run() {
+ logger.debug(`Run task: "${TASK_TYPE}"`);
+
+ const esClient = { asInternalUser: coreStart.elasticsearch.client.asInternalUser };
+ await runSemanticTextKnowledgeBaseMigration({ esClient, logger });
+ },
+ };
+ },
+ },
+ });
+
+ logger.debug(`Scheduling task: "${TASK_TYPE}"`);
+ await pluginsStart.taskManager.ensureScheduled({
+ id: TASK_ID,
+ taskType: TASK_TYPE,
+ scope: ['aiAssistant'],
+ params: {},
+ state: {},
+ });
+}
+
+export async function runSemanticTextKnowledgeBaseMigration({
+ esClient,
+ logger,
+}: {
+ esClient: { asInternalUser: ElasticsearchClient };
+ logger: Logger;
+}) {
+ logger.debug('Knowledge base migration: Running migration');
+
+ try {
+ const response = await esClient.asInternalUser.search({
+ size: 100,
+ track_total_hits: true,
+ index: [resourceNames.aliases.kb],
+ query: {
+ bool: {
+ must_not: {
+ exists: {
+ field: 'semantic_text',
+ },
+ },
+ },
+ },
+ _source: {
+ excludes: ['ml.tokens'],
+ },
+ });
+
+ if (response.hits.hits.length === 0) {
+ logger.debug('Knowledge base migration: No remaining entries to migrate');
+ return;
+ }
+
+ logger.debug(`Knowledge base migration: Found ${response.hits.hits.length} entries to migrate`);
+
+ await waitForInferenceEndpoint({ esClient, logger });
+
+ // Limit the number of concurrent requests to avoid overloading the cluster
+ const limiter = pLimit(10);
+ const promises = response.hits.hits.map((hit) => {
+ return limiter(() => {
+ if (!hit._source || !hit._id) {
+ return;
+ }
+
+ return esClient.asInternalUser.update({
+ index: resourceNames.aliases.kb,
+ id: hit._id,
+ body: {
+ doc: {
+ ...hit._source,
+ semantic_text: hit._source.text,
+ },
+ },
+ });
+ });
+ });
+
+ await Promise.all(promises);
+ logger.debug(`Knowledge base migration: Migrated ${promises.length} entries`);
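+ // Process the next batch; the search above returns at most 100 entries per pass.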
+ await runSemanticTextKnowledgeBaseMigration({ esClient, logger });
+ } catch (e) {
+ logger.error('Knowledge base migration: Failed to migrate entries');
+ logger.error(e);
+ }
+}
+
+async function waitForInferenceEndpoint({
+ esClient,
+ logger,
+}: {
+ esClient: { asInternalUser: ElasticsearchClient };
+ logger: Logger;
+}) {
+ return pRetry(
+ async () => {
+ const endpoint = await getInferenceEndpoint({ esClient, logger });
+ if (!endpoint) {
+ throw new Error('Inference endpoint not yet ready');
+ }
+ },
+ { retries: 20, factor: 2 }
+ );
+}
diff --git a/x-pack/plugins/translations/translations/fr-FR.json b/x-pack/plugins/translations/translations/fr-FR.json
index 4b6d44deda847..1fe5a7108be5a 100644
--- a/x-pack/plugins/translations/translations/fr-FR.json
+++ b/x-pack/plugins/translations/translations/fr-FR.json
@@ -10096,16 +10096,10 @@
"xpack.aiAssistant.technicalPreviewBadgeDescription": "GTP4 est nécessaire pour bénéficier d'une meilleure expérience avec les appels de fonctions (par exemple lors de la réalisation d'analyse de la cause d'un problème, de la visualisation de données et autres). GPT3.5 peut fonctionner pour certains des workflows les plus simples comme les explications d'erreurs ou pour bénéficier d'une expérience comparable à ChatGPT au sein de Kibana à partir du moment où les appels de fonctions ne sont pas fréquents.",
"xpack.aiAssistant.userExecutedFunctionEvent": "a exécuté la fonction {functionName}",
"xpack.aiAssistant.userSuggestedFunctionEvent": "a demandé la fonction {functionName}",
- "xpack.aiAssistant.welcomeMessage.div.checkTrainedModelsToLabel": "{retryInstallingLink} ou vérifiez {trainedModelsLink} pour vous assurer que {modelName} est déployé et en cours d'exécution.",
"xpack.aiAssistant.welcomeMessage.div.settingUpKnowledgeBaseLabel": "Configuration de la base de connaissances",
"xpack.aiAssistant.welcomeMessage.inspectErrorsButtonEmptyLabel": "Inspecter les problèmes",
- "xpack.aiAssistant.welcomeMessage.issuesDescriptionListTitleLabel": "Problèmes",
"xpack.aiAssistant.welcomeMessage.knowledgeBaseSuccessfullyInstalledLabel": "La base de connaissances a été installée avec succès",
- "xpack.aiAssistant.welcomeMessage.modelIsNotDeployedLabel": "Le modèle {modelName} n'est pas déployé",
- "xpack.aiAssistant.welcomeMessage.modelIsNotFullyAllocatedLabel": "L'état d'allocation de {modelName} est {allocationState}",
- "xpack.aiAssistant.welcomeMessage.modelIsNotStartedLabel": "L'état de déploiement de {modelName} est {deploymentState}",
"xpack.aiAssistant.welcomeMessage.retryButtonLabel": "Installer la base de connaissances",
- "xpack.aiAssistant.welcomeMessage.trainedModelsLinkLabel": "Modèles entraînés",
"xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel": "Nous configurons votre base de connaissances. Cette opération peut prendre quelques minutes. Vous pouvez continuer à utiliser l'Assistant lors de ce processus.",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsErrorTextLabel": "Impossible de charger les connecteurs",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsForbiddenTextLabel": "Vous n'avez pas les autorisations requises pour charger les connecteurs",
diff --git a/x-pack/plugins/translations/translations/ja-JP.json b/x-pack/plugins/translations/translations/ja-JP.json
index ff6abfbfdb28d..cbfb0957bb29f 100644
--- a/x-pack/plugins/translations/translations/ja-JP.json
+++ b/x-pack/plugins/translations/translations/ja-JP.json
@@ -10086,16 +10086,10 @@
"xpack.aiAssistant.technicalPreviewBadgeDescription": "関数呼び出し(根本原因分析やデータの視覚化など)を使用する際に、より一貫性のあるエクスペリエンスを実現するために、GPT4が必要です。GPT3.5は、エラーの説明などのシンプルなワークフローの一部や、頻繁な関数呼び出しの使用が必要とされないKibana内のエクスペリエンスなどのChatGPTで機能します。",
"xpack.aiAssistant.userExecutedFunctionEvent": "関数{functionName}を実行しました",
"xpack.aiAssistant.userSuggestedFunctionEvent": "関数{functionName}を要求しました",
- "xpack.aiAssistant.welcomeMessage.div.checkTrainedModelsToLabel": "{retryInstallingLink}か、{trainedModelsLink}を確認して、{modelName}がデプロイされ、実行中であることを確かめてください。",
"xpack.aiAssistant.welcomeMessage.div.settingUpKnowledgeBaseLabel": "ナレッジベースをセットアップ中",
"xpack.aiAssistant.welcomeMessage.inspectErrorsButtonEmptyLabel": "問題を検査",
- "xpack.aiAssistant.welcomeMessage.issuesDescriptionListTitleLabel": "問題",
"xpack.aiAssistant.welcomeMessage.knowledgeBaseSuccessfullyInstalledLabel": "ナレッジベースは正常にインストールされました",
- "xpack.aiAssistant.welcomeMessage.modelIsNotDeployedLabel": "モデル\"{modelName}\"はデプロイされていません",
- "xpack.aiAssistant.welcomeMessage.modelIsNotFullyAllocatedLabel": "\"{modelName}\"の割り当て状態は{allocationState}です",
- "xpack.aiAssistant.welcomeMessage.modelIsNotStartedLabel": "\"{modelName}\"のデプロイ状態は{deploymentState}です",
"xpack.aiAssistant.welcomeMessage.retryButtonLabel": "ナレッジベースをインストール",
- "xpack.aiAssistant.welcomeMessage.trainedModelsLinkLabel": "学習済みモデル",
"xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel": "ナレッジベースをセットアップしています。これには数分かかる場合があります。この処理の実行中には、アシスタントを使用し続けることができます。",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsErrorTextLabel": "コネクターを読み込めませんでした",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsForbiddenTextLabel": "コネクターを取得するために必要な権限が不足しています",
diff --git a/x-pack/plugins/translations/translations/zh-CN.json b/x-pack/plugins/translations/translations/zh-CN.json
index 8ea9fba9ba0c1..78d8d4abb48b8 100644
--- a/x-pack/plugins/translations/translations/zh-CN.json
+++ b/x-pack/plugins/translations/translations/zh-CN.json
@@ -10104,16 +10104,10 @@
"xpack.aiAssistant.technicalPreviewBadgeDescription": "需要 GPT4 以在使用函数调用时(例如,执行根本原因分析、数据可视化等时候)获得更加一致的体验。GPT3.5 可作用于某些更简单的工作流(如解释错误),或在 Kibana 中获得不需要频繁使用函数调用的与 ChatGPT 类似的体验。",
"xpack.aiAssistant.userExecutedFunctionEvent": "已执行函数 {functionName}",
"xpack.aiAssistant.userSuggestedFunctionEvent": "已请求函数 {functionName}",
- "xpack.aiAssistant.welcomeMessage.div.checkTrainedModelsToLabel": "{retryInstallingLink} 或检查 {trainedModelsLink},确保 {modelName} 已部署并正在运行。",
"xpack.aiAssistant.welcomeMessage.div.settingUpKnowledgeBaseLabel": "正在设置知识库",
"xpack.aiAssistant.welcomeMessage.inspectErrorsButtonEmptyLabel": "检查问题",
- "xpack.aiAssistant.welcomeMessage.issuesDescriptionListTitleLabel": "问题",
"xpack.aiAssistant.welcomeMessage.knowledgeBaseSuccessfullyInstalledLabel": "已成功安装知识库",
- "xpack.aiAssistant.welcomeMessage.modelIsNotDeployedLabel": "未部署模型 {modelName}",
- "xpack.aiAssistant.welcomeMessage.modelIsNotFullyAllocatedLabel": "{modelName} 的分配状态为 {allocationState}",
- "xpack.aiAssistant.welcomeMessage.modelIsNotStartedLabel": "{modelName} 的部署状态为 {deploymentState}",
"xpack.aiAssistant.welcomeMessage.retryButtonLabel": "安装知识库",
- "xpack.aiAssistant.welcomeMessage.trainedModelsLinkLabel": "已训练模型",
"xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel": "我们正在设置您的知识库。这可能需要若干分钟。此进程处于运行状态时,您可以继续使用该助手。",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsErrorTextLabel": "无法加载连接器",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsForbiddenTextLabel": "缺少获取连接器所需的权限",
@@ -28345,7 +28339,6 @@
"xpack.maps.source.esSearch.convertToGeoJsonErrorMsg": "无法将搜索响应转换成 geoJson 功能集合,错误:{errorMsg}",
"xpack.maps.source.esSearch.descendingLabel": "降序",
"xpack.maps.source.esSearch.extentFilterLabel": "在可见地图区域中动态筛留数据",
- "xpack.maps.source.esSearch.fieldNotFoundMsg": "在索引模式“{indexPatternName}”中找不到“{fieldName}”。",
"xpack.maps.source.esSearch.geofieldLabel": "地理空间字段",
"xpack.maps.source.esSearch.geoFieldLabel": "地理空间字段",
"xpack.maps.source.esSearch.geoFieldTypeLabel": "地理空间字段类型",
diff --git a/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/data.json.gz b/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/data.json.gz
new file mode 100644
index 0000000000000..ac64be04d3a23
Binary files /dev/null and b/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/data.json.gz differ
diff --git a/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/mappings.json b/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/mappings.json
new file mode 100644
index 0000000000000..68501ded3d887
--- /dev/null
+++ b/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/mappings.json
@@ -0,0 +1,102 @@
+{
+ "type": "index",
+ "value": {
+ "aliases": {
+ ".kibana-observability-ai-assistant-kb": {
+ "is_write_index": true
+ }
+ },
+ "index": ".kibana-observability-ai-assistant-kb-000001",
+ "mappings": {
+ "dynamic": "false",
+ "properties": {
+ "@timestamp": {
+ "type": "date"
+ },
+ "confidence": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "conversation": {
+ "properties": {
+ "id": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "last_updated": {
+ "type": "date"
+ },
+ "title": {
+ "type": "text"
+ }
+ }
+ },
+ "doc_id": {
+ "fielddata": true,
+ "type": "text"
+ },
+ "id": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "is_correction": {
+ "type": "boolean"
+ },
+ "labels": {
+ "dynamic": "true",
+ "type": "object"
+ },
+ "ml": {
+ "properties": {
+ "tokens": {
+ "type": "rank_features"
+ }
+ }
+ },
+ "namespace": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "public": {
+ "type": "boolean"
+ },
+ "text": {
+ "type": "text"
+ },
+ "title": {
+ "fields": {
+ "keyword": {
+ "ignore_above": 256,
+ "type": "keyword"
+ }
+ },
+ "type": "text"
+ },
+ "type": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "user": {
+ "properties": {
+ "id": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ }
+ }
+ },
+ "settings": {
+ "index": {
+ "auto_expand_replicas": "0-1",
+ "hidden": "true",
+ "number_of_replicas": "0",
+ "number_of_shards": "1"
+ }
+ }
+ }
+}
diff --git a/x-pack/test/observability_ai_assistant_api_integration/configs/index.ts b/x-pack/test/observability_ai_assistant_api_integration/configs/index.ts
index 74f0016f009f7..4a39c31b0a3a7 100644
--- a/x-pack/test/observability_ai_assistant_api_integration/configs/index.ts
+++ b/x-pack/test/observability_ai_assistant_api_integration/configs/index.ts
@@ -31,7 +31,7 @@ export const observabilityAIAssistantFtrConfigs = {
__dirname,
'../../../../test/analytics/plugins/analytics_ftr_helpers'
),
- 'xpack.observabilityAIAssistant.modelId': SUPPORTED_TRAINED_MODELS.TINY_ELSER.name,
+ 'xpack.observabilityAIAssistant.modelId': SUPPORTED_TRAINED_MODELS.TINY_ELSER.name, // TODO: Remove
},
},
};
diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/complete/functions/summarize.spec.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/complete/functions/summarize.spec.ts
index 34da4270f7721..ae5ebdc19d44f 100644
--- a/x-pack/test/observability_ai_assistant_api_integration/tests/complete/functions/summarize.spec.ts
+++ b/x-pack/test/observability_ai_assistant_api_integration/tests/complete/functions/summarize.spec.ts
@@ -15,8 +15,10 @@ import {
deleteActionConnector,
} from '../../../common/action_connectors';
import {
+ TINY_ELSER,
clearKnowledgeBase,
createKnowledgeBaseModel,
+ deleteInferenceEndpoint,
deleteKnowledgeBaseModel,
} from '../../knowledge_base/helpers';
@@ -34,8 +36,13 @@ export default function ApiTest({ getService }: FtrProviderContext) {
before(async () => {
await createKnowledgeBaseModel(ml);
await observabilityAIAssistantAPIClient
- .editor({
+ .admin({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
})
.expect(200);
@@ -72,6 +79,7 @@ export default function ApiTest({ getService }: FtrProviderContext) {
await deleteActionConnector({ supertest, connectorId, log });
await deleteKnowledgeBaseModel(ml);
await clearKnowledgeBase(es);
+ await deleteInferenceEndpoint({ es });
});
it('persists entry in knowledge base', async () => {
diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts
index 91286dab811fc..fa1f15ddca4cd 100644
--- a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts
+++ b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts
@@ -6,6 +6,7 @@
*/
import { Client } from '@elastic/elasticsearch';
+import { AI_ASSISTANT_KB_INFERENCE_ID } from '@kbn/observability-ai-assistant-plugin/server/service/inference_endpoint';
import { MachineLearningProvider } from '../../../api_integration/services/ml';
import { SUPPORTED_TRAINED_MODELS } from '../../../functional/services/ml/api';
@@ -54,3 +55,19 @@ export async function clearConversations(es: Client) {
refresh: true,
});
}
+
+export async function deleteInferenceEndpoint({
+ es,
+ name = AI_ASSISTANT_KB_INFERENCE_ID,
+}: {
+ es: Client;
+ name?: string;
+}) {
+ return es.transport.request({
+ method: 'DELETE',
+ path: `_inference/sparse_embedding/${name}`,
+ querystring: {
+ force: true,
+ },
+ });
+}
diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base.spec.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base.spec.ts
index 27659f62ad579..8d8c2e2417686 100644
--- a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base.spec.ts
+++ b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base.spec.ts
@@ -8,7 +8,13 @@
import expect from '@kbn/expect';
import { type KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common';
import { FtrProviderContext } from '../../common/ftr_provider_context';
-import { clearKnowledgeBase, createKnowledgeBaseModel, deleteKnowledgeBaseModel } from './helpers';
+import {
+ TINY_ELSER,
+ clearKnowledgeBase,
+ createKnowledgeBaseModel,
+ deleteInferenceEndpoint,
+ deleteKnowledgeBaseModel,
+} from './helpers';
export default function ApiTest({ getService }: FtrProviderContext) {
const ml = getService('ml');
@@ -20,12 +26,20 @@ export default function ApiTest({ getService }: FtrProviderContext) {
await createKnowledgeBaseModel(ml);
await observabilityAIAssistantAPIClient
- .editor({ endpoint: 'POST /internal/observability_ai_assistant/kb/setup' })
+ .admin({
+ endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
+ })
.expect(200);
});
after(async () => {
await deleteKnowledgeBaseModel(ml);
+ await deleteInferenceEndpoint({ es });
await clearKnowledgeBase(es);
});
diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_migration.spec.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_migration.spec.ts
new file mode 100644
index 0000000000000..46638d8eebe19
--- /dev/null
+++ b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_migration.spec.ts
@@ -0,0 +1,160 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { orderBy } from 'lodash';
+import expect from '@kbn/expect';
+import { AI_ASSISTANT_KB_INFERENCE_ID } from '@kbn/observability-ai-assistant-plugin/server/service/inference_endpoint';
+import { SearchResponse } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
+import { KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common';
+import { FtrProviderContext } from '../../common/ftr_provider_context';
+import {
+ deleteKnowledgeBaseModel,
+ createKnowledgeBaseModel,
+ clearKnowledgeBase,
+ deleteInferenceEndpoint,
+ TINY_ELSER,
+} from './helpers';
+
+export default function ApiTest({ getService }: FtrProviderContext) {
+ const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient');
+ const esArchiver = getService('esArchiver');
+ const es = getService('es');
+ const ml = getService('ml');
+
+ const archive =
+ 'x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15';
+
+  describe('When there are knowledge base entries (from 8.15 or earlier) that do not contain semantic_text embeddings', () => {
+ before(async () => {
+ await clearKnowledgeBase(es);
+ await esArchiver.load(archive);
+ await createKnowledgeBaseModel(ml);
+ await observabilityAIAssistantAPIClient
+ .admin({
+ endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
+ })
+ .expect(200);
+ });
+
+ after(async () => {
+ await clearKnowledgeBase(es);
+ await esArchiver.unload(archive);
+ await deleteKnowledgeBaseModel(ml);
+ await deleteInferenceEndpoint({ es });
+ });
+
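+  // Queries the KB index directly so the raw documents (including any semantic_text embeddings) can be inspected.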
+ async function getKnowledgeBaseEntries() {
+ const res = (await es.search({
+ index: '.kibana-observability-ai-assistant-kb*',
+ body: {
+ query: {
+ match_all: {},
+ },
+ },
+ })) as SearchResponse<
+ KnowledgeBaseEntry & {
+ semantic_text: {
+ text: string;
+ inference: { inference_id: string; chunks: Array<{ text: string; embeddings: any }> };
+ };
+ }
+ >;
+
+ return res.hits.hits;
+ }
+
+ describe('before migrating', () => {
+ it('the docs do not have semantic_text embeddings', async () => {
+ const hits = await getKnowledgeBaseEntries();
+ const hasSemanticTextEmbeddings = hits.some((hit) => hit._source?.semantic_text);
+ expect(hasSemanticTextEmbeddings).to.be(false);
+ });
+ });
+
+ describe('after migrating', () => {
+ before(async () => {
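+        // Trigger the semantic_text migration so the archived 8.15 entries get embeddings.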
+ await observabilityAIAssistantAPIClient
+ .editor({
+ endpoint: 'POST /internal/observability_ai_assistant/kb/semantic_text_migration',
+ })
+ .expect(200);
+ });
+
+ it('the docs have semantic_text embeddings', async () => {
+ const hits = await getKnowledgeBaseEntries();
+ const hasSemanticTextEmbeddings = hits.every((hit) => hit._source?.semantic_text);
+ expect(hasSemanticTextEmbeddings).to.be(true);
+
+ expect(
+ orderBy(hits, '_source.title').map(({ _source }) => {
+ const { text, inference } = _source?.semantic_text!;
+
+ return {
+ text,
+ inferenceId: inference.inference_id,
+ chunkCount: inference.chunks.length,
+ };
+ })
+ ).to.eql([
+ {
+ text: 'To infinity and beyond!',
+ inferenceId: AI_ASSISTANT_KB_INFERENCE_ID,
+ chunkCount: 1,
+ },
+ {
+ text: "The user's favourite color is blue.",
+ inferenceId: AI_ASSISTANT_KB_INFERENCE_ID,
+ chunkCount: 1,
+ },
+ ]);
+ });
+
+ it('returns entries correctly via API', async () => {
+ await observabilityAIAssistantAPIClient
+ .editor({
+ endpoint: 'POST /internal/observability_ai_assistant/kb/semantic_text_migration',
+ })
+ .expect(200);
+
+ const res = await observabilityAIAssistantAPIClient
+ .editor({
+ endpoint: 'GET /internal/observability_ai_assistant/kb/entries',
+ params: {
+ query: {
+ query: '',
+ sortBy: 'title',
+ sortDirection: 'asc',
+ },
+ },
+ })
+ .expect(200);
+
+ expect(
+ res.body.entries.map(({ title, text, role, type }) => ({ title, text, role, type }))
+ ).to.eql([
+ {
+ role: 'user_entry',
+ title: 'Toy Story quote',
+ type: 'contextual',
+ text: 'To infinity and beyond!',
+ },
+ {
+ role: 'assistant_summarization',
+ title: "User's favourite color",
+ type: 'contextual',
+ text: "The user's favourite color is blue.",
+ },
+ ]);
+ });
+ });
+ });
+}
diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_setup.spec.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_setup.spec.ts
index 77f010d851f3c..b8cacaaa58351 100644
--- a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_setup.spec.ts
+++ b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_setup.spec.ts
@@ -7,30 +7,58 @@
import expect from '@kbn/expect';
import { FtrProviderContext } from '../../common/ftr_provider_context';
-import { deleteKnowledgeBaseModel, createKnowledgeBaseModel } from './helpers';
+import {
+ deleteKnowledgeBaseModel,
+ createKnowledgeBaseModel,
+ TINY_ELSER,
+ deleteInferenceEndpoint,
+} from './helpers';
export default function ApiTest({ getService }: FtrProviderContext) {
const ml = getService('ml');
+ const es = getService('es');
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient');
describe('/internal/observability_ai_assistant/kb/setup', () => {
- it('returns empty object when successful', async () => {
+ it('returns model info when successful', async () => {
await createKnowledgeBaseModel(ml);
const res = await observabilityAIAssistantAPIClient
- .editor({
+ .admin({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
})
.expect(200);
- expect(res.body).to.eql({});
+
+ expect(res.body.service_settings.model_id).to.be('pt_tiny_elser');
+ expect(res.body.inference_id).to.be('ai_assistant_kb_inference');
+
await deleteKnowledgeBaseModel(ml);
+ await deleteInferenceEndpoint({ es });
});
- it('returns bad request if model cannot be installed', async () => {
- await observabilityAIAssistantAPIClient
- .editor({
+ it('returns error message if model is not deployed', async () => {
+ const res = await observabilityAIAssistantAPIClient
+ .admin({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
})
- .expect(400);
+ .expect(500);
+
+ // @ts-expect-error
+ expect(res.body.message).to.include.string(
+ 'No known trained model with model_id [pt_tiny_elser]'
+ );
+
+ // @ts-expect-error
+ expect(res.body.statusCode).to.be(500);
});
});
}
diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_status.spec.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_status.spec.ts
index 6561c416f02cf..76ad2d06e344a 100644
--- a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_status.spec.ts
+++ b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_status.spec.ts
@@ -7,38 +7,66 @@
import expect from '@kbn/expect';
import { FtrProviderContext } from '../../common/ftr_provider_context';
-import { deleteKnowledgeBaseModel, createKnowledgeBaseModel, TINY_ELSER } from './helpers';
+import {
+ deleteKnowledgeBaseModel,
+ createKnowledgeBaseModel,
+ TINY_ELSER,
+ deleteInferenceEndpoint,
+} from './helpers';
export default function ApiTest({ getService }: FtrProviderContext) {
const ml = getService('ml');
+ const es = getService('es');
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient');
describe('/internal/observability_ai_assistant/kb/status', () => {
- before(async () => {
+ beforeEach(async () => {
await createKnowledgeBaseModel(ml);
await observabilityAIAssistantAPIClient
- .editor({
+ .admin({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
})
.expect(200);
});
- after(async () => {
- await deleteKnowledgeBaseModel(ml);
+ afterEach(async () => {
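+      // Cleanup is best-effort: individual tests may already have deleted the model or the inference endpoint.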
+      await deleteKnowledgeBaseModel(ml).catch(() => {});
+      await deleteInferenceEndpoint({ es }).catch(() => {});
});
it('returns correct status after knowledge base is setup', async () => {
+ const res = await observabilityAIAssistantAPIClient
+ .editor({ endpoint: 'GET /internal/observability_ai_assistant/kb/status' })
+ .expect(200);
+
+ expect(res.body.ready).to.be(true);
+ expect(res.body.enabled).to.be(true);
+ expect(res.body.endpoint?.service_settings?.model_id).to.eql(TINY_ELSER.id);
+ });
+
+ it('returns correct status after model is deleted', async () => {
+ await deleteKnowledgeBaseModel(ml);
+
const res = await observabilityAIAssistantAPIClient
.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
})
.expect(200);
- expect(res.body.deployment_state).to.eql('started');
- expect(res.body.model_name).to.eql(TINY_ELSER.id);
+
+ expect(res.body.ready).to.be(false);
+ expect(res.body.enabled).to.be(true);
+ expect(res.body.errorMessage).to.include.string(
+ 'No known trained model with model_id [pt_tiny_elser]'
+ );
});
- it('returns correct status after elser is stopped', async () => {
- await ml.api.stopTrainedModelDeploymentES(TINY_ELSER.id, true);
+ it('returns correct status after inference endpoint is deleted', async () => {
+ await deleteInferenceEndpoint({ es });
const res = await observabilityAIAssistantAPIClient
.editor({
@@ -46,11 +74,11 @@ export default function ApiTest({ getService }: FtrProviderContext) {
})
.expect(200);
- expect(res.body).to.eql({
- ready: false,
- model_name: TINY_ELSER.id,
- enabled: true,
- });
+ expect(res.body.ready).to.be(false);
+ expect(res.body.enabled).to.be(true);
+ expect(res.body.errorMessage).to.include.string(
+ 'Inference endpoint not found [ai_assistant_kb_inference]'
+ );
});
});
}
diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_user_instructions.spec.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_user_instructions.spec.ts
index 58c6b4d38be4f..6ea2b279fd386 100644
--- a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_user_instructions.spec.ts
+++ b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_user_instructions.spec.ts
@@ -12,9 +12,11 @@ import { CONTEXT_FUNCTION_NAME } from '@kbn/observability-ai-assistant-plugin/se
import { Instruction } from '@kbn/observability-ai-assistant-plugin/common/types';
import { FtrProviderContext } from '../../common/ftr_provider_context';
import {
+ TINY_ELSER,
clearConversations,
clearKnowledgeBase,
createKnowledgeBaseModel,
+ deleteInferenceEndpoint,
deleteKnowledgeBaseModel,
} from './helpers';
import { getConversationCreatedEvent } from '../conversations/helpers';
@@ -34,14 +36,21 @@ export default function ApiTest({ getService }: FtrProviderContext) {
describe('Knowledge base user instructions', () => {
before(async () => {
await createKnowledgeBaseModel(ml);
-
await observabilityAIAssistantAPIClient
- .editor({ endpoint: 'POST /internal/observability_ai_assistant/kb/setup' })
+ .admin({
+ endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
+ })
.expect(200);
});
after(async () => {
await deleteKnowledgeBaseModel(ml);
+ await deleteInferenceEndpoint({ es });
await clearKnowledgeBase(es);
await clearConversations(es);
});
diff --git a/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base_management/index.spec.ts b/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base_management/index.spec.ts
index 7a5a51ae58b6a..300fec6aa45aa 100644
--- a/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base_management/index.spec.ts
+++ b/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base_management/index.spec.ts
@@ -8,8 +8,10 @@
import expect from '@kbn/expect';
import { subj as testSubjSelector } from '@kbn/test-subj-selector';
import {
+ TINY_ELSER,
clearKnowledgeBase,
createKnowledgeBaseModel,
+ deleteInferenceEndpoint,
deleteKnowledgeBaseModel,
} from '../../../observability_ai_assistant_api_integration/tests/knowledge_base/helpers';
import { ObservabilityAIAssistantApiClient } from '../../../observability_ai_assistant_api_integration/common/observability_ai_assistant_api_client';
@@ -56,7 +58,14 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte
await Promise.all([
// setup the knowledge base
observabilityAIAssistantAPIClient
- .editor({ endpoint: 'POST /internal/observability_ai_assistant/kb/setup' })
+ .admin({
+ endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
+ })
.expect(200),
// login as editor
@@ -65,7 +74,12 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte
});
after(async () => {
- await Promise.all([deleteKnowledgeBaseModel(ml), clearKnowledgeBase(es), ui.auth.logout()]);
+ await Promise.all([
+ deleteKnowledgeBaseModel(ml),
+ deleteInferenceEndpoint({ es }),
+ clearKnowledgeBase(es),
+ ui.auth.logout(),
+ ]);
});
describe('when the LLM calls the "summarize" function for two different users', () => {
diff --git a/x-pack/test/plugin_api_integration/test_suites/task_manager/check_registered_task_types.ts b/x-pack/test/plugin_api_integration/test_suites/task_manager/check_registered_task_types.ts
index c8056c2ee205e..88ef256b353e6 100644
--- a/x-pack/test/plugin_api_integration/test_suites/task_manager/check_registered_task_types.ts
+++ b/x-pack/test/plugin_api_integration/test_suites/task_manager/check_registered_task_types.ts
@@ -151,6 +151,7 @@ export default function ({ getService }: FtrProviderContext) {
'fleet:update_agent_tags:retry',
'fleet:upgrade_action:retry',
'logs-data-telemetry',
+ 'obs-ai-assistant:knowledge-base-migration',
'osquery:telemetry-configs',
'osquery:telemetry-packs',
'osquery:telemetry-saved-queries',
diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/config.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/config.ts
index 36a60f29cb6ce..14078f228c7c8 100644
--- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/config.ts
+++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/config.ts
@@ -29,6 +29,6 @@ export default createTestConfig({
// https://github.com/elastic/project-controller/blob/main/internal/project/observability/config/elasticsearch.yml
esServerArgs: ['xpack.ml.dfa.enabled=false'],
kbnServerArgs: [
- `--xpack.observabilityAIAssistant.modelId=${SUPPORTED_TRAINED_MODELS.TINY_ELSER.name}`,
+ `--xpack.observabilityAIAssistant.modelId=${SUPPORTED_TRAINED_MODELS.TINY_ELSER.name}`, // TODO: Remove
],
});
diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/helpers.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/helpers.ts
deleted file mode 100644
index 6affeeb861295..0000000000000
--- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/helpers.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { MachineLearningProvider } from '@kbn/test-suites-xpack/api_integration/services/ml';
-import { SUPPORTED_TRAINED_MODELS } from '@kbn/test-suites-xpack/functional/services/ml/api';
-
-export const TINY_ELSER = {
- ...SUPPORTED_TRAINED_MODELS.TINY_ELSER,
- id: SUPPORTED_TRAINED_MODELS.TINY_ELSER.name,
-};
-
-export async function deleteKnowledgeBaseModel(ml: ReturnType) {
- await ml.api.stopTrainedModelDeploymentES(TINY_ELSER.id, true);
- await ml.api.deleteTrainedModelES(TINY_ELSER.id);
- await ml.testResources.cleanMLSavedObjects();
-}
diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base.spec.ts
index ce46939c365be..8f3569c9fc959 100644
--- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base.spec.ts
+++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base.spec.ts
@@ -9,8 +9,9 @@ import expect from '@kbn/expect';
import {
clearKnowledgeBase,
createKnowledgeBaseModel,
+ deleteInferenceEndpoint,
+ deleteKnowledgeBaseModel,
} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/knowledge_base/helpers';
-import { deleteKnowledgeBaseModel } from './helpers';
import { FtrProviderContext } from '../../common/ftr_provider_context';
import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services';
@@ -24,7 +25,6 @@ export default function ApiTest({ getService }: FtrProviderContext) {
// TODO: https://github.com/elastic/kibana/issues/192886
describe.skip('Knowledge base', function () {
- // TODO: https://github.com/elastic/kibana/issues/192757
this.tags(['skipMKI']);
let roleAuthc: RoleCredentials;
let internalReqHeader: InternalRequestHeader;
@@ -36,6 +36,7 @@ export default function ApiTest({ getService }: FtrProviderContext) {
after(async () => {
await deleteKnowledgeBaseModel(ml);
+ await deleteInferenceEndpoint({ es });
await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc);
});
diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_setup.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_setup.spec.ts
index ad5dd32f5c0b4..a792b01b0e2cb 100644
--- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_setup.spec.ts
+++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_setup.spec.ts
@@ -6,49 +6,80 @@
*/
import expect from '@kbn/expect';
-import { createKnowledgeBaseModel } from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/knowledge_base/helpers';
-import { deleteKnowledgeBaseModel } from './helpers';
+import {
+ createKnowledgeBaseModel,
+ deleteInferenceEndpoint,
+ TINY_ELSER,
+ deleteKnowledgeBaseModel,
+} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/knowledge_base/helpers';
+
import { FtrProviderContext } from '../../common/ftr_provider_context';
import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services';
export default function ApiTest({ getService }: FtrProviderContext) {
const ml = getService('ml');
+ const es = getService('es');
const svlUserManager = getService('svlUserManager');
const svlCommonApi = getService('svlCommonApi');
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient');
describe('/internal/observability_ai_assistant/kb/setup', function () {
- // TODO: https://github.com/elastic/kibana/issues/192757
this.tags(['skipMKI']);
let roleAuthc: RoleCredentials;
let internalReqHeader: InternalRequestHeader;
+
before(async () => {
- roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor');
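+      // Best-effort cleanup in case a previous run left the trained model or inference endpoint behind.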
+ await deleteKnowledgeBaseModel(ml).catch(() => {});
+ await deleteInferenceEndpoint({ es }).catch(() => {});
+
+ roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('admin');
internalReqHeader = svlCommonApi.getInternalRequestHeader();
});
+
after(async () => {
await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc);
});
+
-    it('returns empty object when successful', async () => {
+    it('returns model info when successful', async () => {
await createKnowledgeBaseModel(ml);
const res = await observabilityAIAssistantAPIClient
.slsUser({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
roleAuthc,
internalReqHeader,
})
.expect(200);
- expect(res.body).to.eql({});
+
+ expect(res.body.service_settings.model_id).to.be('pt_tiny_elser');
+ expect(res.body.inference_id).to.be('ai_assistant_kb_inference');
+
await deleteKnowledgeBaseModel(ml);
+ await deleteInferenceEndpoint({ es });
});
+
-    it('returns bad request if model cannot be installed', async () => {
+    it('returns error message if model is not deployed', async () => {
- await observabilityAIAssistantAPIClient
+ const res = await observabilityAIAssistantAPIClient
.slsUser({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
roleAuthc,
internalReqHeader,
})
- .expect(400);
+ .expect(500);
+
+ // @ts-expect-error
+ expect(res.body.message).to.include.string(
+ 'No known trained model with model_id [pt_tiny_elser]'
+ );
});
});
}
diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_status.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_status.spec.ts
index 60e7c743bbbbb..a7a7b55cf4e02 100644
--- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_status.spec.ts
+++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_status.spec.ts
@@ -7,31 +7,39 @@
import expect from '@kbn/expect';
import {
+ deleteInferenceEndpoint,
createKnowledgeBaseModel,
TINY_ELSER,
+ deleteKnowledgeBaseModel,
} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/knowledge_base/helpers';
-import { deleteKnowledgeBaseModel } from './helpers';
+import { AI_ASSISTANT_KB_INFERENCE_ID } from '@kbn/observability-ai-assistant-plugin/server/service/inference_endpoint';
import { FtrProviderContext } from '../../common/ftr_provider_context';
import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services';
export default function ApiTest({ getService }: FtrProviderContext) {
const ml = getService('ml');
+ const es = getService('es');
const svlUserManager = getService('svlUserManager');
const svlCommonApi = getService('svlCommonApi');
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient');
describe('/internal/observability_ai_assistant/kb/status', function () {
- // TODO: https://github.com/elastic/kibana/issues/192757
this.tags(['skipMKI']);
let roleAuthc: RoleCredentials;
let internalReqHeader: InternalRequestHeader;
+
before(async () => {
- roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor');
+ roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('admin');
internalReqHeader = svlCommonApi.getInternalRequestHeader();
await createKnowledgeBaseModel(ml);
await observabilityAIAssistantAPIClient
.slsUser({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
+ params: {
+ query: {
+ model_id: TINY_ELSER.id,
+ },
+ },
roleAuthc,
internalReqHeader,
})
@@ -40,6 +48,7 @@ export default function ApiTest({ getService }: FtrProviderContext) {
after(async () => {
await deleteKnowledgeBaseModel(ml);
+      await deleteInferenceEndpoint({ es, name: AI_ASSISTANT_KB_INFERENCE_ID }).catch(() => {});
await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc);
});
@@ -51,12 +60,14 @@ export default function ApiTest({ getService }: FtrProviderContext) {
internalReqHeader,
})
.expect(200);
- expect(res.body.deployment_state).to.eql('started');
- expect(res.body.model_name).to.eql(TINY_ELSER.id);
+
+ expect(res.body.enabled).to.be(true);
+ expect(res.body.ready).to.be(true);
+ expect(res.body.endpoint?.service_settings?.model_id).to.eql(TINY_ELSER.id);
});
-    it('returns correct status after elser is stopped', async () => {
+    it('returns correct status after inference endpoint is deleted', async () => {
- await ml.api.stopTrainedModelDeploymentES(TINY_ELSER.id, true);
+ await deleteInferenceEndpoint({ es, name: AI_ASSISTANT_KB_INFERENCE_ID });
const res = await observabilityAIAssistantAPIClient
.slsUser({
@@ -66,11 +77,8 @@ export default function ApiTest({ getService }: FtrProviderContext) {
})
.expect(200);
- expect(res.body).to.eql({
- ready: false,
- model_name: TINY_ELSER.id,
- enabled: true,
- });
+ expect(res.body.enabled).to.be(true);
+ expect(res.body.ready).to.be(false);
});
});
}
diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts
index 4181b6a14ffde..fc9864ac8768c 100644
--- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts
+++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts
@@ -13,13 +13,14 @@ import {
clearConversations,
clearKnowledgeBase,
createKnowledgeBaseModel,
+ deleteInferenceEndpoint,
+ deleteKnowledgeBaseModel,
} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/knowledge_base/helpers';
import { getConversationCreatedEvent } from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/conversations/helpers';
import {
LlmProxy,
createLlmProxy,
} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_llm_proxy';
-import { deleteKnowledgeBaseModel } from './helpers';
import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors';
import { FtrProviderContext } from '../../common/ftr_provider_context';
import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services';
@@ -59,6 +60,7 @@ export default function ApiTest({ getService }: FtrProviderContext) {
after(async () => {
await deleteKnowledgeBaseModel(ml);
+ await deleteInferenceEndpoint({ es });
await clearKnowledgeBase(es);
await clearConversations(es);
await svlUserManager.invalidateM2mApiKeyWithRoleScope(johnRoleAuthc);