diff --git a/src/config.ts b/src/config.ts index a5406945..fd32fa03 100644 --- a/src/config.ts +++ b/src/config.ts @@ -38,7 +38,8 @@ export default { model: 'chat-bison', minimumBalance: 0, isEnabled: Boolean(parseInt(process.env.LLMS_ENABLED ?? '1')), - prefixes: { bardPrefix: [',', 'b.', 'B.'] } + prefixes: { bardPrefix: [',', 'b.', 'B.'] }, + pdfUrl: process.env.PDF_URL ?? '' }, openAi: { dalle: { diff --git a/src/modules/llms/api/pdfHandler.ts b/src/modules/llms/api/pdfHandler.ts new file mode 100644 index 00000000..7e016ebd --- /dev/null +++ b/src/modules/llms/api/pdfHandler.ts @@ -0,0 +1,40 @@ +import axios, { AxiosError } from 'axios' +import config from '../../../config' +import { type ChatConversation } from '../../types' + +export interface PdfCompletion { + completion: ChatConversation | undefined + prompt: string + price: number +} + +export const handlePdf = async (prompt: string): Promise<PdfCompletion> => { + try { + const data = { question: prompt } + const url = `${config.llms.pdfUrl}/ask` + const response = await axios.post(url, data) + if (response) { + console.log(response.data) + return { + completion: { + content: response.data.response, + role: 'system' + }, + prompt, + price: response.data.cost + } + } + return { + completion: undefined, + prompt, + price: 0 + } + } catch (error: any) { + if (error instanceof AxiosError) { + console.log(error.code) + console.log(error.message) + console.log(error.stack) + } + throw error + } +} diff --git a/src/modules/llms/helpers.ts b/src/modules/llms/helpers.ts index 9ce29d82..ededb702 100644 --- a/src/modules/llms/helpers.ts +++ b/src/modules/llms/helpers.ts @@ -13,7 +13,8 @@ import { type Message } from 'grammy/out/types' export const SupportedCommands = { bardF: { name: 'bard' }, - bard: { name: 'b' } + bard: { name: 'b' }, + pdf: { name: 'pdf' } } export const MAX_TRIES = 3 diff --git a/src/modules/llms/index.ts b/src/modules/llms/index.ts index 9d7f13c4..12474158 100644 --- a/src/modules/llms/index.ts +++ 
b/src/modules/llms/index.ts @@ -27,6 +27,7 @@ import { vertexCompletion } from './api/vertex' import { type LlmCompletion, llmCompletion } from './api/liteLlm' import { LlmsModelsEnum } from './types' import * as Sentry from '@sentry/node' +import { handlePdf } from './api/pdfHandler' export class LlmsBot implements PayableBot { public readonly module = 'LlmsBot' private readonly logger: Logger @@ -82,6 +83,10 @@ export class LlmsBot implements PayableBot { return } + if (ctx.hasCommand(SupportedCommands.pdf.name)) { + await this.onPdfHandler(ctx) + return + } this.logger.warn('### unsupported command') ctx.session.analytics.sessionState = SessionState.Error await sendMessage(ctx, '### unsupported command').catch(async (e) => { @@ -102,6 +107,40 @@ export class LlmsBot implements PayableBot { ) } + private async onPdfHandler (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> { + if (!ctx.chat?.id) { + throw new Error('internal error') + } + try { + const { chatConversation } = ctx.session.llms + const msgId = ( + await ctx.reply('...', { message_thread_id: ctx.message?.message_thread_id }) + ).message_id + const prompt = ctx.match as string + const response = await handlePdf(prompt) + if (response.completion) { + await ctx.api.editMessageText( + ctx.chat.id, + msgId, + response.completion.content + ).catch(async (e: any) => { await this.onError(ctx, e) }) + if ( + !(await this.payments.pay(ctx as OnMessageContext, response.price)) + ) { + await this.onNotBalanceMessage(ctx) + return + } + chatConversation.push({ + content: prompt, + role: 'user' + }) + chatConversation.push(response.completion) + } + } catch (e) { + await this.onError(ctx, e) + } + } + private async promptGen (data: ChatPayload): Promise<{ price: number, chat: ChatConversation[] }> { const { conversation, ctx, model } = data if (!ctx.chat?.id) {