diff --git a/src/bot.ts b/src/bot.ts
index 32b337f..225c736 100644
--- a/src/bot.ts
+++ b/src/bot.ts
@@ -56,6 +56,7 @@ import { now } from './utils/perf'
 import { VoiceToVoiceGPTBot } from './modules/voice-to-voice-gpt'
 // import { VoiceCommand } from './modules/voice-command'
 import { createInitialSessionData } from './helpers'
+import { LlamaAgent } from './modules/subagents'
 
 Events.EventEmitter.defaultMaxListeners = 30
 
@@ -198,11 +199,11 @@ const voiceMemo = new VoiceMemo()
 const walletConnect = new WalletConnect()
 const payments = new BotPayments()
 const schedule = new BotSchedule(bot)
-// const openAiBot = new OpenAIBot(payments)
-const openAiBot = new OpenAIBot(payments)
+const llamaAgent = new LlamaAgent(payments, 'llamaService')
+const openAiBot = new OpenAIBot(payments, [llamaAgent])
 const dalleBot = new DalleBot(payments)
 const claudeBot = new ClaudeBot(payments)
-const vertexBot = new VertexBot(payments)
+const vertexBot = new VertexBot(payments, [llamaAgent])
 const oneCountryBot = new OneCountryBot(payments)
 const translateBot = new TranslateBot()
 const telegramPayments = new TelegramPayments(payments)
@@ -358,6 +359,11 @@ const onMessage = async (ctx: OnMessageContext): Promise<void> => {
     if (!ctx.message.forward_origin) {
       await assignFreeCredits(ctx)
 
+      if (llamaAgent.isSupportedEvent(ctx)) {
+        await llamaAgent.onEvent(ctx)
+        return
+      }
+
       if (telegramPayments.isSupportedEvent(ctx)) {
         await telegramPayments.onEvent(ctx)
         return
diff --git a/src/modules/llms/llmsBase.ts b/src/modules/llms/llmsBase.ts
index e0bd732..5d5004c 100644
--- a/src/modules/llms/llmsBase.ts
+++ b/src/modules/llms/llmsBase.ts
@@ -80,6 +80,10 @@ export abstract class LlmsBase implements PayableBot {
 
   protected abstract hasPrefix (prompt: string): string
 
+  addSubagents (subagents: SubagentBase[]): void {
+    this.subagents = subagents
+  }
+
   protected getSession (ctx: OnMessageContext | OnCallBackQueryData): LlmsSessionData {
     return (ctx.session[this.sessionDataKey as keyof BotSessionData] as LlmsSessionData)
   }
diff --git a/src/modules/llms/openaiBot.ts b/src/modules/llms/openaiBot.ts
index 91aa099..0367448 100644
--- a/src/modules/llms/openaiBot.ts
+++ b/src/modules/llms/openaiBot.ts
@@ -24,12 +24,11 @@ import {
   chatCompletion,
   streamChatCompletion
 } from './api/openai'
-import { LlamaAgent } from '../subagents'
+import { type SubagentBase } from '../subagents'
 
 export class OpenAIBot extends LlmsBase {
-  constructor (payments: BotPayments) {
-    super(payments, 'OpenAIBot', 'chatGpt')
-    this.subagents.push(new LlamaAgent(payments, 'llamaAgent'))
+  constructor (payments: BotPayments, subagents?: SubagentBase[]) {
+    super(payments, 'OpenAIBot', 'chatGpt', subagents)
     if (!config.openAi.dalle.isEnabled) {
       this.logger.warn('DALL·E 2 Image Bot is disabled in config')
     }
diff --git a/src/modules/llms/vertexBot.ts b/src/modules/llms/vertexBot.ts
index e57b931..eacf6d3 100644
--- a/src/modules/llms/vertexBot.ts
+++ b/src/modules/llms/vertexBot.ts
@@ -15,11 +15,10 @@ import { LlmsModelsEnum } from './utils/types'
 import { LlmsBase } from './llmsBase'
 import { vertexCompletion, vertexStreamCompletion } from './api/vertex'
-import { LlamaAgent } from '../subagents'
+import { type SubagentBase } from '../subagents'
 
 export class VertexBot extends LlmsBase {
-  constructor (payments: BotPayments) {
-    super(payments, 'VertexBot', 'llms')
-    this.subagents.push(new LlamaAgent(payments, 'llamaAgent'))
+  constructor (payments: BotPayments, subagents?: SubagentBase[]) {
+    super(payments, 'VertexBot', 'llms', subagents)
   }
 
   public getEstimatedPrice (ctx: any): number {
diff --git a/src/modules/subagents/llamaSubagent.ts b/src/modules/subagents/llamaSubagent.ts
index e71b79d..22031ed 100644
--- a/src/modules/subagents/llamaSubagent.ts
+++ b/src/modules/subagents/llamaSubagent.ts
@@ -31,8 +31,7 @@ export class LlamaAgent extends SubagentBase {
   public isSupportedEvent (
     ctx: OnMessageContext | OnCallBackQueryData
   ): boolean {
-    const hasPdf = this.isSupportedPdfReply(ctx)
-    return !!hasPdf || this.isSupportedPdfFile(ctx)
+    return this.isSupportedPdfFile(ctx)
   }
 
   public isSupportedSubagent (ctx: OnMessageContext | OnCallBackQueryData): boolean {
@@ -46,6 +45,7 @@ public async run (ctx: OnMessageContext | OnCallBackQueryData, msg: ChatConversation): Promise<SubagentResult> {
     const urls = this.isSupportedUrl(ctx)
+    const fileName = this.isSupportedPdfReply(ctx)
     const id = msg.id ?? 0
     if (ctx.chat?.id) {
       if (urls && urls?.length > 0) {
@@ -64,6 +64,19 @@
           collection.agentId = id
           await this.queryUrlCollection(ctx, urls[0], msg.content as string)
         }
+      } else if (fileName !== '') {
+        const collection = ctx.session.collections.activeCollections.find(c => c.fileName === fileName)
+        if (!collection) {
+          if (!ctx.session.collections.isProcessingQueue) {
+            ctx.session.collections.isProcessingQueue = true
+            await this.onCheckCollectionStatus(ctx).then(() => {
+              ctx.session.collections.isProcessingQueue = false
+            })
+          }
+        } else {
+          collection.agentId = id
+          await this.queryUrlCollection(ctx, collection.url, msg.content as string)
+        }
       }
     }
     return {
@@ -327,10 +340,15 @@
     ) {
       await this.onNotBalanceMessage(ctx)
     } else {
+      const context = collection.collectionType === 'URL'
+        ? this.completionContext.replace('%AGENT_OUTPUT%', response.completion)
+          .replace('%URL%', collection.url)
+        : appText.llamaPDFContext.replace('%AGENT_OUTPUT%', response.completion)
+          .replace('%FILE%', collection.fileName ?? '')
       session.running.push({
         id: collection.agentId ?? 0,
         name: this.name,
-        completion: this.completionContext.replace('%AGENT_OUTPUT%', response.completion).replace('%URL%', collection.url),
+        completion: context,
         status: SubagentStatus.DONE
       })
     }
diff --git a/src/utils/text.ts b/src/utils/text.ts
index ce3e4ba..45f8634 100644
--- a/src/utils/text.ts
+++ b/src/utils/text.ts
@@ -13,15 +13,6 @@ Adjust image size or how many images are generated`,
   'Your credits: $CREDITS ONE tokens. To recharge, send ONE to `$WALLET_ADDRESS`',
   maliciousPrompt: 'Your prompt has been flagged for potentially generating illegal or malicious content. If you believe there has been a mistake, please reach out to support.',
-  llamaURLContext: 'Based on the information gathered from the URL (%URL%): %AGENT_OUTPUT%'
-  // financialContext: 'Based of the financial data from this company (%COMPANY%): %AGENT_OUTPUT%'
-  // please provide a detailed explanation:
-
-  // %AGENT_OUTPUT%`
-  // '''
-  // 'This is the web crawler context of the given URL(https://deepmind.google/technologies/gemini/#gemini-1.0): %COMPLETION%'
+  llamaURLContext: 'Based on the information gathered from the URL (%URL%): %AGENT_OUTPUT%',
+  llamaPDFContext: 'Based on the information gathered from the PDF file (%FILE%): %AGENT_OUTPUT%'
 }
-
-// Edit an Image
-// To edit the image using OpenAi API, reply to a message in our chat with a picture and
-// write the prompt. Also, you can upload a photo and write the prompt in the caption.
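For reviewers, here is a minimal, self-contained sketch of the wiring pattern this diff moves to: a single `LlamaAgent` instance is created once in `src/bot.ts` and injected into each LLM bot through the constructor (or via the new `addSubagents` helper), instead of every bot constructing its own agent. The `Subagent`/`LlmsBotSketch` types below are simplified stand-ins for illustration, not the repo's actual `SubagentBase`/`LlmsBase` definitions.

```ts
// Illustrative stand-ins only; the real classes live in src/modules/subagents
// and src/modules/llms/llmsBase.ts and carry payment/session logic omitted here.
interface Subagent {
  name: string
  isSupportedEvent: (ctx: unknown) => boolean
}

class LlamaAgentSketch implements Subagent {
  constructor (public readonly name: string) {}

  // Stand-in for the PDF/URL checks the real agent performs.
  isSupportedEvent (_ctx: unknown): boolean {
    return false
  }
}

class LlmsBotSketch {
  protected subagents: Subagent[]

  // Subagents are injected rather than constructed inside each bot.
  constructor (readonly botName: string, subagents?: Subagent[]) {
    this.subagents = subagents ?? []
  }

  // Mirrors the new LlmsBase.addSubagents helper: replaces the current list.
  addSubagents (subagents: Subagent[]): void {
    this.subagents = subagents
  }
}

// One shared agent instance, wired the same way src/bot.ts now does it:
//   const llamaAgent = new LlamaAgent(payments, 'llamaService')
//   const openAiBot = new OpenAIBot(payments, [llamaAgent])
const llamaAgent = new LlamaAgentSketch('llamaService')
const openAiBot = new LlmsBotSketch('OpenAIBot', [llamaAgent])
const vertexBot = new LlmsBotSketch('VertexBot', [llamaAgent])

console.log(openAiBot.botName, vertexBot.botName, llamaAgent.name)
```

Sharing one injected instance keeps the agent's collection/session handling in a single place and lets `OpenAIBot` and `VertexBot` reuse it without each importing `LlamaAgent` directly.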