improve agent processing performance
fegloff committed Apr 15, 2024
1 parent f455545 commit da64b5f
Showing 2 changed files with 17 additions and 46 deletions.
39 changes: 13 additions & 26 deletions src/modules/llms/llmsBase.ts
@@ -11,8 +11,7 @@ import {
   RequestState,
   type BotSessionData,
   type LlmsSessionData,
-  type SubagentResult,
-  SubagentStatus
+  type SubagentResult
 } from '../types'
 import { appText } from '../../utils/text'
 import { chatService } from '../../database/services'
@@ -86,10 +85,19 @@ export abstract class LlmsBase implements PayableBot {
   }
 
   protected async runSubagents (ctx: OnMessageContext | OnCallBackQueryData, msg: ChatConversation): Promise<void> {
-    const result = await Promise.all(this.subagents.map(async (agent: SubagentBase) =>
+    const session = this.getSession(ctx)
+    await Promise.all(this.subagents.map(async (agent: SubagentBase) =>
       await agent.run(ctx, msg)))
-    const agentsCompletion = result.filter(agent => agent.status === SubagentStatus.PROCESSING)
-    await this.onAgentRequestHandler(ctx, msg, agentsCompletion)
+    const agentsCompletion = SubagentBase.getRunningSubagents(ctx, msg.id ?? 0)
+    if (agentsCompletion && agentsCompletion.length > 0) {
+      session.requestQueue.push(msg)
+      if (!session.isProcessingQueue) {
+        session.isProcessingQueue = true
+        await this.onChatRequestHandler(ctx, true).then(() => {
+          session.isProcessingQueue = false
+        })
+      }
+    }
   }
 
   isSupportedSubagent (ctx: OnMessageContext | OnCallBackQueryData): number {
@@ -141,27 +149,6 @@ export abstract class LlmsBase implements PayableBot {
     }
   }
 
-  async onAgentRequestHandler (ctx: OnMessageContext | OnCallBackQueryData, msg: ChatConversation, subagents: SubagentResult[]): Promise<void> {
-    const session = this.getSession(ctx)
-    await Promise.all(subagents.map(async (subagent: SubagentResult) => {
-      for (const agent of this.subagents) {
-        if (agent.name === subagent.name) {
-          await agent.onCheckAgentStatus(ctx)
-        }
-      }
-    }))
-    const agentsCompletion = SubagentBase.getRunningSubagents(ctx, msg.id ?? 0)
-    if (agentsCompletion && agentsCompletion.length > 0) {
-      session.requestQueue.push(msg)
-      if (!session.isProcessingQueue) {
-        session.isProcessingQueue = true
-        await this.onChatRequestHandler(ctx, true).then(() => {
-          session.isProcessingQueue = false
-        })
-      }
-    }
-  }
-
   async onChatRequestHandler (ctx: OnMessageContext | OnCallBackQueryData, stream: boolean): Promise<void> {
     const session = this.getSession(ctx)
     while (session.requestQueue.length > 0) {
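
The net effect in llmsBase.ts: runSubagents now runs every subagent, asks SubagentBase.getRunningSubagents whether any are still working on this message, and if so pushes the message onto the session request queue and drains it through onChatRequestHandler, guarded by the isProcessingQueue flag; the intermediate onAgentRequestHandler hop is gone. A minimal, self-contained sketch of that single-drainer queue pattern follows; Session, ChatMsg, handleOne, processQueue and enqueue are names invented for this sketch, not the repository's API:

// Illustrative sketch only: a stripped-down model of the queue pattern that
// runSubagents now relies on. All names here are invented for the example.
interface ChatMsg { id: number, content: string }

interface Session {
  requestQueue: ChatMsg[]
  isProcessingQueue: boolean
}

const session: Session = { requestQueue: [], isProcessingQueue: false }

async function handleOne (msg: ChatMsg): Promise<void> {
  // Stand-in for the per-message work done by onChatRequestHandler.
  await new Promise(resolve => setTimeout(resolve, 10))
  console.log(`processed ${msg.id}: ${msg.content}`)
}

async function processQueue (): Promise<void> {
  // Drain whatever is queued; messages pushed meanwhile are picked up too.
  while (session.requestQueue.length > 0) {
    const msg = session.requestQueue.shift()
    if (msg) await handleOne(msg)
  }
}

async function enqueue (msg: ChatMsg): Promise<void> {
  session.requestQueue.push(msg)
  // The flag ensures only one drain loop is active at a time.
  if (!session.isProcessingQueue) {
    session.isProcessingQueue = true
    await processQueue().then(() => { session.isProcessingQueue = false })
  }
}

// Two messages arrive while the first is still being handled; a single drain
// loop processes both.
void Promise.all([
  enqueue({ id: 1, content: 'first' }),
  enqueue({ id: 2, content: 'second' })
])
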
24 changes: 4 additions & 20 deletions src/modules/subagents/llamaSubagent.ts
@@ -45,12 +45,11 @@ export class LlamaAgent extends SubagentBase {
   }
 
   public async run (ctx: OnMessageContext | OnCallBackQueryData, msg: ChatConversation): Promise<SubagentResult> {
-    const session = this.getSession(ctx)
     const urls = this.isSupportedUrl(ctx)
     const id = msg.id ?? 0
     if (ctx.chat?.id) {
       if (urls && urls?.length > 0) {
-        const collection = ctx.session.collections.activeCollections.find(c => c.url === urls[0])
+        let collection = ctx.session.collections.activeCollections.find(c => c.url === urls[0])
         if (!collection) {
           await this.addUrlToCollection(ctx, ctx.chat?.id, urls[0], msg.content as string)
           if (!ctx.session.collections.isProcessingQueue) {
@@ -59,24 +58,12 @@
               ctx.session.collections.isProcessingQueue = false
             })
           }
-        }
+        } else {
+          collection = ctx.session.collections.activeCollections.find(c => c.url === urls[0])
+        }
         if (collection) {
           collection.agentId = id
           await this.queryUrlCollection(ctx, urls[0], msg.content as string)
         }
-        const agent: SubagentResult = {
-          id,
-          name: this.name,
-          completion: '',
-          status: SubagentStatus.PROCESSING
-        }
-        session.subagentsRequestQueue.push(agent)
-        if (!session.isProcessingQueue) {
-          session.isProcessingQueue = true
-          await this.onCheckAgentStatus(ctx).then(() => {
-            session.isProcessingQueue = false
-          })
-        }
-        return agent
       }
     }
     return {
@@ -267,9 +254,6 @@
             { link_preview_options: { is_disabled: true } })
             .catch(async (e) => { await this.onError(ctx, e) })
         }
-        await this.queryUrlCollection(ctx, collection.url, collection.prompt ?? '')
-        // await this.queryUrlCollection(ctx, collection.url ?? '',
-        //   collection.prompt ?? 'summary')
       }
     } else if (result.price < 0) {
       if (collection.msgId) {
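
The net effect in llamaSubagent.ts: run no longer builds a PROCESSING SubagentResult, pushes it onto the subagents request queue and polls onCheckAgentStatus; it registers a collection for the URL when needed, and when one is available tags it with the message id and queries it. A minimal, self-contained sketch of that find-or-create-then-query flow follows; the Collection type and both helpers are simplified stand-ins invented for the sketch (the real addUrlToCollection hands the URL to an asynchronous collections queue):

// Illustrative sketch only: a simplified model of the flow run() follows after
// this change. Types and helpers are invented stand-ins, not the real module.
interface Collection { url: string, agentId?: number }

const activeCollections: Collection[] = []

async function addUrlToCollection (url: string): Promise<void> {
  // Register the URL so later lookups find a collection for it.
  activeCollections.push({ url })
}

async function queryUrlCollection (url: string, prompt: string): Promise<string> {
  // Stand-in for querying the indexed collection.
  return `answer for "${prompt}" from ${url}`
}

async function run (url: string, prompt: string, msgId: number): Promise<void> {
  // Reuse an existing collection for this URL, or register it on first sight.
  let collection = activeCollections.find(c => c.url === url)
  if (!collection) {
    await addUrlToCollection(url)
    collection = activeCollections.find(c => c.url === url)
  }
  if (collection) {
    // Tag the collection with the originating message id, then query it,
    // instead of queueing a PROCESSING placeholder and polling for it.
    collection.agentId = msgId
    console.log(await queryUrlCollection(url, prompt))
  }
}

void run('https://example.com', 'what is this page about?', 42)
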
