
Commit

add tools logic
fegloff committed Apr 22, 2024
1 parent 02962a3 commit a54ce98
Showing 7 changed files with 37 additions and 29 deletions.
3 changes: 2 additions & 1 deletion src/modules/llms/api/athropic.ts
@@ -20,6 +20,7 @@ const API_ENDPOINT = config.llms.apiEndpoint // 'http://127.0.0.1:5000' // confi
 
 export const anthropicCompletion = async (
   conversation: ChatConversation[],
+  hastools: boolean,
   model = LlmsModelsEnum.CLAUDE_OPUS
 ): Promise<LlmCompletion> => {
   logger.info(`Handling ${model} completion`)
@@ -31,7 +32,7 @@ export const anthropicCompletion = async (
     messages: conversation.filter(c => c.model === model)
       .map(m => { return { content: m.content, role: m.role } })
   }
-  const url = `${API_ENDPOINT}/anthropic/completions`
+  const url = `${API_ENDPOINT}/anthropic/completions${hastools ? '/tools' : ''}`
   const response = await axios.post(url, data)
   const respJson = JSON.parse(response.data)
   if (response) {
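
For illustration, a minimal self-contained sketch of the URL selection this hunk introduces. The concrete endpoint value below is an assumption taken from the commented-out default in the hunk header, not the live config.llms.apiEndpoint:

// Sketch only: the endpoint value is assumed for illustration.
const API_ENDPOINT = 'http://127.0.0.1:5000'

const completionUrl = (hastools: boolean): string =>
  `${API_ENDPOINT}/anthropic/completions${hastools ? '/tools' : ''}`

console.log(completionUrl(false)) // http://127.0.0.1:5000/anthropic/completions
console.log(completionUrl(true))  // http://127.0.0.1:5000/anthropic/completions/tools

So anthropicCompletion keeps a single code path and only the route changes when the caller asks for tool use; whether the backend actually exposes /anthropic/completions/tools is assumed here, not shown in this diff.
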
17 changes: 11 additions & 6 deletions src/modules/llms/claudeBot.ts
@@ -37,6 +37,7 @@ export class ClaudeBot extends LlmsBase {
       SupportedCommands.opus,
       SupportedCommands.opusShort,
       SupportedCommands.claudeShort,
+      SupportedCommands.claudeShortTools,
       SupportedCommands.claudeSonnet,
       SupportedCommands.sonnet,
       SupportedCommands.sonnetShort,
@@ -75,9 +76,10 @@ export class ClaudeBot extends LlmsBase {
 
   async chatCompletion (
     conversation: ChatConversation[],
-    model: LlmsModelsEnum
+    model: LlmsModelsEnum,
+    hasTools: boolean
   ): Promise<LlmCompletion> {
-    return await anthropicCompletion(conversation, model)
+    return await anthropicCompletion(conversation, hasTools, model)
   }
 
   public async onEvent (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
@@ -87,23 +89,26 @@
       this.logger.warn(`### unsupported command ${ctx.message?.text}`)
       return
     }
 
+    if (ctx.hasCommand([SupportedCommands.claudeShortTools])) {
+      await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS, false, true)
+      return
+    }
     if (ctx.hasCommand([
       SupportedCommands.claudeOpus,
       SupportedCommands.opus,
       SupportedCommands.opusShort,
       SupportedCommands.claudeShort]) ||
       (hasClaudeOpusPrefix(ctx.message?.text ?? '') !== '')
     ) {
-      await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS, true)
+      await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS, false, false) // true)
       return
     }
     if (ctx.hasCommand([SupportedCommands.claudeSonnet, SupportedCommands.sonnet, SupportedCommands.sonnetShort])) {
-      await this.onChat(ctx, LlmsModelsEnum.CLAUDE_SONNET, true)
+      await this.onChat(ctx, LlmsModelsEnum.CLAUDE_SONNET, true, false)
       return
     }
     if (ctx.hasCommand([SupportedCommands.claudeHaiku, SupportedCommands.haikuShort])) {
-      await this.onChat(ctx, LlmsModelsEnum.CLAUDE_HAIKU, false)
+      await this.onChat(ctx, LlmsModelsEnum.CLAUDE_HAIKU, false, false)
     }
   }
 }
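
A minimal sketch of the resulting dispatch, using the command strings from helpers.ts below; the model names are placeholders standing in for LlmsModelsEnum members, and the real handler works on the grammY context, which is omitted here (haiku is left out to avoid guessing its command strings):

// Sketch only: placeholder model names; the real code passes LlmsModelsEnum members to onChat.
interface ChatCall { model: string, stream: boolean, usesTools: boolean }

function routeClaudeCommand (command: string): ChatCall | undefined {
  switch (command) {
    case 'ct': // claudeShortTools: Opus through the tools endpoint, non-streaming
      return { model: 'CLAUDE_OPUS', stream: false, usesTools: true }
    case 'opus':
    case 'o':
    case 'c': // Opus commands: streaming now off, no tools
      return { model: 'CLAUDE_OPUS', stream: false, usesTools: false }
    case 'claudes':
    case 'sonnet':
    case 's': // Sonnet keeps streaming, no tools
      return { model: 'CLAUDE_SONNET', stream: true, usesTools: false }
    default:
      return undefined
  }
}

console.log(routeClaudeCommand('ct')) // { model: 'CLAUDE_OPUS', stream: false, usesTools: true }
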
23 changes: 12 additions & 11 deletions src/modules/llms/llmsBase.ts
@@ -79,7 +79,8 @@ export abstract class LlmsBase implements PayableBot {
 
   protected abstract chatCompletion (
     conversation: ChatConversation[],
-    model: LlmsModelsEnum
+    model: LlmsModelsEnum,
+    usesTools: boolean
   ): Promise<LlmCompletion>
 
   protected abstract hasPrefix (prompt: string): string
@@ -109,7 +110,7 @@
       session.requestQueue.push(msg)
       if (!session.isProcessingQueue) {
         session.isProcessingQueue = true
-        await this.onChatRequestHandler(ctx, true).then(() => {
+        await this.onChatRequestHandler(ctx, true, false).then(() => {
           session.isProcessingQueue = false
         })
       }
@@ -126,7 +127,7 @@
     return supportedAgents
   }
 
-  async onChat (ctx: OnMessageContext | OnCallBackQueryData, model: string, stream: boolean): Promise<void> {
+  async onChat (ctx: OnMessageContext | OnCallBackQueryData, model: string, stream: boolean, usesTools: boolean): Promise<void> {
     const session = this.getSession(ctx)
     try {
       if (this.botSuspended) {
@@ -146,7 +147,7 @@
       })
       if (!session.isProcessingQueue) {
        session.isProcessingQueue = true
-        await this.onChatRequestHandler(ctx, stream).then(() => {
+        await this.onChatRequestHandler(ctx, stream, usesTools).then(() => {
          session.isProcessingQueue = false
        })
      }
@@ -165,7 +166,7 @@
     }
   }
 
-  async onChatRequestHandler (ctx: OnMessageContext | OnCallBackQueryData, stream: boolean): Promise<void> {
+  async onChatRequestHandler (ctx: OnMessageContext | OnCallBackQueryData, stream: boolean, usesTools: boolean): Promise<void> {
     const session = this.getSession(ctx)
     while (session.requestQueue.length > 0) {
       try {
@@ -223,9 +224,9 @@
         }
         let result: { price: number, chat: ChatConversation[] } = { price: 0, chat: [] }
         if (stream) {
-          result = await this.completionGen(payload)
+          result = await this.completionGen(payload, usesTools)
         } else {
-          result = await this.promptGen(payload)
+          result = await this.promptGen(payload, usesTools)
         }
         session.chatConversation = [...result.chat]
         if (
@@ -259,7 +260,7 @@
     )
   }
 
-  private async completionGen (data: ChatPayload, msgId?: number, outputFormat = 'text'): Promise< { price: number, chat: ChatConversation[] }> {
+  private async completionGen (data: ChatPayload, usesTools: boolean, msgId?: number, outputFormat = 'text'): Promise< { price: number, chat: ChatConversation[] }> {
     const { conversation, ctx, model } = data
     try {
       if (!msgId) {
@@ -304,7 +305,7 @@
          }
        }
      } else {
-        const response = await this.chatCompletion(conversation, model as LlmsModelsEnum)
+        const response = await this.chatCompletion(conversation, model as LlmsModelsEnum, usesTools)
        conversation.push({
          role: 'assistant',
          content: response.completion?.content ?? '',
@@ -326,7 +327,7 @@
     }
   }
 
-  private async promptGen (data: ChatPayload): Promise<{ price: number, chat: ChatConversation[] }> {
+  private async promptGen (data: ChatPayload, usesTools: boolean): Promise<{ price: number, chat: ChatConversation[] }> {
     const { conversation, ctx, model } = data
     if (!ctx.chat?.id) {
       throw new Error('internal error')
@@ -335,7 +336,7 @@
       await ctx.reply('...', { message_thread_id: ctx.message?.message_thread_id })
     ).message_id
     ctx.chatAction = 'typing'
-    const response = await this.chatCompletion(conversation, model as LlmsModelsEnum)
+    const response = await this.chatCompletion(conversation, model as LlmsModelsEnum, usesTools)
     if (response.completion) {
       await ctx.api.editMessageText(
         ctx.chat.id,
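
The base class now threads the flag through every path. A minimal, self-contained sketch of the propagation (simplified types, no grammY context, no session or billing logic):

// Sketch of how usesTools travels from onChat down to the subclass; types are simplified stand-ins.
type Conversation = Array<{ role: string, content: string }>

abstract class LlmsBaseSketch {
  // Each bot decides how to honour usesTools when it calls its provider API.
  protected abstract chatCompletion (conversation: Conversation, model: string, usesTools: boolean): Promise<string>

  async onChat (conversation: Conversation, model: string, stream: boolean, usesTools: boolean): Promise<string> {
    return await this.onChatRequestHandler(conversation, model, stream, usesTools)
  }

  private async onChatRequestHandler (conversation: Conversation, model: string, stream: boolean, usesTools: boolean): Promise<string> {
    // Streaming and non-streaming paths forward the same flag.
    return stream
      ? await this.completionGen(conversation, model, usesTools)
      : await this.promptGen(conversation, model, usesTools)
  }

  private async completionGen (conversation: Conversation, model: string, usesTools: boolean): Promise<string> {
    return await this.chatCompletion(conversation, model, usesTools)
  }

  private async promptGen (conversation: Conversation, model: string, usesTools: boolean): Promise<string> {
    return await this.chatCompletion(conversation, model, usesTools)
  }
}

Note that the queue-processing call in the first hunk above hard-codes onChatRequestHandler(ctx, true, false), so requests that reach the queue on that path never use tools.
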
2 changes: 1 addition & 1 deletion src/modules/llms/llmsBot.ts
@@ -75,7 +75,7 @@ export class LlmsBot extends LlmsBase {
     }
     if (ctx.hasCommand(SupportedCommands.j2Ultra)) {
       this.updateSessionModel(ctx, LlmsModelsEnum.J2_ULTRA)
-      await this.onChat(ctx, LlmsModelsEnum.J2_ULTRA, false)
+      await this.onChat(ctx, LlmsModelsEnum.J2_ULTRA, false, false)
     }
   }
 }
12 changes: 6 additions & 6 deletions src/modules/llms/openaiBot.ts
@@ -133,12 +133,12 @@ export class OpenAIBot extends LlmsBase {
       ctx.chat?.type === 'private')
     ) {
       this.updateSessionModel(ctx, LlmsModelsEnum.GPT_4)
-      await this.onChat(ctx, LlmsModelsEnum.GPT_4, true)
+      await this.onChat(ctx, LlmsModelsEnum.GPT_4, true, false)
       return
     }
 
     if (ctx.hasCommand([SupportedCommands.pdf, SupportedCommands.ctx]) && this.checkModel(ctx)) {
-      await this.onChat(ctx, ctx.session.currentModel, true)
+      await this.onChat(ctx, ctx.session.currentModel, true, false)
       return
     }
 
@@ -149,19 +149,19 @@
     ) {
       await this.onStop(ctx)
       this.updateSessionModel(ctx, LlmsModelsEnum.GPT_4)
-      await this.onChat(ctx, LlmsModelsEnum.GPT_4, true)
+      await this.onChat(ctx, LlmsModelsEnum.GPT_4, true, false)
       return
     }
 
     if (ctx.hasCommand(SupportedCommands.ask35)) {
       this.updateSessionModel(ctx, LlmsModelsEnum.GPT_35_TURBO_16K)
-      await this.onChat(ctx, LlmsModelsEnum.GPT_35_TURBO_16K, true)
+      await this.onChat(ctx, LlmsModelsEnum.GPT_35_TURBO_16K, true, false)
       return
     }
 
     if (ctx.hasCommand(SupportedCommands.ask32)) {
       this.updateSessionModel(ctx, LlmsModelsEnum.GPT_4_32K)
-      await this.onChat(ctx, LlmsModelsEnum.GPT_4_32K, true)
+      await this.onChat(ctx, LlmsModelsEnum.GPT_4_32K, true, false)
       return
     }
 
@@ -172,7 +172,7 @@
 
     if (ctx.chat?.type === 'private' || session.isFreePromptChatGroups) {
       this.updateSessionModel(ctx, LlmsModelsEnum.GPT_4)
-      await this.onChat(ctx, LlmsModelsEnum.GPT_4, true)
+      await this.onChat(ctx, LlmsModelsEnum.GPT_4, true, false)
       return
     }
 
1 change: 1 addition & 0 deletions src/modules/llms/utils/helpers.ts
@@ -21,6 +21,7 @@ export enum SupportedCommands {
   opus = 'opus',
   opusShort = 'o',
   claudeSonnet = 'claudes',
+  claudeShortTools = 'ct',
   claudeShort = 'c',
   sonnet = 'sonnet',
   sonnetShort = 's',
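
The new claudeShortTools entry gives the tools flow its own short command; judging by the ClaudeBot changes above, a message starting with /ct is, presumably, the only command that sets usesTools to true.
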
8 changes: 4 additions & 4 deletions src/modules/llms/vertexBot.ts
@@ -83,22 +83,22 @@ export class VertexBot extends LlmsBase {
     }
     if (ctx.hasCommand([SupportedCommands.bard, SupportedCommands.bardF]) || hasBardPrefix(ctx.message?.text ?? '')) {
       this.updateSessionModel(ctx, LlmsModelsEnum.BISON)
-      await this.onChat(ctx, LlmsModelsEnum.BISON, false)
+      await this.onChat(ctx, LlmsModelsEnum.BISON, false, false)
       return
     }
     if (ctx.hasCommand([SupportedCommands.gemini, SupportedCommands.gShort]) || (hasGeminiPrefix(ctx.message?.text ?? '') !== '')) {
       this.updateSessionModel(ctx, LlmsModelsEnum.GEMINI)
-      await this.onChat(ctx, LlmsModelsEnum.GEMINI, true)
+      await this.onChat(ctx, LlmsModelsEnum.GEMINI, true, false)
       return
     }
     if (ctx.hasCommand([SupportedCommands.gemini15, SupportedCommands.g15short])) {
       this.updateSessionModel(ctx, LlmsModelsEnum.GEMINI_15)
-      await this.onChat(ctx, LlmsModelsEnum.GEMINI_15, true)
+      await this.onChat(ctx, LlmsModelsEnum.GEMINI_15, true, false)
       // return
     }
 
     if (ctx.hasCommand([SupportedCommands.pdf, SupportedCommands.ctx]) && this.checkModel(ctx)) {
-      await this.onChat(ctx, ctx.session.currentModel, true)
+      await this.onChat(ctx, ctx.session.currentModel, true, false)
     }
   }
 }
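
Outside ClaudeBot, every updated call site in this commit simply passes usesTools = false (VertexBot, OpenAIBot, LlmsBot, and the queue path in LlmsBase), so their behaviour should be unchanged; only the Claude /ct flow reaches the new /tools route.
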
