optimize anthropic tool call + add sonnet tool command + fix new command issue
fegloff committed Apr 29, 2024
1 parent a54ce98 commit 3dbb85b
Showing 5 changed files with 107 additions and 21 deletions.
69 changes: 66 additions & 3 deletions src/modules/llms/api/athropic.ts
@@ -7,6 +7,7 @@ import config from '../../../config'
import { type OnCallBackQueryData, type OnMessageContext, type ChatConversation } from '../../types'
import { type LlmCompletion } from './llmApi'
import { LlmsModelsEnum } from '../utils/types'
import { sleep } from '../../sd-images/utils'

const logger = pino({
name: 'anthropic - llmsBot',
@@ -20,7 +21,6 @@ const API_ENDPOINT = config.llms.apiEndpoint // 'http://127.0.0.1:5000' // confi

export const anthropicCompletion = async (
conversation: ChatConversation[],
- hastools: boolean,
model = LlmsModelsEnum.CLAUDE_OPUS
): Promise<LlmCompletion> => {
logger.info(`Handling ${model} completion`)
@@ -32,14 +32,13 @@ export const anthropicCompletion = async (
messages: conversation.filter(c => c.model === model)
.map(m => { return { content: m.content, role: m.role } })
}
- const url = `${API_ENDPOINT}/anthropic/completions${hastools ? '/tools' : ''}`
+ const url = `${API_ENDPOINT}/anthropic/completions`
const response = await axios.post(url, data)
const respJson = JSON.parse(response.data)
if (response) {
const totalInputTokens = respJson.usage.input_tokens
const totalOutputTokens = respJson.usage.output_tokens
const completion = respJson.content

return {
completion: {
content: completion[0].text,
@@ -162,3 +161,67 @@ export const anthropicStreamCompletion = async (
outputTokens: parseInt(totalOutputTokens, 10)
}
}

export const toolsChatCompletion = async (
conversation: ChatConversation[],
model = LlmsModelsEnum.CLAUDE_OPUS
): Promise<LlmCompletion> => {
logger.info(`Handling ${model} completion`)
const input = {
model,
stream: false,
system: config.openAi.chatGpt.chatCompletionContext,
max_tokens: +config.openAi.chatGpt.maxTokens,
messages: conversation.filter(c => c.model === model)
.map(m => { return { content: m.content, role: m.role } })
}
const url = `${API_ENDPOINT}/anthropic/completions/tools`
const response = await axios.post(url, input)
const respJson = response.data
if (respJson) {
const toolId = respJson.id
let data
let counter = 1
while (true) {
const resp = await axios.get(`${API_ENDPOINT}/anthropic/completions/tools/${toolId}`)
data = resp.data
if (data.status === 'DONE' || counter > 20) {
break
}
counter++
await sleep(3000)
}
console.log('here', data.status, counter)
if (data.status === 'DONE' && !data.error && counter < 20) {
const totalInputTokens = data.data.usage.input_tokens
const totalOutputTokens = data.data.usage.output_tokens
const completion = data.data.content
return {
completion: {
content: completion[0].text,
role: 'assistant',
model
},
usage: totalOutputTokens + totalInputTokens,
price: 0,
inputTokens: totalInputTokens,
outputTokens: totalOutputTokens
}
} else {
return {
completion: {
content: 'Timeout error',
role: 'assistant',
model
},
usage: 0,
price: 0
}
}
}
return {
completion: undefined,
usage: 0,
price: 0
}
}
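For orientation, here is a minimal sketch of how the new toolsChatCompletion helper might be driven on its own. The import paths, the sample prompt, and the exact ChatConversation fields are assumptions for illustration (the real type may require more fields); only the polling behaviour noted in the comments comes from the code above.

import { toolsChatCompletion } from './modules/llms/api/athropic'
import { LlmsModelsEnum } from './modules/llms/utils/types'

const demo = async (): Promise<void> => {
  // The helper filters the conversation by message.model, so each turn must carry
  // the same model that is passed as the second argument.
  const conversation = [
    { role: 'user', content: 'What is the weather in Lisbon today?', model: LlmsModelsEnum.CLAUDE_OPUS }
  ]
  // Submits the job to /anthropic/completions/tools, then polls
  // /anthropic/completions/tools/:id every 3 seconds for up to 20 attempts (about a minute),
  // returning a 'Timeout error' completion if the job never reaches DONE.
  const result = await toolsChatCompletion(conversation, LlmsModelsEnum.CLAUDE_OPUS)
  if (result.completion) {
    console.log(result.completion.content)
    console.log(`tokens (input + output): ${result.usage}`)
  }
}

demo().catch(console.error)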
32 changes: 27 additions & 5 deletions src/modules/llms/claudeBot.ts
@@ -6,13 +6,12 @@ import {
} from '../types'
import {
hasClaudeOpusPrefix,
- isMentioned,
SupportedCommands
} from './utils/helpers'
import { type LlmCompletion } from './api/llmApi'
import { LlmsModelsEnum } from './utils/types'

- import { anthropicCompletion, anthropicStreamCompletion } from './api/athropic'
+ import { anthropicCompletion, anthropicStreamCompletion, toolsChatCompletion } from './api/athropic'
import { LlmsBase } from './llmsBase'

const models = [
@@ -33,17 +32,21 @@ export class ClaudeBot extends LlmsBase {
public isSupportedEvent (
ctx: OnMessageContext | OnCallBackQueryData
): boolean {
- const hasCommand = ctx.hasCommand([SupportedCommands.claudeOpus,
+ const hasCommand = ctx.hasCommand([
+ SupportedCommands.claudeOpus,
SupportedCommands.opus,
SupportedCommands.opusShort,
SupportedCommands.claudeShort,
SupportedCommands.claudeShortTools,
SupportedCommands.sonnetShorTools,
SupportedCommands.sonnetTools,
SupportedCommands.claudeSonnet,
SupportedCommands.sonnet,
SupportedCommands.sonnetShort,
SupportedCommands.claudeHaiku,
SupportedCommands.haikuShort])
- if (isMentioned(ctx)) {

+ if (ctx.hasCommand(SupportedCommands.new) && this.checkModel(ctx)) {
return true
}
const chatPrefix = this.hasPrefix(ctx.message?.text ?? '')
@@ -79,7 +82,10 @@ export class ClaudeBot extends LlmsBase {
model: LlmsModelsEnum,
hasTools: boolean
): Promise<LlmCompletion> {
- return await anthropicCompletion(conversation, hasTools, model)
+ if (hasTools) {
+ return await toolsChatCompletion(conversation, model)
+ }
+ return await anthropicCompletion(conversation, model)
}

public async onEvent (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
@@ -90,24 +96,40 @@
return
}
if (ctx.hasCommand([SupportedCommands.claudeShortTools])) {
this.updateSessionModel(ctx, LlmsModelsEnum.CLAUDE_OPUS)
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS, false, true)
return
}
if (ctx.hasCommand([SupportedCommands.sonnetTools, SupportedCommands.sonnetShorTools])) {
this.updateSessionModel(ctx, LlmsModelsEnum.CLAUDE_SONNET)
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_SONNET, false, true)
return
}
if (
(ctx.hasCommand(SupportedCommands.new) && this.checkModel(ctx))
) {
await this.onStop(ctx)
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS, true, false)
return
}
if (ctx.hasCommand([
SupportedCommands.claudeOpus,
SupportedCommands.opus,
SupportedCommands.opusShort,
SupportedCommands.claudeShort]) ||
(hasClaudeOpusPrefix(ctx.message?.text ?? '') !== '')
) {
this.updateSessionModel(ctx, LlmsModelsEnum.CLAUDE_OPUS)
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS, false, false) // true)
return
}
if (ctx.hasCommand([SupportedCommands.claudeSonnet, SupportedCommands.sonnet, SupportedCommands.sonnetShort])) {
this.updateSessionModel(ctx, LlmsModelsEnum.CLAUDE_SONNET)
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_SONNET, true, false)
return
}
if (ctx.hasCommand([SupportedCommands.claudeHaiku, SupportedCommands.haikuShort])) {
this.updateSessionModel(ctx, LlmsModelsEnum.CLAUDE_HAIKU)
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_HAIKU, false, false)
}
}
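Taken together, the branches above give the following routing for the Claude commands. This is an illustrative reconstruction, not code from the commit: the command strings are the SupportedCommands values visible in the helpers.ts hunk further down (the claudeOpus alias is declared outside that hunk and is omitted), and the boolean mirrors the tools flag passed to onChat.

import { LlmsModelsEnum } from './utils/types'

// command string -> [model, usesTools]; the stream flag passed to onChat is omitted here.
const claudeRouting: Record<string, [LlmsModelsEnum, boolean]> = {
  opus: [LlmsModelsEnum.CLAUDE_OPUS, false],
  o: [LlmsModelsEnum.CLAUDE_OPUS, false],
  c: [LlmsModelsEnum.CLAUDE_OPUS, false],
  ctool: [LlmsModelsEnum.CLAUDE_OPUS, true],
  sonnett: [LlmsModelsEnum.CLAUDE_SONNET, true],
  stool: [LlmsModelsEnum.CLAUDE_SONNET, true],
  claudes: [LlmsModelsEnum.CLAUDE_SONNET, false],
  sonnet: [LlmsModelsEnum.CLAUDE_SONNET, false],
  s: [LlmsModelsEnum.CLAUDE_SONNET, false],
  haiku: [LlmsModelsEnum.CLAUDE_HAIKU, false],
  h: [LlmsModelsEnum.CLAUDE_HAIKU, false]
}
// /new additionally resets the session (onStop) before starting a CLAUDE_OPUS chat,
// but only when this.checkModel(ctx) passes; checkModel is defined in LlmsBase and not shown here.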
13 changes: 6 additions & 7 deletions src/modules/llms/llmsBase.ts
@@ -344,14 +344,12 @@ export abstract class LlmsBase implements PayableBot {
response.completion.content as string
)
conversation.push(response.completion)
- // const price = getPromptPrice(completion, data);
- // this.logger.info(
- //   `streamChatCompletion result = tokens: ${
- //     price.promptTokens + price.completionTokens
- //   } | ${model} | price: ${price}¢`
- // );
+ const price = getPromptPrice(response, data)
+ this.logger.info(
+ `chatCompletion result = tokens: ${price.promptTokens + price.completionTokens} | ${model} | price: ${price.price}¢` // }
+ )
return {
- price: 0,
+ price: price.price,
chat: conversation
}
}
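The restored logging and the price field depend on a getPromptPrice(response, data) helper defined elsewhere in llmsBase.ts and not touched by this diff. Its return shape can be inferred from the call site (promptTokens, completionTokens, and a price in cents); the sketch below only illustrates that shape, and the per-token rate parameters are made up for the example.

// Hypothetical stand-in for the real getPromptPrice; names and pricing inputs are assumptions.
interface PromptPrice {
  promptTokens: number
  completionTokens: number
  price: number // cents
}

const getPromptPriceSketch = (
  inputTokens: number,
  outputTokens: number,
  inputCentsPerToken: number,
  outputCentsPerToken: number
): PromptPrice => ({
  promptTokens: inputTokens,
  completionTokens: outputTokens,
  price: inputTokens * inputCentsPerToken + outputTokens * outputCentsPerToken
})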
@@ -380,6 +378,7 @@

async onStop (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
const session = this.getSession(ctx)
console.log('SESSION', session)
for (const c of ctx.session.collections.activeCollections) {
this.logger.info(`Deleting collection ${c.collectionName}`)
await deleteCollection(c.collectionName)
10 changes: 5 additions & 5 deletions src/modules/llms/openaiBot.ts
@@ -62,14 +62,14 @@ export class OpenAIBot extends LlmsBase {
SupportedCommands.gpt,
SupportedCommands.ask32,
SupportedCommands.ask35,
- SupportedCommands.new,
SupportedCommands.last
])

+ if (ctx.hasCommand(SupportedCommands.new) && this.checkModel(ctx)) {
+ return true
+ }
if (isMentioned(ctx)) {
return true
}

const chatPrefix = this.hasPrefix(ctx.message?.text ?? '')
if (chatPrefix !== '') {
return true
@@ -143,9 +143,9 @@ }
}

if (
- ctx.hasCommand(SupportedCommands.new) ||
+ (ctx.hasCommand(SupportedCommands.new) ||
hasNewPrefix(ctx.message?.text ?? '') ||
- (ctx.message?.text?.startsWith('new ') && ctx.chat?.type === 'private')
+ (ctx.message?.text?.startsWith('new ') && ctx.chat?.type === 'private') && this.checkModel(ctx))
) {
await this.onStop(ctx)
this.updateSessionModel(ctx, LlmsModelsEnum.GPT_4)
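Both here and in ClaudeBot, the /new command is now gated by this.checkModel(ctx), which presumably addresses the "fix new command issue" part of the commit message: /new should be handled only by the bot that owns the session's current model, rather than by every bot that lists the command. checkModel lives in LlmsBase and is not shown in this diff; the sketch below is only a guess at the intent, using placeholder names.

// Placeholder sketch; the real checkModel and the real session shape in LlmsBase will differ.
class NewCommandGateSketch {
  constructor (private readonly ownedModels: string[]) {}

  // Handle /new only if the chat's active model belongs to this bot.
  checkModel (currentSessionModel: string): boolean {
    return this.ownedModels.includes(currentSessionModel)
  }
}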
4 changes: 3 additions & 1 deletion src/modules/llms/utils/helpers.ts
@@ -21,9 +21,11 @@ export enum SupportedCommands {
opus = 'opus',
opusShort = 'o',
claudeSonnet = 'claudes',
- claudeShortTools = 'ct',
+ claudeShortTools = 'ctool',
claudeShort = 'c',
sonnet = 'sonnet',
sonnetTools = 'sonnett',
sonnetShorTools = 'stool',
sonnetShort = 's',
claudeHaiku = 'haiku',
haikuShort = 'h',
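Net effect of the enum changes: the Opus tool shortcut is renamed from 'ct' to 'ctool', and two Sonnet tool commands are introduced. A trivial check of the resulting values, assuming a sibling import path for the helpers module:

import { SupportedCommands } from './helpers'

console.log(SupportedCommands.claudeShortTools) // 'ctool' (was 'ct')
console.log(SupportedCommands.sonnetTools)      // 'sonnett'
console.log(SupportedCommands.sonnetShorTools)  // 'stool'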
