add llmsBase class, and vertexBot, claudeBot, llmBot derived classes
fegloff committed Apr 5, 2024
1 parent 28304b7 commit cd4f284
Showing 8 changed files with 734 additions and 45 deletions.
17 changes: 17 additions & 0 deletions src/bot.ts
@@ -27,6 +27,8 @@ import { VoiceMemo } from './modules/voice-memo'
// import { QRCodeBot } from './modules/qrcode/QRCodeBot'
// import { SDImagesBot } from './modules/sd-images'
import { OpenAIBot } from './modules/open-ai'
import { ClaudeBot } from './modules/llms/claudeBot'
import { VertexBot } from './modules/llms/vertexBot'
import { OneCountryBot } from './modules/1country'
import { WalletConnect } from './modules/walletconnect'
import { BotPayments } from './modules/payment'
@@ -248,6 +250,8 @@ const walletConnect = new WalletConnect()
const payments = new BotPayments()
const schedule = new BotSchedule(bot)
const openAiBot = new OpenAIBot(payments)
const claudeBot = new ClaudeBot(payments)
const vertexBot = new VertexBot(payments)
const oneCountryBot = new OneCountryBot(payments)
const translateBot = new TranslateBot()
const telegramPayments = new TelegramPayments(payments)
@@ -372,6 +376,8 @@ const PayableBots: Record<string, PayableBotConfig> = {
textToSpeech: { bot: textToSpeechBot },
voiceToVoiceGPTBot: { bot: voiceToVoiceGPTBot },
voiceToText: { bot: voiceToTextBot },
claudeBot: { bot: claudeBot },
vertexBot: { bot: vertexBot },
openAiBot: {
enabled: (ctx: OnMessageContext) => ctx.session.openAi.imageGen.isEnabled,
bot: openAiBot
@@ -467,6 +473,16 @@ const onCallback = async (ctx: OnCallBackQueryData): Promise<void> => {
// return
// }

if (vertexBot.isSupportedEvent(ctx)) {
await vertexBot.onEvent(ctx)
return
}

if (claudeBot.isSupportedEvent(ctx)) {
await claudeBot.onEvent(ctx)
return
}

if (openAiBot.isSupportedEvent(ctx)) {
await openAiBot.onEvent(ctx, (e) => {
logger.error(e)
@@ -575,6 +591,7 @@ bot.command('love', async (ctx) => {
bot.command('stop', async (ctx) => {
logger.info('/stop command')
await openAiBot.onStop(ctx as OnMessageContext)
await claudeBot.onStop(ctx as OnMessageContext)
ctx.session.translate.enable = false
ctx.session.translate.languages = []
ctx.session.oneCountry.lastDomain = ''
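Taken together, the bot.ts hunks above follow the usual wiring pattern for a new payable bot: each new bot is instantiated with the shared BotPayments instance, registered in PayableBots, checked with isSupportedEvent inside onCallback before the OpenAI fallback, and (for claudeBot) given a chance to clean up on /stop. The following is a minimal, self-contained sketch of that first-match-wins dispatch loop; it is not part of the commit, and Ctx and SimpleBot are simplified stand-ins for the real OnMessageContext / OnCallBackQueryData and bot types.

// Sketch only, not part of this commit. "Ctx" and "SimpleBot" are simplified
// stand-ins for the repo's context and bot types.
interface Ctx { text: string }

interface SimpleBot {
  isSupportedEvent: (ctx: Ctx) => boolean
  onEvent: (ctx: Ctx) => Promise<void>
}

// The first matching bot handles the update, mirroring the if/return chain in onCallback.
async function dispatch (ctx: Ctx, bots: SimpleBot[]): Promise<boolean> {
  for (const bot of bots) {
    if (bot.isSupportedEvent(ctx)) {
      await bot.onEvent(ctx)
      return true
    }
  }
  return false
}

// Usage: order matters — per the diff, vertexBot and claudeBot are checked before openAiBot.
// await dispatch(ctx, [vertexBot, claudeBot, openAiBot])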
96 changes: 96 additions & 0 deletions src/modules/llms/claudeBot.ts
@@ -0,0 +1,96 @@
import { type BotPayments } from '../payment'
import {
type OnMessageContext,
type OnCallBackQueryData,
type ChatConversation
} from '../types'
import {
hasClaudeOpusPrefix,
isMentioned,
SupportedCommands
} from './helpers'
import { type LlmCompletion } from './api/llmApi'
import { LlmsModelsEnum } from './types'

import { anthropicCompletion, anthropicStreamCompletion } from './api/athropic'
import { LlmsBase } from './llmsBase'
export class ClaudeBot extends LlmsBase {
constructor (payments: BotPayments) {
super(payments, 'ClaudeBot')
}

public getEstimatedPrice (ctx: any): number {
return 0
}

public isSupportedEvent (
ctx: OnMessageContext | OnCallBackQueryData
): boolean {
const hasCommand = ctx.hasCommand([SupportedCommands.claudeOpus,
SupportedCommands.opus,
SupportedCommands.opusShort,
SupportedCommands.claudeShort,
SupportedCommands.claudeSonnet,
SupportedCommands.sonnet,
SupportedCommands.sonnetShort,
SupportedCommands.claudeHaiku,
SupportedCommands.haikuShort])
if (isMentioned(ctx)) {
return true
}
const chatPrefix = this.hasPrefix(ctx.message?.text ?? '')
if (chatPrefix !== '') {
return true
}
return hasCommand
}

hasPrefix (prompt: string): string {
return (
hasClaudeOpusPrefix(prompt)
)
}

async chatStreamCompletion (
conversation: ChatConversation[],
model: LlmsModelsEnum,
ctx: OnMessageContext | OnCallBackQueryData,
msgId: number,
limitTokens: boolean): Promise<LlmCompletion> {
return await anthropicStreamCompletion(
conversation,
model as LlmsModelsEnum,
ctx,
msgId,
true // telegram messages have a character limit
)
}

async chatCompletion (
conversation: ChatConversation[],
model: LlmsModelsEnum
): Promise<LlmCompletion> {
return await anthropicCompletion(conversation, model)
}

public async onEvent (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
ctx.transient.analytics.module = this.module
const isSupportedEvent = this.isSupportedEvent(ctx)
if (!isSupportedEvent && ctx.chat?.type !== 'private') {
this.logger.warn(`### unsupported command ${ctx.message?.text}`)
return
}

if (ctx.hasCommand([SupportedCommands.claudeOpus, SupportedCommands.opus, SupportedCommands.opusShort, SupportedCommands.claudeShort]) || (hasClaudeOpusPrefix(ctx.message?.text ?? '') !== '')) {
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS, true)
return
}
if (ctx.hasCommand([SupportedCommands.claudeSonnet, SupportedCommands.sonnet, SupportedCommands.sonnetShort])) {
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_SONNET, true)
return
}
if (ctx.hasCommand([SupportedCommands.claudeHaiku, SupportedCommands.haikuShort])) {
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_HAIKU, false)
}
}
}
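The new llmsBase.ts is among the files not rendered in this view, so the shape of the base class has to be inferred from what ClaudeBot overrides and calls (hasPrefix, chatStreamCompletion, chatCompletion, this.onChat, this.module, this.logger, plus the onStop call in bot.ts). The following is only an inferred sketch, not the actual implementation; the logger choice and the concrete method bodies are assumptions.

// Inferred sketch only — llmsBase.ts is not shown in this excerpt; the names and
// member shapes below are deduced from ClaudeBot above and may differ from the real file.
import pino, { type Logger } from 'pino' // assumption: same pino logger used by the other modules
import { type BotPayments } from '../payment'
import {
  type OnMessageContext,
  type OnCallBackQueryData,
  type ChatConversation
} from '../types'
import { type LlmCompletion } from './api/llmApi'
import { type LlmsModelsEnum } from './types'

export abstract class LlmsBase {
  protected readonly payments: BotPayments
  protected readonly module: string
  protected readonly logger: Logger

  constructor (payments: BotPayments, module: string) {
    this.payments = payments
    this.module = module
    this.logger = pino().child({ module })
  }

  // PayableBot surface implemented by each subclass (see ClaudeBot above).
  public abstract getEstimatedPrice (ctx: any): number
  public abstract isSupportedEvent (ctx: OnMessageContext | OnCallBackQueryData): boolean
  public abstract onEvent (ctx: OnMessageContext | OnCallBackQueryData): Promise<void>

  // Each subclass declares which chat prefixes it claims.
  abstract hasPrefix (prompt: string): string

  // Provider-specific completions, streaming and non-streaming.
  abstract chatStreamCompletion (
    conversation: ChatConversation[],
    model: LlmsModelsEnum,
    ctx: OnMessageContext | OnCallBackQueryData,
    msgId: number,
    limitTokens: boolean
  ): Promise<LlmCompletion>

  abstract chatCompletion (
    conversation: ChatConversation[],
    model: LlmsModelsEnum
  ): Promise<LlmCompletion>

  // Shared flow invoked by the subclasses' onEvent handlers. Placeholder body: the real
  // method presumably queues the prompt, handles billing, and delegates to
  // chatStreamCompletion / chatCompletion.
  protected async onChat (
    ctx: OnMessageContext | OnCallBackQueryData,
    model: LlmsModelsEnum,
    stream: boolean
  ): Promise<void> {
    this.logger.info(`onChat: model=${model}, stream=${String(stream)}`)
  }

  // bot.ts calls claudeBot.onStop(ctx) on /stop; presumably this clears per-chat conversation state.
  public async onStop (ctx: OnMessageContext): Promise<void> {
    this.logger.info(`onStop: chat=${ctx.chat?.id}`)
  }
}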
10 changes: 5 additions & 5 deletions src/modules/llms/helpers.ts
@@ -221,11 +221,11 @@ export const sendMessage = async (
return await ctx.reply(msg, extras)
}

export const hasPrefix = (prompt: string): string => {
return (
hasBardPrefix(prompt) || hasLlamaPrefix(prompt) || hasClaudeOpusPrefix(prompt) || hasGeminiPrefix(prompt)
)
}
// export const hasPrefix = (prompt: string): string => {
// return (
// hasBardPrefix(prompt) || hasLlamaPrefix(prompt) || hasClaudeOpusPrefix(prompt) || hasGeminiPrefix(prompt)
// )
// }

export const getPromptPrice = (completion: LlmCompletion, data: ChatPayload, updateSession = true): { price: number, promptTokens: number, completionTokens: number } => {
const { ctx, model } = data
78 changes: 40 additions & 38 deletions src/modules/llms/index.ts
@@ -21,11 +21,7 @@ import {
addUrlToCollection,
getMinBalance,
getPromptPrice,
hasBardPrefix,
hasClaudeOpusPrefix,
hasGeminiPrefix,
hasLlamaPrefix,
hasPrefix,
hasUrl,
isMentioned,
limitPrompt,
@@ -63,6 +59,12 @@ export class LlmsBot implements PayableBot {
return 0
}

hasPrefix (prompt: string): string {
return (
hasLlamaPrefix(prompt)
)
}

public isSupportedEvent (
ctx: OnMessageContext | OnCallBackQueryData
): boolean {
@@ -72,7 +74,7 @@
if (isMentioned(ctx)) {
return true
}
const chatPrefix = hasPrefix(ctx.message?.text ?? '')
const chatPrefix = this.hasPrefix(ctx.message?.text ?? '')
const hasUrl = this.isSupportedUrlReply(ctx)
const hasPdf = this.isSupportedPdfReply(ctx)
if (chatPrefix !== '') {
@@ -111,10 +113,10 @@
return
}

if (hasBardPrefix(ctx.message?.text ?? '') !== '') {
await this.onPrefix(ctx, LlmsModelsEnum.BISON)
return
}
// if (hasBardPrefix(ctx.message?.text ?? '') !== '') {
// await this.onPrefix(ctx, LlmsModelsEnum.BISON)
// return
// }

if (hasLlamaPrefix(ctx.message?.text ?? '') !== '') {
await this.onCurrentCollection(ctx)
@@ -126,30 +128,30 @@
return
}

if (ctx.hasCommand(SupportedCommands.bard) || ctx.hasCommand(SupportedCommands.bardF)) {
await this.onChat(ctx, LlmsModelsEnum.BISON)
return
}
if (ctx.hasCommand([SupportedCommands.gemini, SupportedCommands.gShort]) || (hasGeminiPrefix(ctx.message?.text ?? '') !== '')) {
await this.onChat(ctx, LlmsModelsEnum.GEMINI)
return
}
if (ctx.hasCommand([SupportedCommands.claudeOpus, SupportedCommands.opus, SupportedCommands.opusShort, SupportedCommands.claudeShort]) || (hasClaudeOpusPrefix(ctx.message?.text ?? '') !== '')) {
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS)
return
}
if (ctx.hasCommand([SupportedCommands.claudeSonnet, SupportedCommands.sonnet, SupportedCommands.sonnetShort])) {
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_SONNET)
return
}
if (ctx.hasCommand([SupportedCommands.claudeHaiku, SupportedCommands.haikuShort])) {
await this.onChat(ctx, LlmsModelsEnum.CLAUDE_HAIKU)
return
}
if (ctx.hasCommand(SupportedCommands.bard) || ctx.hasCommand(SupportedCommands.bardF)) {
await this.onChat(ctx, LlmsModelsEnum.BISON)
return
}
// if (ctx.hasCommand(SupportedCommands.bard) || ctx.hasCommand(SupportedCommands.bardF)) {
// await this.onChat(ctx, LlmsModelsEnum.BISON)
// return
// }
// if (ctx.hasCommand([SupportedCommands.gemini, SupportedCommands.gShort]) || (hasGeminiPrefix(ctx.message?.text ?? '') !== '')) {
// await this.onChat(ctx, LlmsModelsEnum.GEMINI)
// return
// }
// if (ctx.hasCommand([SupportedCommands.claudeOpus, SupportedCommands.opus, SupportedCommands.opusShort, SupportedCommands.claudeShort]) || (hasClaudeOpusPrefix(ctx.message?.text ?? '') !== '')) {
// await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS)
// return
// }
// if (ctx.hasCommand([SupportedCommands.claudeSonnet, SupportedCommands.sonnet, SupportedCommands.sonnetShort])) {
// await this.onChat(ctx, LlmsModelsEnum.CLAUDE_SONNET)
// return
// }
// if (ctx.hasCommand([SupportedCommands.claudeHaiku, SupportedCommands.haikuShort])) {
// await this.onChat(ctx, LlmsModelsEnum.CLAUDE_HAIKU)
// return
// }
// if (ctx.hasCommand(SupportedCommands.bard) || ctx.hasCommand(SupportedCommands.bardF)) {
// await this.onChat(ctx, LlmsModelsEnum.BISON)
// return
// }

if (this.isSupportedUrlReply(ctx)) {
await this.onUrlReplyHandler(ctx)
@@ -166,10 +168,10 @@
return
}

if (ctx.hasCommand(SupportedCommands.j2Ultra)) {
await this.onChat(ctx, LlmsModelsEnum.J2_ULTRA)
return
}
// if (ctx.hasCommand(SupportedCommands.j2Ultra)) {
// await this.onChat(ctx, LlmsModelsEnum.J2_ULTRA)
// return
// }

if (ctx.hasCommand(SupportedCommands.ctx)) {
await this.onCurrentCollection(ctx)
@@ -696,7 +698,7 @@ export class LlmsBot implements PayableBot {
ctx,
SupportedCommands
)
const prefix = hasPrefix(prompt)
const prefix = this.hasPrefix(prompt)
ctx.session.llms.requestQueue.push({
content: await preparePrompt(ctx, prompt.slice(prefix.length)),
model
[The remaining changed files, including the new src/modules/llms/llmsBase.ts and src/modules/llms/vertexBot.ts, were not rendered in this view.]
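vertexBot.ts is one of those unrendered files, but the Gemini and Bison branches commented out of index.ts above are the ones bot.ts now routes to vertexBot, so it presumably mirrors ClaudeBot. A speculative sketch follows: the command names and model enum values are taken from the removed branches, while the './api/vertex' helper names are assumptions and may not match the repo.

// Speculative sketch only — vertexBot.ts is not shown in this excerpt. The structure is
// copied from ClaudeBot above; the './api/vertex' helper names are assumed.
import { type BotPayments } from '../payment'
import {
  type OnMessageContext,
  type OnCallBackQueryData,
  type ChatConversation
} from '../types'
import { hasGeminiPrefix, isMentioned, SupportedCommands } from './helpers'
import { type LlmCompletion } from './api/llmApi'
import { LlmsModelsEnum } from './types'
import { vertexCompletion, vertexStreamCompletion } from './api/vertex' // assumed module and helpers
import { LlmsBase } from './llmsBase'

export class VertexBot extends LlmsBase {
  constructor (payments: BotPayments) {
    super(payments, 'VertexBot')
  }

  public getEstimatedPrice (ctx: any): number {
    return 0
  }

  public isSupportedEvent (ctx: OnMessageContext | OnCallBackQueryData): boolean {
    // The real class presumably also claims the bard/bison commands removed from index.ts above.
    const hasCommand = ctx.hasCommand([SupportedCommands.gemini, SupportedCommands.gShort])
    if (isMentioned(ctx)) return true
    return hasCommand || this.hasPrefix(ctx.message?.text ?? '') !== ''
  }

  hasPrefix (prompt: string): string {
    return hasGeminiPrefix(prompt)
  }

  async chatStreamCompletion (
    conversation: ChatConversation[],
    model: LlmsModelsEnum,
    ctx: OnMessageContext | OnCallBackQueryData,
    msgId: number,
    limitTokens: boolean
  ): Promise<LlmCompletion> {
    return await vertexStreamCompletion(conversation, model, ctx, msgId, limitTokens)
  }

  async chatCompletion (conversation: ChatConversation[], model: LlmsModelsEnum): Promise<LlmCompletion> {
    return await vertexCompletion(conversation, model)
  }

  public async onEvent (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
    ctx.transient.analytics.module = this.module
    if (ctx.hasCommand([SupportedCommands.gemini, SupportedCommands.gShort]) || hasGeminiPrefix(ctx.message?.text ?? '') !== '') {
      await this.onChat(ctx, LlmsModelsEnum.GEMINI, true)
    }
  }
}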
