diff --git a/src/config.ts b/src/config.ts
index a917ce3..cbb12ed 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -80,15 +80,6 @@ export default {
         parseInt(process.env.TYPING_STATUS_ENABLED ?? '1')
       ),
       model: process.env.OPENAI_MODEL ?? 'gpt-3.5-turbo',
-      prefixes: {
-        chatPrefix: process.env.ASK_PREFIX
-          ? process.env.ASK_PREFIX.split(',')
-          : ['a.', '.'], // , "?", ">",
-        newPrefix: process.env.NEW_PREFIX
-          ? process.env.NEW_PREFIX.split(',')
-          : ['n.', '..'],
-        llamaPrefix: ['*']
-      },
      minimumBalance: parseInt(process.env.MIN_BALANCE ?? '0')
    }
  },
diff --git a/src/modules/llms/helpers.ts b/src/modules/llms/helpers.ts
index 108daf2..83fa373 100644
--- a/src/modules/llms/helpers.ts
+++ b/src/modules/llms/helpers.ts
@@ -22,6 +22,7 @@ export const SupportedCommands = {
 }
 
 export const MAX_TRIES = 3
+const LLAMA_PREFIX_LIST = ['*']
 
 export const isMentioned = (
   ctx: OnMessageContext | OnCallBackQueryData
@@ -40,7 +41,7 @@ export const isMentioned = (
 }
 
 export const hasLlamaPrefix = (prompt: string): string => {
-  const prefixList = config.openAi.chatGpt.prefixes.llamaPrefix
+  const prefixList = LLAMA_PREFIX_LIST
   for (let i = 0; i < prefixList.length; i++) {
     if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) {
       return prefixList[i]
diff --git a/src/modules/open-ai/helpers.ts b/src/modules/open-ai/helpers.ts
index 29dab13..c45569c 100644
--- a/src/modules/open-ai/helpers.ts
+++ b/src/modules/open-ai/helpers.ts
@@ -4,30 +4,31 @@ import { type ParseMode } from 'grammy/types'
 import { getChatModel, getChatModelPrice, getTokenNumber } from './api/openAi'
 import { type Message, type InlineKeyboardMarkup } from 'grammy/out/types'
 import { isValidUrl } from './utils/web-crawler'
-// import { llmAddUrlDocument } from '../llms/api/llmApi'
 
-export const SupportedCommands = {
-  chat: { name: 'chat' },
-  ask: { name: 'ask' },
-  vision: { name: 'vision' },
-  ask35: { name: 'ask35' },
-  new: { name: 'new' },
-  gpt4: { name: 'gpt4' },
-  ask32: { name: 'ask32' },
-  gpt: { name: 'gpt' },
-  last: { name: 'last' },
-  dalle: { name: 'dalle' },
-  dalleImg: { name: 'image' },
-  dalleShort: { name: 'img' },
-  dalleShorter: { name: 'i' },
-  genImgEn: { name: 'genImgEn' },
-  on: { name: 'on' },
-  off: { name: 'off' }
+export enum SupportedCommands {
+  chat = 'chat',
+  ask = 'ask',
+  vision = 'vision',
+  ask35 = 'ask35',
+  new = 'new',
+  gpt4 = 'gpt4',
+  ask32 = 'ask32',
+  gpt = 'gpt',
+  last = 'last',
+  dalle = 'dalle',
+  dalleImg = 'image',
+  dalleShort = 'img',
+  dalleShorter = 'i',
+  genImgEn = 'genImgEn',
+  on = 'on',
+  off = 'off'
 }
 
 export const MAX_TRIES = 3
-const DALLE_PREFIX_LIST = ['i. ', ',', 'image ', 'd.', 'img ']
+export const DALLE_PREFIX_LIST = ['i. ', ',', 'image ', 'd.', 'img ']
+export const CHAT_GPT_PREFIX_LIST = ['a.', '.']
+export const NEW_PREFIX_LIST = ['n.', '..']
 
 export const isMentioned = (
   ctx: OnMessageContext | OnCallBackQueryData
@@ -46,7 +47,7 @@ export const isMentioned = (
 }
 
 export const hasChatPrefix = (prompt: string): string => {
-  const prefixList = config.openAi.chatGpt.prefixes.chatPrefix
+  const prefixList = CHAT_GPT_PREFIX_LIST
   for (let i = 0; i < prefixList.length; i++) {
     if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) {
       return prefixList[i]
@@ -66,7 +67,7 @@ export const hasDallePrefix = (prompt: string): string => {
 }
 
 export const hasNewPrefix = (prompt: string): string => {
-  const prefixList = config.openAi.chatGpt.prefixes.newPrefix
+  const prefixList = NEW_PREFIX_LIST
   for (let i = 0; i < prefixList.length; i++) {
     if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) {
       return prefixList[i]
diff --git a/src/modules/open-ai/index.ts b/src/modules/open-ai/index.ts
index 0333346..6b8ddb5 100644
--- a/src/modules/open-ai/index.ts
+++ b/src/modules/open-ai/index.ts
@@ -2,7 +2,6 @@ import { GrammyError, InlineKeyboard } from 'grammy'
 import OpenAI from 'openai'
 import { type Logger, pino } from 'pino'
 
-import { getCommandNamePrompt } from '../1country/utils'
 import { type BotPayments } from '../payment'
 import {
   type ChatConversation,
@@ -76,7 +75,7 @@ export class OpenAIBot implements PayableBot {
     ctx: OnMessageContext | OnCallBackQueryData
   ): boolean {
     const hasCommand = ctx.hasCommand(
-      Object.values(SupportedCommands).map((command) => command.name)
+      Object.values(SupportedCommands).map((command) => command)
     )
     if (isMentioned(ctx)) {
       return true
@@ -104,10 +103,10 @@ export class OpenAIBot implements PayableBot {
       return 0
     }
     if (
-      ctx.hasCommand([SupportedCommands.dalle.name,
-        SupportedCommands.dalleImg.name,
-        SupportedCommands.dalleShort.name,
-        SupportedCommands.dalleShorter.name])
+      ctx.hasCommand([SupportedCommands.dalle,
+        SupportedCommands.dalleImg,
+        SupportedCommands.dalleShort,
+        SupportedCommands.dalleShorter])
     ) {
       const imageNumber = ctx.session.openAi.imageGen.numImages
       const imageSize = ctx.session.openAi.imageGen.imgSize
@@ -115,7 +114,7 @@ export class OpenAIBot implements PayableBot {
       const price = getDalleModelPrice(model, true, imageNumber) // cents
       return price * priceAdjustment
     }
-    if (ctx.hasCommand(SupportedCommands.genImgEn.name)) {
+    if (ctx.hasCommand(SupportedCommands.genImgEn)) {
       const imageNumber = ctx.session.openAi.imageGen.numImages
       const imageSize = ctx.session.openAi.imageGen.imgSize
       const chatModelName = ctx.session.openAi.chatGpt.model
@@ -147,7 +146,7 @@ export class OpenAIBot implements PayableBot {
       const prompt = ctx.message?.caption ?? ctx.message?.text
       if (prompt && !isNaN(+prompt)) { // && !isNaN(+prompt)
         return true
-      } else if (prompt && (ctx.chat?.type === 'private' || ctx.hasCommand(SupportedCommands.vision.name))) {
+      } else if (prompt && (ctx.chat?.type === 'private' || ctx.hasCommand(SupportedCommands.vision))) {
         return true
       }
     }
@@ -182,7 +181,7 @@ export class OpenAIBot implements PayableBot {
     }
 
     if (
-      ctx.hasCommand(SupportedCommands.chat.name) ||
+      ctx.hasCommand(SupportedCommands.chat) ||
       (ctx.message?.text?.startsWith('chat ') && ctx.chat?.type === 'private')
     ) {
       ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
@@ -191,17 +190,16 @@ export class OpenAIBot implements PayableBot {
     if (
-      ctx.hasCommand(SupportedCommands.new.name) ||
+      ctx.hasCommand(SupportedCommands.new) ||
       (ctx.message?.text?.startsWith('new ') && ctx.chat?.type === 'private')
     ) {
-      ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
       await this.onEnd(ctx)
       await this.onChat(ctx)
       return
     }
 
     if (
-      ctx.hasCommand(SupportedCommands.ask.name) ||
+      ctx.hasCommand(SupportedCommands.ask) ||
       (ctx.message?.text?.startsWith('ask ') && ctx.chat?.type === 'private')
     ) {
       ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
@@ -209,31 +207,31 @@ export class OpenAIBot implements PayableBot {
-    if (ctx.hasCommand(SupportedCommands.ask35.name)) {
+    if (ctx.hasCommand(SupportedCommands.ask35)) {
       ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_35_TURBO_16K
       await this.onChat(ctx)
       return
     }
 
-    if (ctx.hasCommand(SupportedCommands.gpt4.name)) {
+    if (ctx.hasCommand(SupportedCommands.gpt4)) {
       ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
       await this.onChat(ctx)
       return
     }
 
-    if (ctx.hasCommand(SupportedCommands.gpt.name)) {
+    if (ctx.hasCommand(SupportedCommands.gpt)) {
       ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
       await this.onChat(ctx)
       return
     }
 
-    if (ctx.hasCommand(SupportedCommands.ask32.name)) {
+    if (ctx.hasCommand(SupportedCommands.ask32)) {
       ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4_32K
       await this.onChat(ctx)
       return
     }
 
-    if (ctx.hasCommand(SupportedCommands.vision.name)) {
+    if (ctx.hasCommand(SupportedCommands.vision)) {
       const photoUrl = getUrlFromText(ctx)
       if (photoUrl) {
         const prompt = ctx.match
@@ -252,10 +250,10 @@ export class OpenAIBot implements PayableBot {
     }
 
     if (
-      ctx.hasCommand([SupportedCommands.dalle.name,
-        SupportedCommands.dalleImg.name,
-        SupportedCommands.dalleShort.name,
-        SupportedCommands.dalleShorter.name]) ||
+      ctx.hasCommand([SupportedCommands.dalle,
+        SupportedCommands.dalleImg,
+        SupportedCommands.dalleShort,
+        SupportedCommands.dalleShorter]) ||
       (ctx.message?.text?.startsWith('image ') && ctx.chat?.type === 'private')
     ) {
       let prompt = (ctx.match ? ctx.match : ctx.message?.text) as string
@@ -280,16 +278,16 @@ export class OpenAIBot implements PayableBot {
-    if (ctx.hasCommand(SupportedCommands.last.name)) {
+    if (ctx.hasCommand(SupportedCommands.last)) {
       await this.onLast(ctx)
       return
     }
 
     const text = ctx.message?.text ?? ''
-
-    if (hasNewPrefix(text) !== '') {
+    const newPrefix = hasNewPrefix(text)
+    if (newPrefix !== '') {
       await this.onEnd(ctx)
-      await this.onPrefix(ctx)
+      await this.onPrefix(ctx, newPrefix)
       return
     }
@@ -311,9 +309,9 @@ export class OpenAIBot implements PayableBot {
       }
       return
     }
-
-    if (hasChatPrefix(text) !== '') {
-      await this.onPrefix(ctx)
+    const prefix = hasChatPrefix(text)
+    if (prefix !== '') {
+      await this.onPrefix(ctx, prefix)
       return
     }
 
@@ -426,7 +424,7 @@ export class OpenAIBot implements PayableBot {
     }
   }
 
-  async onPrefix (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
+  async onPrefix (ctx: OnMessageContext | OnCallBackQueryData, prefix: string): Promise<void> {
     try {
       if (this.botSuspended) {
         ctx.transient.analytics.sessionState = RequestState.Error
@@ -436,13 +434,9 @@ export class OpenAIBot implements PayableBot {
         ctx.transient.analytics.actualResponseTime = now()
         return
       }
-      const { prompt } = getCommandNamePrompt(
-        ctx,
-        SupportedCommands
-      )
-      const prefix = hasPrefix(prompt)
+      const prompt = ctx.message?.text?.slice(prefix.length) ?? ''
       ctx.session.openAi.chatGpt.requestQueue.push(
-        await preparePrompt(ctx, prompt.slice(prefix.length))
+        await preparePrompt(ctx, prompt)
       )
       if (!ctx.session.openAi.chatGpt.isProcessingQueue) {
         ctx.session.openAi.chatGpt.isProcessingQueue = true
@@ -750,6 +744,7 @@ export class OpenAIBot implements PayableBot {
   }
 
   async onEnd (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
+    ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
     ctx.session.openAi.chatGpt.chatConversation = []
     ctx.session.openAi.chatGpt.usage = 0
     ctx.session.openAi.chatGpt.price = 0
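
Below is a small standalone sketch (not part of the diff) of the refactored prefix flow: the prefix lists become module-level constants instead of config values, and the caller passes the matched prefix into `onPrefix`, which then only has to slice it off the message text. The bot context, session, and queue plumbing are omitted; `handleMessage` and the `console.log` stand in for the real `onMessage`/`onPrefix` handlers.

```ts
// Prefix list as a plain constant, mirroring src/modules/open-ai/helpers.ts.
const CHAT_GPT_PREFIX_LIST = ['a.', '.']

// Returns the matched prefix, or '' when the prompt has none.
const hasChatPrefix = (prompt: string): string => {
  for (const prefix of CHAT_GPT_PREFIX_LIST) {
    if (prompt.toLocaleLowerCase().startsWith(prefix)) {
      return prefix
    }
  }
  return ''
}

// The caller determines which prefix matched and hands it to the handler,
// instead of the handler re-deriving it from config as before.
const handleMessage = (text: string): void => {
  const prefix = hasChatPrefix(text)
  if (prefix !== '') {
    const prompt = text.slice(prefix.length).trim()
    console.log(`onPrefix would receive prefix="${prefix}" and prompt="${prompt}"`)
  }
}

handleMessage('a. what is the weather today?')
// -> onPrefix would receive prefix="a." and prompt="what is the weather today?"
```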