
Commit

refactor openAI command list + fix onPrefix method that was having issues with new prefix
fegloff committed Jan 18, 2024
1 parent 9c43b03 commit c686b84
Showing 4 changed files with 54 additions and 66 deletions.
9 changes: 0 additions & 9 deletions src/config.ts
@@ -80,15 +80,6 @@ export default {
parseInt(process.env.TYPING_STATUS_ENABLED ?? '1')
),
model: process.env.OPENAI_MODEL ?? 'gpt-3.5-turbo',
prefixes: {
chatPrefix: process.env.ASK_PREFIX
? process.env.ASK_PREFIX.split(',')
: ['a.', '.'], // , "?", ">",
newPrefix: process.env.NEW_PREFIX
? process.env.NEW_PREFIX.split(',')
: ['n.', '..'],
llamaPrefix: ['*']
},
minimumBalance: parseInt(process.env.MIN_BALANCE ?? '0')
}
},
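
With the prefixes block gone from config.openAi.chatGpt, the ASK_PREFIX and NEW_PREFIX environment variables are no longer read; the prefix lists now live as constants next to the helpers that use them (see the two helper diffs below). A minimal sketch of the replacement constants, using the values introduced in this commit:

// src/modules/open-ai/helpers.ts: replaces config.openAi.chatGpt.prefixes.chatPrefix / newPrefix
export const CHAT_GPT_PREFIX_LIST = ['a.', '.']
export const NEW_PREFIX_LIST = ['n.', '..']
export const DALLE_PREFIX_LIST = ['i. ', ',', 'image ', 'd.', 'img ']

// src/modules/llms/helpers.ts: replaces config.openAi.chatGpt.prefixes.llamaPrefix
const LLAMA_PREFIX_LIST = ['*']
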
3 changes: 2 additions & 1 deletion src/modules/llms/helpers.ts
@@ -22,6 +22,7 @@ export const SupportedCommands = {
}

export const MAX_TRIES = 3
const LLAMA_PREFIX_LIST = ['*']

export const isMentioned = (
ctx: OnMessageContext | OnCallBackQueryData
@@ -40,7 +41,7 @@ export const isMentioned = (
}

export const hasLlamaPrefix = (prompt: string): string => {
const prefixList = config.openAi.chatGpt.prefixes.llamaPrefix
const prefixList = LLAMA_PREFIX_LIST
for (let i = 0; i < prefixList.length; i++) {
if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) {
return prefixList[i]
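
With the config lookup removed, hasLlamaPrefix scans the hardcoded LLAMA_PREFIX_LIST directly. A self-contained sketch of the matcher plus a caller, assuming (as the callers in index.ts suggest) that it returns an empty string when nothing matches; the sample prompt is illustrative only:

const LLAMA_PREFIX_LIST = ['*']

// Returns the matched prefix, or '' when the prompt starts with none of them.
export const hasLlamaPrefix = (prompt: string): string => {
  for (const prefix of LLAMA_PREFIX_LIST) {
    if (prompt.toLocaleLowerCase().startsWith(prefix)) {
      return prefix
    }
  }
  return ''
}

// Example: strip the '*' prefix before handing the prompt to the llama handler.
const prompt = '* explain validator staking'
const llamaPrefix = hasLlamaPrefix(prompt)
if (llamaPrefix !== '') {
  console.log(prompt.slice(llamaPrefix.length).trim()) // "explain validator staking"
}
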
43 changes: 22 additions & 21 deletions src/modules/open-ai/helpers.ts
@@ -4,30 +4,31 @@ import { type ParseMode } from 'grammy/types'
import { getChatModel, getChatModelPrice, getTokenNumber } from './api/openAi'
import { type Message, type InlineKeyboardMarkup } from 'grammy/out/types'
import { isValidUrl } from './utils/web-crawler'
// import { llmAddUrlDocument } from '../llms/api/llmApi'

export const SupportedCommands = {
chat: { name: 'chat' },
ask: { name: 'ask' },
vision: { name: 'vision' },
ask35: { name: 'ask35' },
new: { name: 'new' },
gpt4: { name: 'gpt4' },
ask32: { name: 'ask32' },
gpt: { name: 'gpt' },
last: { name: 'last' },
dalle: { name: 'dalle' },
dalleImg: { name: 'image' },
dalleShort: { name: 'img' },
dalleShorter: { name: 'i' },
genImgEn: { name: 'genImgEn' },
on: { name: 'on' },
off: { name: 'off' }
export enum SupportedCommands {
chat = 'chat',
ask = 'ask',
vision = 'vision',
ask35 = 'ask35',
new = 'new',
gpt4 = 'gpt4',
ask32 = 'ask32',
gpt = 'gpt',
last = 'last',
dalle = 'dalle',
dalleImg = 'image',
dalleShort = 'img',
dalleShorter = 'i',
genImgEn = 'genImgEn',
on = 'on',
off = 'off'
}

export const MAX_TRIES = 3

const DALLE_PREFIX_LIST = ['i. ', ',', 'image ', 'd.', 'img ']
export const DALLE_PREFIX_LIST = ['i. ', ',', 'image ', 'd.', 'img ']
export const CHAT_GPT_PREFIX_LIST = ['a.', '.']
export const NEW_PREFIX_LIST = ['n.', '..']

export const isMentioned = (
ctx: OnMessageContext | OnCallBackQueryData
@@ -46,7 +47,7 @@ export const isMentioned = (
}

export const hasChatPrefix = (prompt: string): string => {
const prefixList = config.openAi.chatGpt.prefixes.chatPrefix
const prefixList = CHAT_GPT_PREFIX_LIST
for (let i = 0; i < prefixList.length; i++) {
if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) {
return prefixList[i]
@@ -66,7 +67,7 @@ export const hasDallePrefix = (prompt: string): string => {
}

export const hasNewPrefix = (prompt: string): string => {
const prefixList = config.openAi.chatGpt.prefixes.newPrefix
const prefixList = NEW_PREFIX_LIST
for (let i = 0; i < prefixList.length; i++) {
if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) {
return prefixList[i]
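
The switch from an object of { name } records to a string enum is what allows command names to be passed around directly: Object.values on a string enum yields the string values themselves, so the .map((command) => command.name) step in index.ts (next diff) is no longer needed. A small sketch of that behaviour, with the enum abbreviated:

export enum SupportedCommands {
  chat = 'chat',
  ask = 'ask',
  dalle = 'dalle'
  // ...remaining commands as listed in the diff above
}

// String enums have no reverse mapping, so Object.values returns only the names.
const commandNames: string[] = Object.values(SupportedCommands)
console.log(commandNames) // ['chat', 'ask', 'dalle']

// Before: Object.values(SupportedCommands).map((command) => command.name)
// After:  Object.values(SupportedCommands), or the no-op map((command) => command) kept in index.ts
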
65 changes: 30 additions & 35 deletions src/modules/open-ai/index.ts
@@ -2,7 +2,6 @@ import { GrammyError, InlineKeyboard } from 'grammy'
import OpenAI from 'openai'
import { type Logger, pino } from 'pino'

import { getCommandNamePrompt } from '../1country/utils'
import { type BotPayments } from '../payment'
import {
type ChatConversation,
@@ -76,7 +75,7 @@ export class OpenAIBot implements PayableBot
ctx: OnMessageContext | OnCallBackQueryData
): boolean {
const hasCommand = ctx.hasCommand(
Object.values(SupportedCommands).map((command) => command.name)
Object.values(SupportedCommands).map((command) => command)
)
if (isMentioned(ctx)) {
return true
@@ -104,18 +103,18 @@ export class OpenAIBot implements PayableBot {
return 0
}
if (
ctx.hasCommand([SupportedCommands.dalle.name,
SupportedCommands.dalleImg.name,
SupportedCommands.dalleShort.name,
SupportedCommands.dalleShorter.name])
ctx.hasCommand([SupportedCommands.dalle,
SupportedCommands.dalleImg,
SupportedCommands.dalleShort,
SupportedCommands.dalleShorter])
) {
const imageNumber = ctx.session.openAi.imageGen.numImages
const imageSize = ctx.session.openAi.imageGen.imgSize
const model = getDalleModel(imageSize)
const price = getDalleModelPrice(model, true, imageNumber) // cents
return price * priceAdjustment
}
if (ctx.hasCommand(SupportedCommands.genImgEn.name)) {
if (ctx.hasCommand(SupportedCommands.genImgEn)) {
const imageNumber = ctx.session.openAi.imageGen.numImages
const imageSize = ctx.session.openAi.imageGen.imgSize
const chatModelName = ctx.session.openAi.chatGpt.model
@@ -147,7 +146,7 @@ export class OpenAIBot implements PayableBot {
const prompt = ctx.message?.caption ?? ctx.message?.text
if (prompt && !isNaN(+prompt)) { // && !isNaN(+prompt)
return true
} else if (prompt && (ctx.chat?.type === 'private' || ctx.hasCommand(SupportedCommands.vision.name))) {
} else if (prompt && (ctx.chat?.type === 'private' || ctx.hasCommand(SupportedCommands.vision))) {
return true
}
}
@@ -182,7 +181,7 @@ export class OpenAIBot implements PayableBot {
}

if (
ctx.hasCommand(SupportedCommands.chat.name) ||
ctx.hasCommand(SupportedCommands.chat) ||
(ctx.message?.text?.startsWith('chat ') && ctx.chat?.type === 'private')
) {
ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
@@ -191,49 +190,48 @@
}

if (
ctx.hasCommand(SupportedCommands.new.name) ||
ctx.hasCommand(SupportedCommands.new) ||
(ctx.message?.text?.startsWith('new ') && ctx.chat?.type === 'private')
) {
ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
await this.onEnd(ctx)
await this.onChat(ctx)
return
}

if (
ctx.hasCommand(SupportedCommands.ask.name) ||
ctx.hasCommand(SupportedCommands.ask) ||
(ctx.message?.text?.startsWith('ask ') && ctx.chat?.type === 'private')
) {
ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
await this.onChat(ctx)
return
}

if (ctx.hasCommand(SupportedCommands.ask35.name)) {
if (ctx.hasCommand(SupportedCommands.ask35)) {
ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_35_TURBO_16K
await this.onChat(ctx)
return
}

if (ctx.hasCommand(SupportedCommands.gpt4.name)) {
if (ctx.hasCommand(SupportedCommands.gpt4)) {
ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
await this.onChat(ctx)
return
}

if (ctx.hasCommand(SupportedCommands.gpt.name)) {
if (ctx.hasCommand(SupportedCommands.gpt)) {
ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
await this.onChat(ctx)
return
}

if (ctx.hasCommand(SupportedCommands.ask32.name)) {
if (ctx.hasCommand(SupportedCommands.ask32)) {
ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4_32K
await this.onChat(ctx)
return
}

if (ctx.hasCommand(SupportedCommands.vision.name)) {
if (ctx.hasCommand(SupportedCommands.vision)) {
const photoUrl = getUrlFromText(ctx)
if (photoUrl) {
const prompt = ctx.match
@@ -252,10 +250,10 @@ export class OpenAIBot implements PayableBot {
}

if (
ctx.hasCommand([SupportedCommands.dalle.name,
SupportedCommands.dalleImg.name,
SupportedCommands.dalleShort.name,
SupportedCommands.dalleShorter.name]) ||
ctx.hasCommand([SupportedCommands.dalle,
SupportedCommands.dalleImg,
SupportedCommands.dalleShort,
SupportedCommands.dalleShorter]) ||
(ctx.message?.text?.startsWith('image ') && ctx.chat?.type === 'private')
) {
let prompt = (ctx.match ? ctx.match : ctx.message?.text) as string
@@ -280,16 +278,16 @@ export class OpenAIBot implements PayableBot {
return
}

if (ctx.hasCommand(SupportedCommands.last.name)) {
if (ctx.hasCommand(SupportedCommands.last)) {
await this.onLast(ctx)
return
}

const text = ctx.message?.text ?? ''

if (hasNewPrefix(text) !== '') {
const newPrefix = hasNewPrefix(text)
if (newPrefix !== '') {
await this.onEnd(ctx)
await this.onPrefix(ctx)
await this.onPrefix(ctx, newPrefix)
return
}

@@ -311,9 +309,9 @@ export class OpenAIBot implements PayableBot {
}
return
}

if (hasChatPrefix(text) !== '') {
await this.onPrefix(ctx)
const prefix = hasChatPrefix(text)
if (prefix !== '') {
await this.onPrefix(ctx, prefix)
return
}

@@ -426,7 +424,7 @@ export class OpenAIBot implements PayableBot {
}
}

async onPrefix (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
async onPrefix (ctx: OnMessageContext | OnCallBackQueryData, prefix: string): Promise<void> {
try {
if (this.botSuspended) {
ctx.transient.analytics.sessionState = RequestState.Error
@@ -436,13 +434,9 @@ export class OpenAIBot implements PayableBot {
ctx.transient.analytics.actualResponseTime = now()
return
}
const { prompt } = getCommandNamePrompt(
ctx,
SupportedCommands
)
const prefix = hasPrefix(prompt)
const prompt = ctx.message?.text?.slice(prefix.length) ?? ''
ctx.session.openAi.chatGpt.requestQueue.push(
await preparePrompt(ctx, prompt.slice(prefix.length))
await preparePrompt(ctx, prompt)
)
if (!ctx.session.openAi.chatGpt.isProcessingQueue) {
ctx.session.openAi.chatGpt.isProcessingQueue = true
@@ -750,6 +744,7 @@ export class OpenAIBot implements PayableBot {
}

async onEnd (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4
ctx.session.openAi.chatGpt.chatConversation = []
ctx.session.openAi.chatGpt.usage = 0
ctx.session.openAi.chatGpt.price = 0
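
The onPrefix change is the fix named in the commit title: instead of re-deriving the prefix inside the method via getCommandNamePrompt and hasPrefix and then slicing it off again, the caller now passes the prefix it already matched, and the prompt is cut from the raw message text exactly once; the old re-derivation is presumably what misbehaved with the new '..' prefix. A condensed sketch of the new flow, with simplified stand-ins for the bot's context and request queue (the real types come from the module's imports):

// Simplified stand-ins; the real OnMessageContext and chatGpt.requestQueue are richer.
interface SketchCtx { message?: { text?: string }, queue: string[] }

const NEW_PREFIX_LIST = ['n.', '..']
const CHAT_GPT_PREFIX_LIST = ['a.', '.']

const matchPrefix = (text: string, prefixes: string[]): string =>
  prefixes.find((p) => text.toLocaleLowerCase().startsWith(p)) ?? ''

// New shape: the matched prefix arrives as an argument and is stripped once.
async function onPrefix (ctx: SketchCtx, prefix: string): Promise<void> {
  const prompt = ctx.message?.text?.slice(prefix.length) ?? ''
  ctx.queue.push(prompt)
}

// Caller, mirroring onMessage: match once, then hand the prefix to onPrefix.
// In the real handler a new-prefix match also runs this.onEnd(ctx) first to reset the session.
async function route (ctx: SketchCtx): Promise<void> {
  const text = ctx.message?.text ?? ''
  const newPrefix = matchPrefix(text, NEW_PREFIX_LIST)
  if (newPrefix !== '') { await onPrefix(ctx, newPrefix); return }
  const chatPrefix = matchPrefix(text, CHAT_GPT_PREFIX_LIST)
  if (chatPrefix !== '') { await onPrefix(ctx, chatPrefix) }
}

void route({ message: { text: '..start a fresh thread' }, queue: [] })

The added line in onEnd also resets the session model to ChatGPTModelsEnum.GPT_4 whenever the conversation is cleared.
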

