Commit
Merge pull request #327 from harmony-one/vertex-bard-integration
bot reply message logic
theofandrich authored Oct 6, 2023
2 parents fd691f6 + fe5af2e commit 3c10c39
Showing 12 changed files with 691 additions and 511 deletions.
36 changes: 18 additions & 18 deletions package-lock.json

Some generated files are not rendered by default.

14 changes: 3 additions & 11 deletions src/bot.ts
@@ -31,8 +31,6 @@ import { OneCountryBot } from './modules/1country'
import { WalletConnect } from './modules/walletconnect'
import { BotPayments } from './modules/payment'
import { BotSchedule } from './modules/schedule'
import { LlmsBot } from './modules/llms'
import { DocumentHandler } from './modules/document-handler'
import config from './config'
import { commandsHelpText, FEEDBACK, LOVE, MODELS, SUPPORT, TERMS, LANG } from './constants'
import prometheusRegister, { PrometheusMetrics } from './metrics/prometheus'
@@ -43,7 +41,6 @@ import { autoRetry } from '@grammyjs/auto-retry'
import { run } from '@grammyjs/runner'
import { runBotHeartBit } from './monitoring/monitoring'
import { type BotPaymentLog } from './database/stats.service'
// import { getChatMemberInfo } from './modules/open-ai/utils/web-crawler'
import { TelegramPayments } from './modules/telegram_payment'
import * as Sentry from '@sentry/node'
import * as Events from 'events'
@@ -209,7 +206,9 @@ function createInitialSessionData (): BotSessionData {
collections: {
activeCollections: [],
collectionRequestQueue: [],
isProcessingQueue: false
isProcessingQueue: false,
currentCollection: '',
collectionConversation: []
},
llms: {
model: config.llms.model,
@@ -244,8 +243,6 @@ const schedule = new BotSchedule(bot)
const openAiBot = new OpenAIBot(payments)
const oneCountryBot = new OneCountryBot(payments)
const translateBot = new TranslateBot()
const llmsBot = new LlmsBot(payments)
const documentBot = new DocumentHandler()
const telegramPayments = new TelegramPayments(payments)
const voiceTranslateBot = new VoiceTranslateBot(payments)
const textToSpeechBot = new TextToSpeechBot(payments)
@@ -361,18 +358,13 @@ const PayableBots: Record<string, PayableBotConfig> = {
sdImagesBot: { bot: sdImagesBot },
voiceTranslate: { bot: voiceTranslateBot },
voiceMemo: { bot: voiceMemo },
documentBot: { bot: documentBot },
translateBot: { bot: translateBot },
textToSpeech: { bot: textToSpeechBot },
voiceToText: { bot: voiceToTextBot },
openAiBot: {
enabled: (ctx: OnMessageContext) => ctx.session.openAi.imageGen.isEnabled,
bot: openAiBot
},
llmsBot: {
enabled: (ctx: OnMessageContext) => ctx.session.openAi.imageGen.isEnabled,
bot: llmsBot
},
oneCountryBot: { bot: oneCountryBot }
}

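For context, the collections slice built in createInitialSessionData now starts with two extra fields, currentCollection and collectionConversation. A minimal sketch of the shape implied by these initial values (the interface and element types below are assumptions, not part of the diff):

interface CollectionsSessionData {
  activeCollections: unknown[]        // collections attached to the chat
  collectionRequestQueue: unknown[]   // pending collection requests
  isProcessingQueue: boolean          // guards against concurrent queue processing
  currentCollection: string           // added here: identifier of the collection in use
  collectionConversation: unknown[]   // added here: conversation history scoped to that collection
}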
2 changes: 1 addition & 1 deletion src/config.ts
@@ -33,7 +33,7 @@ export default {
? parseInt(process.env.SESSION_TIMEOUT)
: 48, // in hours
llms: {
apiEndpoint: process.env.LLMS_ENDPOINT ?? '',
apiEndpoint: process.env.LLMS_ENDPOINT, // 'http://127.0.0.1:5000',
wordLimit: 50,
model: 'chat-bison',
minimumBalance: 0,
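The apiEndpoint change drops the ?? '' fallback, so config.llms.apiEndpoint is undefined whenever LLMS_ENDPOINT is not set (the trailing comment hints at 'http://127.0.0.1:5000' as a local development value). A hedged sketch of a guard a consumer of this config might add; the helper below is hypothetical and not part of the commit:

import config from './config'

function requireLlmsEndpoint (): string {
  const endpoint = config.llms.apiEndpoint
  if (endpoint === undefined || endpoint === '') {
    throw new Error('LLMS_ENDPOINT is not configured')
  }
  return endpoint
}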
24 changes: 24 additions & 0 deletions src/modules/1country/index.ts
@@ -16,6 +16,7 @@ import { MAX_TRIES, sendMessage } from '../open-ai/helpers'
import { sleep } from '../sd-images/utils'
import { isValidUrl } from '../open-ai/utils/web-crawler'
import { now } from '../../utils/perf'
import OpenAI from 'openai'

export const SupportedCommands = {
register: { name: 'rent' },
@@ -525,6 +526,14 @@ export class OneCountryBot implements PayableBot {
return input.replace(/[^a-z0-9-]/g, '').toLowerCase()
}

async onEnd (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
ctx.session.collections.activeCollections = []
ctx.session.collections.collectionConversation = []
ctx.session.collections.collectionRequestQueue = []
ctx.session.collections.currentCollection = ''
ctx.session.collections.isProcessingQueue = false
}

async onError (
ctx: OnMessageContext | OnCallBackQueryData,
ex: any,
@@ -573,6 +582,21 @@ export class OneCountryBot implements PayableBot {
`On method "${ex.method}" | ${ex.error_code} - ${ex.description}`
)
}
} else if (ex instanceof OpenAI.APIError) {
// 429 RateLimitError
// e.status = 400 || e.code = BadRequestError
this.logger.error(`OPENAI Error ${ex.status}(${ex.code}) - ${ex.message}`)
if (ex.code === 'context_length_exceeded') {
await sendMessage(ctx, ex.message).catch(async (e) => { await this.onError(ctx, e, retryCount - 1) })
ctx.transient.analytics.actualResponseTime = now()
await this.onEnd(ctx)
} else {
await sendMessage(
ctx,
'Error accessing OpenAI (ChatGPT). Please try later'
).catch(async (e) => { await this.onError(ctx, e, retryCount - 1) })
ctx.transient.analytics.actualResponseTime = now()
}
} else {
this.logger.error(`${ex.toString()}`)
await sendMessage(ctx, 'Error handling your request')
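The new OpenAI.APIError branch in onError treats context_length_exceeded specially: the user receives the API's own message and onEnd resets the collection session, while other API failures produce a generic retry prompt. Each nested catch passes retryCount - 1, presumably so repeated failures eventually exhaust the retry budget. A minimal sketch of the calling convention this assumes (the call site is hypothetical; MAX_TRIES comes from '../open-ai/helpers'):

// hypothetical caller inside OneCountryBot, starting with the full retry budget
await sendMessage(ctx, 'Done').catch(async (e) => {
  await this.onError(ctx, e, MAX_TRIES)
})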
(Diffs for the remaining changed files are not shown.)
