add sonnet 3.5 model + upgrade error logging
fegloff committed Aug 27, 2024
1 parent 61fbcbd commit 02af912
Showing 2 changed files with 23 additions and 14 deletions.
src/modules/errorhandler.ts (29 changes: 19 additions & 10 deletions)
@@ -13,6 +13,12 @@ import OpenAI from 'openai'
 class ErrorHandler {
   public maxTries = 3
 
+  private writeLog (ctx: OnMessageContext | OnCallBackQueryData, errorMessage: string, logger: Logger): void {
+    const user = ctx.from.username ? ctx.from.username : ''
+    const msg = ctx.message?.text
+    logger.error(`Error msg:: ${errorMessage} | from user ${user} | msg::${msg}`)
+  }
+
   async onError (
     ctx: OnMessageContext | OnCallBackQueryData,
     e: any,
@@ -32,10 +38,9 @@
     }
     if (e instanceof GrammyError) {
       if (e.error_code === 400 && e.description.includes('not enough rights')) {
-        await sendMessage(
-          ctx,
-          'Error: The bot does not have permission to send photos in chat'
-        )
+        const errorMsg = 'Error: The bot does not have permission to send photos in chat'
+        this.writeLog(ctx, errorMsg, logger)
+        await sendMessage(ctx, errorMsg)
         ctx.transient.analytics.actualResponseTime = now()
       } else if (e.error_code === 429) {
         const retryAfter = e.parameters.retry_after
@@ -45,6 +50,7 @@
           : 60
         const method = e.method
         const errorMessage = `On method "${method}" | ${e.error_code} - ${e.description}`
+        this.writeLog(ctx, errorMessage, logger)
         logger.error(errorMessage)
         await sendMessage(
           ctx,
@@ -58,16 +64,16 @@
         }
         await sleep(retryAfter * 1000) // wait retryAfter seconds to enable bot
       } else {
-        logger.error(
-          `On method "${e.method}" | ${e.error_code} - ${e.description}`
-        )
+        const errorMsg = `On method "${e.method}" | ${e.error_code} - ${e.description}`
+        this.writeLog(ctx, errorMsg, logger)
         ctx.transient.analytics.actualResponseTime = now()
         await sendMessage(ctx, 'Error handling your request').catch(async (e) => { await this.onError(ctx, e, retryCount - 1, logger) })
       }
     } else if (e instanceof OpenAI.APIError) {
       // 429 RateLimitError
       // e.status = 400 || e.code = BadRequestError
-      logger.error(`OPENAI Error ${e.status}(${e.code}) - ${e.message}`)
+      const errorMsg = `OPENAI Error ${e.status}(${e.code}) - ${e.message}`
+      this.writeLog(ctx, errorMsg, logger)
       if (e.code === 'context_length_exceeded') {
         await sendMessage(ctx, e.message).catch(async (e) => { await this.onError(ctx, e, retryCount - 1, logger) })
         ctx.transient.analytics.actualResponseTime = now()
@@ -84,12 +90,15 @@
         ctx.transient.analytics.actualResponseTime = now()
       }
     } else if (e instanceof AxiosError) {
-      logger.error(`${e.message}`)
+      const errorMsg = `${e.message}`
+      this.writeLog(ctx, errorMsg, logger)
       await sendMessage(ctx, 'Error handling your request').catch(async (e) => {
         await this.onError(ctx, e, retryCount - 1, logger)
       })
     } else {
-      logger.error(`${e.toString()}`)
+      const errorMsg = `${e.toString()}`
+      this.writeLog(ctx, errorMsg, logger)
+      logger.error(e)
       await sendMessage(ctx, 'Error handling your request')
         .catch(async (e) => { await this.onError(ctx, e, retryCount - 1, logger) }
         )
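For reference, the new private writeLog helper centralizes the logging that onError previously did inline: every failure path now records the error message together with the Telegram username and the original message text. Below is a minimal, hypothetical usage sketch of how a command handler might route errors through this class; the pino logger, the import paths, and the handleMessage wrapper are assumptions for illustration and are not part of this commit.

// Hypothetical wiring (not from this repo): forward any error thrown while handling
// a message to ErrorHandler.onError, which now also calls writeLog with user + text.
import pino from 'pino' // assumed Logger implementation
import { ErrorHandler } from './modules/errorhandler' // assumed export name and path
import { type OnMessageContext } from './modules/types' // assumed types location

const logger = pino({ name: 'bot:error' })
const errorHandler = new ErrorHandler()

async function handleMessage (ctx: OnMessageContext): Promise<void> {
  try {
    // ...normal command handling...
  } catch (e) {
    // retries up to maxTries; each failure logs "Error msg:: ... | from user ... | msg::..."
    await errorHandler.onError(ctx, e, errorHandler.maxTries, logger)
  }
}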
src/modules/llms/utils/types.ts (8 changes: 4 additions & 4 deletions)
@@ -3,7 +3,7 @@ export enum LlmsModelsEnum {
   BISON = 'chat-bison',
   J2_ULTRA = 'j2-ultra',
   CLAUDE_OPUS = 'claude-3-opus-20240229',
-  CLAUDE_SONNET = 'claude-3-sonnet-20240229',
+  CLAUDE_SONNET = 'claude-3-5-sonnet-20240620',
   CLAUDE_HAIKU = 'claude-3-haiku-20240307',
   GEMINI_15 = 'gemini-1.5-pro-latest',
   GEMINI = 'gemini-1.0-pro',
@@ -63,11 +63,11 @@ export const LlmsModels: Record<string, ChatModel> = {
     maxContextTokens: 4096,
     chargeType: 'TOKEN'
   },
-  'claude-3-sonnet-20240229': {
-    name: 'claude-3-sonnet-20240229',
+  'claude-3-5-sonnet-20240620': {
+    name: 'claude-3-5-sonnet-20240620',
     inputPrice: 0.003, // 3.00 (1M Tokens) => 0.003 (1K tokens)
     outputPrice: 0.015,
-    maxContextTokens: 4096,
+    maxContextTokens: 8192,
     chargeType: 'TOKEN'
   },
   'claude-3-haiku-20240307': {
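The types.ts change points the CLAUDE_SONNET entry at the 3.5 Sonnet model id and raises maxContextTokens from 4096 to 8192, while the per-token prices stay the same. As a quick illustration of how the inputPrice/outputPrice fields (USD per 1K tokens, per the inline comment) could translate into a charge, here is a hypothetical helper; the function name and import path are assumptions, not code from this repo.

// Illustrative only: combine the per-1K-token prices defined in LlmsModels into a USD charge.
import { LlmsModels } from './modules/llms/utils/types' // assumed import path

function getChargeUsd (modelName: string, inputTokens: number, outputTokens: number): number {
  const model = LlmsModels[modelName]
  // inputPrice / outputPrice are USD per 1K tokens, e.g. $3.00 per 1M tokens => 0.003 per 1K
  return (inputTokens / 1000) * model.inputPrice + (outputTokens / 1000) * model.outputPrice
}

// Example: 2,000 prompt tokens + 500 completion tokens on claude-3-5-sonnet-20240620
// => 2 * 0.003 + 0.5 * 0.015 = $0.0135
console.log(getChargeUsd('claude-3-5-sonnet-20240620', 2000, 500))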
