Commit
add token count on vertex module
fegloff committed Mar 21, 2024
1 parent 491c9d6 commit 3727b6b
Showing 1 changed file with 27 additions and 18 deletions.
45 changes: 27 additions & 18 deletions src/modules/llms/api/vertex.ts
@@ -7,7 +7,7 @@ import { GrammyError } from 'grammy'
 import { pino } from 'pino'
 import { LlmsModelsEnum } from '../types'
 
-const API_ENDPOINT = config.llms.apiEndpoint // config.llms.apiEndpoint // http://localhost:8080' // config.llms.apiEndpoint
+const API_ENDPOINT = config.llms.apiEndpoint // config.llms.apiEndpoint // config.llms.apiEndpoint // 'http://127.0.0.1:5000' // config.llms.apiEndpoint
 
 const logger = pino({
   name: 'Gemini - llmsBot',
@@ -71,26 +71,34 @@ export const vertexStreamCompletion = async (
   const completionStream: Readable = response.data
   // Read and process the stream
   let completion = ''
+  let outputTokens = ''
+  let inputTokens = ''
   for await (const chunk of completionStream) {
     const msg = chunk.toString()
     if (msg) {
-      completion += msg.split('Text: ')[1]
-      completion = completion.replaceAll('...', '')
-      completion += '...'
-      if (ctx.chat?.id) {
-        await ctx.api
-          .editMessageText(ctx.chat?.id, msgId, completion)
-          .catch(async (e: any) => {
-            if (e instanceof GrammyError) {
-              if (e.error_code !== 400) {
-                throw e
+      if (msg.startsWith('Text')) {
+        completion += msg.split('Text: ')[1]
+        completion = completion.replaceAll('...', '')
+        completion += '...'
+        if (ctx.chat?.id) {
+          await ctx.api
+            .editMessageText(ctx.chat?.id, msgId, completion)
+            .catch(async (e: any) => {
+              if (e instanceof GrammyError) {
+                if (e.error_code !== 400) {
+                  throw e
+                } else {
+                  logger.error(e)
+                }
               } else {
-                logger.error(e)
+                throw e
               }
-            } else {
-              throw e
-            }
-          })
+            })
+        }
+      } else if (msg.startsWith('Input Token')) {
+        const tokenMsg = msg.split('Input Token: ')[1]
+        inputTokens = tokenMsg.split('Output Tokens: ')[0]
+        outputTokens = tokenMsg.split('Output Tokens: ')[1]
       }
     }
   }
@@ -108,8 +116,9 @@
         throw e
       }
     })
-  const totalOutputTokens = '10' // response.headers['x-openai-output-tokens']
-  const totalInputTokens = '10' // response.headers['x-openai-input-tokens']
+  const totalOutputTokens = outputTokens // response.headers['x-openai-output-tokens']
+  const totalInputTokens = inputTokens // response.headers['x-openai-input-tokens']
+
   return {
     completion: {
       content: completion,
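For reference, here is a minimal standalone sketch of the chunk-parsing behavior this commit introduces. It assumes the completion stream emits plain-text chunks of the form "Text: <fragment>" and "Input Token: <n> Output Tokens: <m>", which is inferred from the split() calls in the diff; the parseVertexChunks helper and the final parseInt sum are illustrative additions, not code from the commit.

// Sketch only: mirrors the parsing in the new for-await branch, outside the
// grammY/axios streaming context. Chunk formats are assumed from the diff.
interface ParsedVertexStream {
  completion: string
  inputTokens: string
  outputTokens: string
}

const parseVertexChunks = (chunks: string[]): ParsedVertexStream => {
  let completion = ''
  let inputTokens = ''
  let outputTokens = ''
  for (const msg of chunks) {
    if (msg.startsWith('Text')) {
      // "Text: ..." chunks carry a fragment of the completion.
      completion += msg.split('Text: ')[1]
    } else if (msg.startsWith('Input Token')) {
      // "Input Token: 42 Output Tokens: 117" carries the usage counts;
      // the same two split() calls as in the diff leave them as strings.
      const tokenMsg = msg.split('Input Token: ')[1]
      inputTokens = tokenMsg.split('Output Tokens: ')[0]
      outputTokens = tokenMsg.split('Output Tokens: ')[1]
    }
  }
  return { completion, inputTokens, outputTokens }
}

// Example with assumed payloads; parseInt tolerates the stray whitespace
// the split() calls leave around the token counts.
const parsed = parseVertexChunks([
  'Text: Hello',
  'Text:  world',
  'Input Token: 42 Output Tokens: 117'
])
console.log(parsed.completion) // 'Hello world'
console.log(parseInt(parsed.inputTokens, 10) + parseInt(parsed.outputTokens, 10)) // 159

In the committed code these captured strings feed totalInputTokens and totalOutputTokens, replacing the hard-coded '10' placeholders that previously stood in for the token counts.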
