Commit

fix new command, minimum balance for llmsBot, anthropic token counter
fegloff committed Apr 3, 2024
1 parent 85fe199 commit f836e19
Showing 7 changed files with 65 additions and 21 deletions.
9 changes: 9 additions & 0 deletions src/bot.ts
@@ -589,6 +589,15 @@ bot.command(['alias', 'aliases'], async (ctx) => {
})
})

+ // bot.command(['end'], async (ctx) => {
+ //   logger.info('/end command')
+ //   return await ctx.reply(ALIAS.text, {
+ //     parse_mode: 'Markdown',
+ //     disable_web_page_preview: true,
+ //     message_thread_id: ctx.message?.message_thread_id
+ //   })
+ // })
+
// bot.command("memo", (ctx) => {
// ctx.reply(MEMO.text, {
// parse_mode: "Markdown",
2 changes: 1 addition & 1 deletion src/config.ts
@@ -33,7 +33,7 @@ export default {
? parseInt(process.env.SESSION_TIMEOUT)
: 48, // in hours
llms: {
-     apiEndpoint: process.env.LLMS_ENDPOINT, // 'http://127.0.0.1:5000',
+     apiEndpoint: process.env.LLMS_ENDPOINT, // 'http://127.0.0.1:5000', // process.env.LLMS_ENDPOINT, // 'http://127.0.0.1:5000',
wordLimit: 50,
model: 'chat-bison',
minimumBalance: 0,
20 changes: 14 additions & 6 deletions src/modules/llms/api/athropic.ts
@@ -16,7 +16,7 @@ const logger = pino({
}
})

- const API_ENDPOINT = config.llms.apiEndpoint // config.llms.apiEndpoint // 'http://127.0.0.1:5000' // config.llms.apiEndpoint
+ const API_ENDPOINT = config.llms.apiEndpoint // 'http://127.0.0.1:5000' // config.llms.apiEndpoint

export const anthropicCompletion = async (
conversation: ChatConversation[],
@@ -87,15 +87,23 @@ export const anthropicStreamCompletion = async (
const msg = chunk.toString()
if (msg) {
if (msg.startsWith('Input Token')) {
-         inputTokens = msg.split('Input Token: ')[1]
+         const regex = /Input Token: (\d+)(.*)/
+         // Execute the regular expression
+         const match = regex.exec(msg)
+         if (match) {
+           inputTokens = match[1].trim() // Extract the integer part
+           if (match.length >= 3) {
+             completion += match[2]
+           }
+         }
} else if (msg.startsWith('Output Tokens')) {
-         outputTokens = msg.split('Output Tokens: ')[1]
+         outputTokens = msg.split('Output Tokens: ')[1].trim()
} else {
wordCount++
-         completion += msg // .split('Text: ')[1]
+         completion += msg
if (msg.includes('Output Tokens:')) {
-           const tokenMsg = msg.split('Output Tokens: ')[1]
-           outputTokens = tokenMsg.split('Output Tokens: ')[1]
+           outputTokens = msg.split('Output Tokens: ')[1].trim()
+           // outputTokens = tokenMsg.split('Output Tokens: ')[1].trim()
completion = completion.split('Output Tokens: ')[0]
}
if (wordCount > wordCountMinimum) { // if (chunck === '.' && wordCount > wordCountMinimum) {
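
Note on the token-counter change above: the old code took everything after 'Input Token: ' as the count, which broke when an Anthropic stream chunk arrived with completion text fused onto the marker. The new regex captures the integer and re-appends any fused tail to the completion. A minimal standalone sketch of that parsing; parseInputTokenChunk is a hypothetical helper for illustration, not part of this commit:

const parseInputTokenChunk = (msg: string): { inputTokens?: string, text: string } => {
  // Capture the integer count plus any completion text fused after it.
  // The /s flag lets the tail span newlines; the commit's regex omits it,
  // so a fused tail containing a newline would be truncated there.
  const match = /Input Token: (\d+)(.*)/s.exec(msg)
  return match ? { inputTokens: match[1], text: match[2] ?? '' } : { text: msg }
}

// parseInputTokenChunk('Input Token: 42Once upon a time')
// => { inputTokens: '42', text: 'Once upon a time' }
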
32 changes: 25 additions & 7 deletions src/modules/llms/helpers.ts
@@ -227,14 +227,16 @@ export const hasPrefix = (prompt: string): string => {
)
}

- export const getPromptPrice = (completion: LlmCompletion, data: ChatPayload): { price: number, promptTokens: number, completionTokens: number } => {
+ export const getPromptPrice = (completion: LlmCompletion, data: ChatPayload, updateSession = true): { price: number, promptTokens: number, completionTokens: number } => {
const { ctx, model } = data
const modelPrice = getChatModel(model)
const price =
getChatModelPrice(modelPrice, true, completion.inputTokens ?? 0, completion.outputTokens ?? 0) *
config.openAi.chatGpt.priceAdjustment
-   ctx.session.llms.usage += completion.outputTokens ?? 0
-   ctx.session.llms.price += price
+   if (updateSession) {
+     ctx.session.llms.usage += completion.outputTokens ?? 0
+     ctx.session.llms.price += price
+   }
return {
price,
promptTokens: completion.inputTokens ?? 0,
@@ -269,8 +271,8 @@ export async function addUrlToCollection (ctx: OnMessageContext | OnCallBackQuer
})
const msgId = (await ctx.reply('...', {
message_thread_id:
-       ctx.message?.message_thread_id ??
-       ctx.message?.reply_to_message?.message_thread_id
+     ctx.message?.message_thread_id ??
+     ctx.message?.reply_to_message?.message_thread_id
})).message_id

ctx.session.collections.collectionRequestQueue.push({
@@ -291,8 +293,8 @@ export async function addDocToCollection (ctx: OnMessageContext | OnCallBackQuer
})
const msgId = (await ctx.reply('...', {
message_thread_id:
-       ctx.message?.message_thread_id ??
-       ctx.message?.reply_to_message?.message_thread_id
+     ctx.message?.message_thread_id ??
+     ctx.message?.reply_to_message?.message_thread_id
})).message_id
ctx.session.collections.collectionRequestQueue.push({
collectionName,
@@ -304,3 +306,19 @@
processingTime: 0
})
}

+ export const getMinBalance = async (ctx: OnMessageContext | OnCallBackQueryData,
+   model: string): Promise<number> => {
+   const minBalance = getPromptPrice({
+     inputTokens: 400,
+     outputTokens: 800,
+     completion: undefined,
+     usage: 0,
+     price: 0
+   }, {
+     ctx,
+     model: model ?? '',
+     conversation: []
+   }, false)
+   return minBalance.price
+ }
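
The new getMinBalance helper prices a nominal request (400 input tokens, 800 output tokens) for the chosen model, reusing getPromptPrice with the new updateSession = false flag so the dry run does not inflate ctx.session.llms.usage or ctx.session.llms.price. Rough arithmetic with assumed per-1K-token rates (illustrative numbers, not the actual chat-bison pricing):

// Assumed rates for illustration only: $0.008 / 1K input, $0.024 / 1K output
const inputCost = (400 / 1000) * 0.008      // 0.0032
const outputCost = (800 / 1000) * 0.024     // 0.0192
const estimatedMin = inputCost + outputCost // 0.0224, before the priceAdjustment factor
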
15 changes: 11 additions & 4 deletions src/modules/llms/index.ts
@@ -19,6 +19,7 @@ import { sleep } from '../sd-images/utils'
import {
addDocToCollection,
addUrlToCollection,
+   getMinBalance,
getPromptPrice,
hasBardPrefix,
hasClaudeOpusPrefix,
@@ -184,15 +185,18 @@ export class LlmsBot implements PayableBot {
ctx.transient.analytics.actualResponseTime = now()
}

-   private async hasBalance (ctx: OnMessageContext | OnCallBackQueryData): Promise<boolean> {
+   private async hasBalance (ctx: OnMessageContext | OnCallBackQueryData, minBalance = +config.llms.minimumBalance): Promise<boolean> {
+     const minBalanceOne = this.payments.toONE(await this.payments.getPriceInONE(minBalance), false)
const accountId = this.payments.getAccountId(ctx)
const addressBalance = await this.payments.getUserBalance(accountId)
const { totalCreditsAmount } = await chatService.getUserCredits(accountId)
const balance = addressBalance.plus(totalCreditsAmount)
const balanceOne = this.payments.toONE(balance, false).toFixed(2)
+     const isGroupInWhiteList = await this.payments.isGroupInWhitelist(ctx as OnMessageContext)
return (
-       +balanceOne > +config.llms.minimumBalance ||
-       (this.payments.isUserInWhitelist(ctx.from.id, ctx.from.username))
+       +balanceOne > +minBalanceOne ||
+       (this.payments.isUserInWhitelist(ctx.from.id, ctx.from.username)) ||
+       isGroupInWhiteList
)
}
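
Together with getMinBalance, the gate now scales with the model's pricing instead of the static config.llms.minimumBalance: the estimated price is converted into ONE before the comparison, and whitelisted groups now pass as well. A condensed sketch of the intended call flow, assuming only the signatures shown in this diff (it mirrors the onChat change further down):

// Sketch only: price a nominal request for the model, then gate on it.
const minBalance = await getMinBalance(ctx, model) // price estimate from getPromptPrice
if (await this.hasBalance(ctx, minBalance)) {
  // proceed with the completion request
} else {
  // fall through to the bot's insufficient-balance handling
}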

@@ -441,9 +445,11 @@
const processingTime = config.llms.processingTime
while (ctx.session.collections.collectionRequestQueue.length > 0) {
try {
+         // console.log('HERE MY FRIENDS')
const collection = ctx.session.collections.collectionRequestQueue.shift()
if (collection) {
const result = await llmCheckCollectionStatus(collection?.collectionName ?? '')
+           // console.log('onCheckCollectionStatus', result)
if (result.price > 0) {
if (
!(await this.payments.pay(ctx as OnMessageContext, result.price)) // price 0.05 x collections (chunks)
@@ -738,7 +744,8 @@
const prompt = msg?.content as string
const model = msg?.model
const { chatConversation } = ctx.session.llms
-     if (await this.hasBalance(ctx)) {
+     const minBalance = await getMinBalance(ctx, msg?.model as LlmsModelsEnum)
+     if (await this.hasBalance(ctx, minBalance)) {
if (!prompt) {
const msg =
chatConversation.length > 0
7 changes: 4 additions & 3 deletions src/modules/open-ai/index.ts
@@ -245,7 +245,7 @@ export class OpenAIBot implements PayableBot {
ctx.hasCommand(SupportedCommands.new) ||
(ctx.message?.text?.startsWith('new ') && ctx.chat?.type === 'private')
) {
-         await this.onEnd(ctx)
+         await this.onStop(ctx)
await this.onChat(ctx)
return
}
@@ -408,14 +408,15 @@
ctx.transient.analytics.actualResponseTime = now()
}

-   private async hasBalance (ctx: OnMessageContext | OnCallBackQueryData): Promise<boolean> {
+   private async hasBalance (ctx: OnMessageContext | OnCallBackQueryData,
+     minBalance = +config.openAi.chatGpt.minimumBalance): Promise<boolean> {
const accountId = this.payments.getAccountId(ctx)
const addressBalance = await this.payments.getUserBalance(accountId)
const { totalCreditsAmount } = await chatService.getUserCredits(accountId)
const balance = addressBalance.plus(totalCreditsAmount)
const balanceOne = this.payments.toONE(balance, false)
return (
-       (+balanceOne > +config.openAi.chatGpt.minimumBalance) ||
+       (+balanceOne > minBalance) ||
(this.payments.isUserInWhitelist(ctx.from.id, ctx.from.username))
)
}
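
One unit caveat between the two hasBalance variants: the llms version converts its minBalance price into ONE (via getPriceInONE) before comparing, whereas this one compares the ONE-denominated balanceOne against minBalance directly, so callers should pass a value already expressed in ONE. Hypothetical calls under that assumption, not taken from this commit:

await this.hasBalance(ctx)    // default: config.openAi.chatGpt.minimumBalance
await this.hasBalance(ctx, 5) // require roughly 5 ONE for this request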
1 change: 1 addition & 0 deletions src/modules/payment/index.ts
@@ -297,6 +297,7 @@ export class BotPayments {
if (ctx.chat.id && ctx.chat.type !== 'private') {
const { whitelist } = config.payment
const admins = await ctx.getChatAdministrators()
+       this.logger.info(`Chat Admins(${admins.length})`)
for (let i = 0; i < admins.length; i++) {
const username = admins[i].user.username ?? ''
if (whitelist.includes(admins[i].user.id.toString()) ||
