add api token to axios request header
fegloff committed May 3, 2024
1 parent 3dbb85b commit 6d14ef0
Showing 10 changed files with 38 additions and 21 deletions.
2 changes: 1 addition & 1 deletion package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -95,7 +95,7 @@
"express": "^4.18.2",
"express-async-handler": "^1.2.0",
"form-data": "^4.0.0",
"gpt-tokenizer": "^2.1.1",
"gpt-tokenizer": "^2.1.2",
"grammy": "^1.22.4",
"jsqr": "^1.4.0",
"litllm": "^3.0.0",
3 changes: 2 additions & 1 deletion src/config.ts
@@ -33,7 +33,8 @@ export default {
? parseInt(process.env.SESSION_TIMEOUT)
: 48, // in hours
llms: {
-apiEndpoint: process.env.LLMS_ENDPOINT, // // process.env.LLMS_ENDPOINT, // 'http://127.0.0.1:5000',
+apiEndpoint: 'http://127.0.0.1:5000', // // process.env.LLMS_ENDPOINT, // 'http://127.0.0.1:5000',
+apiKey: process.env.LLMS_API_KEY ?? '',
wordLimit: 50,
model: 'chat-bison',
minimumBalance: 0,
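One consequence of the new apiKey line worth noting: because of the ?? '' fallback, an unset LLMS_API_KEY resolves to an empty string, and the helper.ts added further down will still send an Authorization: Bearer header with no token after it. A minimal sketch of how the value resolves (the token value is hypothetical):

// Mirrors the config line above; 'sk-example-token' is a made-up value.
const apiKey = process.env.LLMS_API_KEY ?? ''
// LLMS_API_KEY=sk-example-token  -> 'Authorization: Bearer sk-example-token'
// LLMS_API_KEY unset             -> 'Authorization: Bearer '  (empty token)
console.log(`Authorization: Bearer ${apiKey}`)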
18 changes: 9 additions & 9 deletions src/modules/llms/api/athropic.ts
@@ -8,6 +8,7 @@ import { type OnCallBackQueryData, type OnMessageContext, type ChatConversation
import { type LlmCompletion } from './llmApi'
import { LlmsModelsEnum } from '../utils/types'
import { sleep } from '../../sd-images/utils'
+import { headers, headersStream } from './helper'

const logger = pino({
name: 'anthropic - llmsBot',
@@ -33,7 +34,7 @@ export const anthropicCompletion = async (
.map(m => { return { content: m.content, role: m.role } })
}
const url = `${API_ENDPOINT}/anthropic/completions`
-const response = await axios.post(url, data)
+const response = await axios.post(url, data, headers)
const respJson = JSON.parse(response.data)
if (response) {
const totalInputTokens = respJson.usage.input_tokens
@@ -66,7 +67,7 @@ export const anthropicStreamCompletion = async (
logger.info(`Handling ${model} stream completion`)
const data = {
model,
-stream: true, // Set stream to true to receive the completion as a stream
+stream: true,
system: config.openAi.chatGpt.chatCompletionContext,
max_tokens: limitTokens ? +config.openAi.chatGpt.maxTokens : undefined,
messages: conversation.filter(c => c.model === model).map(m => { return { content: m.content, role: m.role } })
@@ -77,17 +78,17 @@
if (!ctx.chat?.id) {
throw new Error('Context chat id should not be empty after openAI streaming')
}
-const response: AxiosResponse = await axios.post(url, data, { responseType: 'stream' })
-// Create a Readable stream from the response

+const response: AxiosResponse = await axios.post(url, data, headersStream)

const completionStream: Readable = response.data
// Read and process the stream
let completion = ''
let outputTokens = ''
let inputTokens = ''
for await (const chunk of completionStream) {
const msg = chunk.toString()
if (msg) {
-if (msg.startsWith('Input Token')) {
+if (msg.includes('Input Token:')) {
const regex = /Input Token: (\d+)(.*)/
// Execute the regular expression
const match = regex.exec(msg)
@@ -176,22 +177,21 @@ export const toolsChatCompletion = async (
.map(m => { return { content: m.content, role: m.role } })
}
const url = `${API_ENDPOINT}/anthropic/completions/tools`
-const response = await axios.post(url, input)
+const response = await axios.post(url, input, headers)
const respJson = response.data
if (respJson) {
const toolId = respJson.id
let data
let counter = 1
while (true) {
-const resp = await axios.get(`${API_ENDPOINT}/anthropic/completions/tools/${toolId}`)
+const resp = await axios.get(`${API_ENDPOINT}/anthropic/completions/tools/${toolId}`, headers)
data = resp.data
if (data.status === 'DONE' || counter > 20) {
break
}
counter++
await sleep(3000)
}
-console.log('here', data.status, counter)
if (data.status === 'DONE' && !data.error && counter < 20) {
const totalInputTokens = data.data.usage.input_tokens
const totalOutputTokens = data.data.usage.output_tokens
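The tools flow above is a submit-then-poll pattern: the initial POST returns a job id, and the loop then polls GET ${API_ENDPOINT}/anthropic/completions/tools/${toolId} every 3 seconds, breaking on a 'DONE' status or after 20 attempts (roughly a minute). A generic sketch of that shape, with a hypothetical helper name:

import axios from 'axios'
import { headers } from './helper'
import { sleep } from '../../sd-images/utils'

// Hypothetical helper illustrating the submit-then-poll loop used above.
async function pollUntilDone (statusUrl: string, maxTries = 20, intervalMs = 3000): Promise<any> {
  for (let attempt = 1; attempt <= maxTries; attempt++) {
    const { data } = await axios.get(statusUrl, headers)
    if (data.status === 'DONE') return data
    await sleep(intervalMs)
  }
  throw new Error(`Polling ${statusUrl} timed out after ${maxTries} attempts`)
}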
13 changes: 13 additions & 0 deletions src/modules/llms/api/helper.ts
@@ -0,0 +1,13 @@
+import config from '../../../config'
+
+export function createHeaders (responseType?: string, apiKey = config.llms.apiKey): Record<string, string | any> {
+  const headers: Record<string, string | any> = {}
+  if (responseType) {
+    headers.responseType = responseType
+  }
+  headers.headers = { Authorization: `Bearer ${apiKey}` }
+  return headers
+}
+
+export const headers = createHeaders()
+export const headersStream = createHeaders('stream')
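
Note that, despite its name, the object createHeaders returns is an axios request config rather than a bare header map: the optional responseType sits at the top level alongside a nested headers field, which is why headers and headersStream can be passed directly as the config argument of axios.post and axios.get throughout this commit. A minimal usage sketch (the endpoint path and payload are hypothetical):

import axios from 'axios'
import { headers, headersStream } from './helper'

async function demo (): Promise<void> {
  const url = 'http://127.0.0.1:5000/llms/completions' // hypothetical endpoint
  // headers -> { headers: { Authorization: 'Bearer <LLMS_API_KEY>' } }
  const plain = await axios.post(url, { prompt: 'hello' }, headers)
  console.log(plain.data)
  // headersStream -> { responseType: 'stream', headers: { Authorization: 'Bearer <LLMS_API_KEY>' } }
  const streamed = await axios.post(url, { prompt: 'hello' }, headersStream)
  streamed.data.pipe(process.stdout) // data is a Readable when responseType is 'stream'
}

demo().catch(console.error)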
10 changes: 6 additions & 4 deletions src/modules/llms/api/llmApi.ts
@@ -4,6 +4,8 @@ import { type ChatConversation } from '../../types'
import pino from 'pino'
import { LlmsModels, LlmsModelsEnum } from '../utils/types'
import { type ChatModel } from '../utils/types'
+import { headers } from './helper'

// import { type ChatModel } from '../../open-ai/types'

const API_ENDPOINT = config.llms.apiEndpoint // config.llms.apiEndpoint // 'http://localhost:8080' // http://127.0.0.1:5000' // config.llms.apiEndpoint
@@ -58,7 +60,7 @@ export const getChatModelPrice = (
export const llmAddUrlDocument = async (args: LlmAddUrlDocument): Promise<string> => {
const data = { ...args }
const endpointUrl = `${API_ENDPOINT}/collections/document`
-const response = await axios.post(endpointUrl, data)
+const response = await axios.post(endpointUrl, data, headers)
if (response) {
return response.data.collectionName
}
@@ -72,7 +74,7 @@ interface LlmCheckCollectionStatusOutput {
}
export const llmCheckCollectionStatus = async (name: string): Promise<LlmCheckCollectionStatusOutput> => {
const endpointUrl = `${API_ENDPOINT}/collections/document/${name}` // ?collectionName=${collectionName}`
-const response = await axios.get(endpointUrl)
+const response = await axios.get(endpointUrl, headers)
if (response) {
return response.data
}
@@ -91,7 +93,7 @@ interface QueryUrlDocumentOutput {
export const queryUrlDocument = async (args: QueryUrlDocument): Promise<QueryUrlDocumentOutput> => {
const data = { collectionName: args.collectioName, prompt: args.prompt, conversation: args.conversation }
const endpointUrl = `${API_ENDPOINT}/collections/query`
-const response = await axios.post(endpointUrl, data)
+const response = await axios.post(endpointUrl, data, headers)
if (response) {
return response.data
}
@@ -117,7 +119,7 @@ export const llmCompletion = async (
messages: conversation.filter(c => c.model === model)
}
const url = `${API_ENDPOINT}/llms/completions`
-const response = await axios.post(url, data)
+const response = await axios.post(url, data, headers)

if (response) {
const totalInputTokens = response.data.usage.prompt_tokens
1 change: 0 additions & 1 deletion src/modules/llms/api/openai.ts
@@ -45,7 +45,6 @@ export async function postGenerateImg (
const response = await openai.images.generate(
payload as OpenAI.Images.ImageGenerateParams
)
-console.log(response)
return response.data
}

3 changes: 2 additions & 1 deletion src/modules/llms/api/pdfHandler.ts
@@ -1,6 +1,7 @@
import axios, { AxiosError } from 'axios'
import config from '../../../config'
import { type ChatConversation } from '../../types'
+import { headers } from './helper'

export interface PdfCompletion {
completion: ChatConversation | undefined
@@ -12,7 +13,7 @@ export const handlePdf = async (prompt: string): Promise<PdfCompletion> => {
try {
const data = { question: prompt }
const url = `${config.llms.pdfUrl}/ask`
-const response = await axios.post(url, data)
+const response = await axios.post(url, data, headers)
if (response) {
console.log(response.data)
return {
5 changes: 3 additions & 2 deletions src/modules/llms/api/vertex.ts
@@ -6,6 +6,7 @@ import { type Readable } from 'stream'
import { GrammyError } from 'grammy'
import { pino } from 'pino'
import { LlmsModelsEnum } from '../utils/types'
+import { headers, headersStream } from './helper'

const API_ENDPOINT = config.llms.apiEndpoint // config.llms.apiEndpoint // 'http://127.0.0.1:5000' // config.llms.apiEndpoint

@@ -37,7 +38,7 @@ export const vertexCompletion = async (
}

const url = `${API_ENDPOINT}/vertex/completions`
-const response = await axios.post(url, data)
+const response = await axios.post(url, data, headers)
if (response) {
const totalInputTokens = 4 // response.data.usage.prompt_tokens;
const totalOutputTokens = 5 // response.data.usage.completion_tokens;
@@ -77,7 +78,7 @@ export const vertexStreamCompletion = async (
if (!ctx.chat?.id) {
throw new Error('Context chat id should not be empty after openAI streaming')
}
-const response: AxiosResponse = await axios.post(url, data, { responseType: 'stream' })
+const response: AxiosResponse = await axios.post(url, data, headersStream)
// Create a Readable stream from the response
const completionStream: Readable = response.data
// Read and process the stream
2 changes: 1 addition & 1 deletion src/modules/llms/claudeBot.ts
@@ -120,7 +120,7 @@ export class ClaudeBot extends LlmsBase {
(hasClaudeOpusPrefix(ctx.message?.text ?? '') !== '')
) {
this.updateSessionModel(ctx, LlmsModelsEnum.CLAUDE_OPUS)
-await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS, false, false) // true)
+await this.onChat(ctx, LlmsModelsEnum.CLAUDE_OPUS, true, false)
return
}
if (ctx.hasCommand([SupportedCommands.claudeSonnet, SupportedCommands.sonnet, SupportedCommands.sonnetShort])) {
