From e7270e184d6b10512cb19d1d49562bbb3d578774 Mon Sep 17 00:00:00 2001 From: FluxCapacitor2 <31071265+FluxCapacitor2@users.noreply.github.com> Date: Sun, 17 Nov 2024 23:03:53 -0500 Subject: [PATCH] Add the API key via a parameter instead of environment variable --- app/config/config.go | 1 + app/embedding/embedding.go | 4 ++-- app/processqueue.go | 2 +- app/server/server.go | 2 +- config-sample.yml | 1 + 5 files changed, 6 insertions(+), 4 deletions(-) diff --git a/app/config/config.go b/app/config/config.go index 8ce0f44..57c771b 100644 --- a/app/config/config.go +++ b/app/config/config.go @@ -17,6 +17,7 @@ type Config struct { } `yaml:"db"` Embeddings struct { OpenAIBaseURL string `yaml:"openaiBaseUrl"` + APIKey string `yaml:"apiKey"` Model string Dimensions int ChunkSize int `yaml:"chunkSize"` diff --git a/app/embedding/embedding.go b/app/embedding/embedding.go index a505f56..55493e5 100644 --- a/app/embedding/embedding.go +++ b/app/embedding/embedding.go @@ -9,9 +9,9 @@ import ( "github.com/tmc/langchaingo/llms/openai" ) -func GetEmbeddings(openAIBaseURL string, model string, chunk string) ([]float32, error) { +func GetEmbeddings(openAIBaseURL string, model string, apiKey string, chunk string) ([]float32, error) { - llm, err := openai.New(openai.WithBaseURL(openAIBaseURL), openai.WithEmbeddingModel(model)) + llm, err := openai.New(openai.WithBaseURL(openAIBaseURL), openai.WithEmbeddingModel(model), openai.WithToken(apiKey)) if err != nil { return nil, fmt.Errorf("error setting up LLM for embedding: %v", err) } diff --git a/app/processqueue.go b/app/processqueue.go index eae467b..c67fa50 100644 --- a/app/processqueue.go +++ b/app/processqueue.go @@ -82,7 +82,7 @@ func processEmbedQueue(db database.Database, config *config.Config, src config.S } } - vector, err := embedding.GetEmbeddings(config.Embeddings.OpenAIBaseURL, config.Embeddings.Model, item.Content) + vector, err := embedding.GetEmbeddings(config.Embeddings.OpenAIBaseURL, config.Embeddings.Model, config.Embeddings.APIKey, item.Content) if err != nil { fmt.Printf("error getting embeddings: %v\n", err) markFailure() diff --git a/app/server/server.go b/app/server/server.go index 509edcd..7175331 100644 --- a/app/server/server.go +++ b/app/server/server.go @@ -134,7 +134,7 @@ func Start(db database.Database, config *config.Config) { if q != "" && src != nil && len(src) > 0 { - vector, err := embedding.GetEmbeddings(config.Embeddings.OpenAIBaseURL, config.Embeddings.Model, q) + vector, err := embedding.GetEmbeddings(config.Embeddings.OpenAIBaseURL, config.Embeddings.Model, config.Embeddings.APIKey, q) if err != nil { response = &httpResponse{ status: 500, diff --git a/config-sample.yml b/config-sample.yml index 98d80c3..4a49504 100644 --- a/config-sample.yml +++ b/config-sample.yml @@ -51,6 +51,7 @@ embeddings: # openaiBaseUrl: https://api.openai.com/v1/ # model: text-embedding-3-small # dimensions: 1536 + # apiKey: sk-************************************* # You can also use any OpenAI-compatible API, like a local Ollama server: openaiBaseUrl: http://localhost:11434/v1/