Merge pull request #1013 from AStupidBear/dashscope
Add support for DashScope provider in LLM and embeddings modules
assafelovic authored Dec 14, 2024
2 parents 99d65b0 + 9bfc731 commit 40a6c40
Showing 2 changed files with 11 additions and 0 deletions.
6 changes: 6 additions & 0 deletions gpt_researcher/llm_provider/generic/base.py
@@ -17,6 +17,7 @@
    "huggingface",
    "groq",
    "bedrock",
    "dashscope",
    "xai",
}

@@ -103,6 +104,11 @@ def from_provider(cls, provider: str, **kwargs: Any):
            model_id = kwargs.pop("model", None) or kwargs.pop("model_name", None)
            kwargs = {"model_id": model_id, **kwargs}
            llm = ChatBedrock(**kwargs)
        elif provider == "dashscope":
            _check_pkg("langchain_dashscope")
            from langchain_dashscope import ChatDashScope

            llm = ChatDashScope(**kwargs)
        elif provider == "xai":
            _check_pkg("langchain_xai")
            from langchain_xai import ChatXAI
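A minimal usage sketch for the new "dashscope" branch, assuming the optional langchain_dashscope package is installed and DASHSCOPE_API_KEY is set in the environment; the model name "qwen-turbo" and the keyword arguments are illustrative and not taken from this diff:

from langchain_dashscope import ChatDashScope

# Assumes ChatDashScope accepts the usual LangChain chat-model kwargs
# (model, temperature), which from_provider forwards unchanged via **kwargs.
llm = ChatDashScope(model="qwen-turbo", temperature=0.4)
print(llm.invoke("Give a one-sentence summary of DashScope.").content)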
5 changes: 5 additions & 0 deletions gpt_researcher/memory/embeddings.py
@@ -18,6 +18,7 @@
    "huggingface",
    "nomic",
    "voyageai",
    "dashscope",
    "custom",
}

@@ -102,6 +103,10 @@ def __init__(self, embedding_provider: str, model: str, **embdding_kwargs: Any):
                    model=model,
                    **embdding_kwargs,
                )
            case "dashscope":
                from langchain_community.embeddings import DashScopeEmbeddings

                _embeddings = DashScopeEmbeddings(model=model, **embdding_kwargs)
            case _:
                raise Exception("Embedding not found.")

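A minimal sketch of the embeddings path added above, assuming the dashscope Python package used by langchain_community is installed and DASHSCOPE_API_KEY is set; the model name "text-embedding-v2" is an example and not taken from this diff:

from langchain_community.embeddings import DashScopeEmbeddings

# DashScopeEmbeddings implements the standard LangChain Embeddings interface,
# so embed_query returns one vector for the given text.
embeddings = DashScopeEmbeddings(model="text-embedding-v2")
vector = embeddings.embed_query("What is GPT Researcher?")
print(len(vector))  # dimensionality of the returned embedding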
