diff --git a/gpt_researcher/llm_provider/generic/base.py b/gpt_researcher/llm_provider/generic/base.py
index 05536f6f6..4c4757c3d 100644
--- a/gpt_researcher/llm_provider/generic/base.py
+++ b/gpt_researcher/llm_provider/generic/base.py
@@ -17,6 +17,7 @@
     "huggingface",
     "groq",
     "bedrock",
+    "dashscope",
     "xai",
 }
@@ -103,6 +104,11 @@ def from_provider(cls, provider: str, **kwargs: Any):
             model_id = kwargs.pop("model", None) or kwargs.pop("model_name", None)
             kwargs = {"model_id": model_id, **kwargs}
             llm = ChatBedrock(**kwargs)
+        elif provider == "dashscope":
+            _check_pkg("langchain_dashscope")
+            from langchain_dashscope import ChatDashScope
+
+            llm = ChatDashScope(**kwargs)
         elif provider == "xai":
             _check_pkg("langchain_xai")
             from langchain_xai import ChatXAI
diff --git a/gpt_researcher/memory/embeddings.py b/gpt_researcher/memory/embeddings.py
index 0c5e7ada5..f423dc2b9 100644
--- a/gpt_researcher/memory/embeddings.py
+++ b/gpt_researcher/memory/embeddings.py
@@ -18,6 +18,7 @@
     "huggingface",
     "nomic",
     "voyageai",
+    "dashscope",
     "custom",
 }
@@ -102,6 +103,10 @@ def __init__(self, embedding_provider: str, model: str, **embdding_kwargs: Any):
                     model=model,
                     **embdding_kwargs,
                 )
+            case "dashscope":
+                from langchain_community.embeddings import DashScopeEmbeddings
+
+                _embeddings = DashScopeEmbeddings(model=model, **embdding_kwargs)
             case _:
                 raise Exception("Embedding not found.")