diff --git a/cli.py b/cli.py
index ea796fe6f..25fb21ce5 100644
--- a/cli.py
+++ b/cli.py
@@ -12,6 +12,7 @@
 import argparse
 from argparse import RawTextHelpFormatter
 from uuid import uuid4
+import os
 
 from dotenv import load_dotenv
 
@@ -92,6 +93,7 @@ async def main(args):
 
     # Write the report to a file
     artifact_filepath = f"outputs/{uuid4()}.md"
+    os.makedirs("outputs", exist_ok=True)
     with open(artifact_filepath, "w") as f:
         f.write(report)
 
diff --git a/gpt_researcher/llm_provider/generic/base.py b/gpt_researcher/llm_provider/generic/base.py
index 4c4757c3d..624348491 100644
--- a/gpt_researcher/llm_provider/generic/base.py
+++ b/gpt_researcher/llm_provider/generic/base.py
@@ -102,7 +102,7 @@ def from_provider(cls, provider: str, **kwargs: Any):
 
             if "model" in kwargs or "model_name" in kwargs:
                 model_id = kwargs.pop("model", None) or kwargs.pop("model_name", None)
-                kwargs = {"model_id": model_id, **kwargs}
+                kwargs = {"model_id": model_id, "model_kwargs": kwargs}
             llm = ChatBedrock(**kwargs)
         elif provider == "dashscope":
             _check_pkg("langchain_dashscope")
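
Note on the base.py change: ChatBedrock from langchain_aws takes the model identifier via model_id and forwards provider-specific generation options through model_kwargs, rather than accepting them as arbitrary top-level keyword arguments, which appears to be why the leftover kwargs are now wrapped instead of spread. A minimal sketch of the patched behaviour, assuming langchain_aws is installed; the model name and temperature value below are illustrative only, not taken from this diff:

    # Illustrative only: mirrors the patched from_provider branch for Bedrock.
    from langchain_aws import ChatBedrock

    kwargs = {"model": "anthropic.claude-3-sonnet-20240229-v1:0", "temperature": 0.4}

    # Pull out the model identifier under either accepted key...
    model_id = kwargs.pop("model", None) or kwargs.pop("model_name", None)

    # ...and pass everything else through model_kwargs, which ChatBedrock
    # forwards in the Bedrock request body, instead of spreading it directly.
    llm = ChatBedrock(model_id=model_id, model_kwargs=kwargs)

The cli.py change creates the outputs/ directory before the report is written, so that open() on outputs/{uuid4()}.md does not raise FileNotFoundError on a fresh checkout where the directory does not yet exist.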