Skip to content

Commit

Permalink
Update: Add Azure OpenAI Support (#117)
Browse files Browse the repository at this point in the history
  • Loading branch information
koalazf99 authored Jan 4, 2024
1 parent 880e26a commit fe73ac4
Show file tree
Hide file tree
Showing 4 changed files with 151 additions and 3 deletions.
8 changes: 8 additions & 0 deletions backend/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,14 @@ Set your OpenAI key (if you use OpenAI API):
```bash
export OPENAI_API_KEY=<OPENAI_API_KEY>
```
**Note**: if you are using the [Azure OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview) Service, set the following environment variables instead:
```bash
export OPENAI_API_TYPE=azure
export OPENAI_API_BASE=<AZURE_API_BASE>
export OPENAI_API_VERSION=<AZURE_API_VERSION>
export OPENAI_API_KEY=<AZURE_API_KEY>
```
If you are starting your backend in docker, you should add these environment variables in `docker-compose.yml` as well.

Set your Anthropic key (if you use Anthropic API):
```bash
Expand Down
12 changes: 9 additions & 3 deletions backend/api/language_model.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import os

from backend.app import app
from real_agents.adapters.models import ChatOpenAI, ChatAnthropic
from real_agents.adapters.models import ChatOpenAI, ChatAnthropic, AzureChatOpenAI
from real_agents.adapters.llm import BaseLanguageModel

LLAMA_DIR = "PATH_TO_LLAMA_DIR"
Expand All @@ -24,8 +24,14 @@ def get_llm_list():
def get_llm(llm_name: str, **kwargs) -> BaseLanguageModel:
"""Gets the llm model by its name."""
if llm_name in ["gpt-3.5-turbo-16k", "gpt-4"]:
return ChatOpenAI(
model_name=llm_name,
openai_api_type = os.getenv("OPENAI_API_TYPE", "open_ai")
if openai_api_type == "open_ai":
chat_openai = ChatOpenAI
kwargs.update({"model_name": llm_name})
elif openai_api_type == "azure":
chat_openai = AzureChatOpenAI
kwargs.update({"deployment_name": llm_name})
return chat_openai(
streaming=True,
verbose=True,
**kwargs
Expand Down
3 changes: 3 additions & 0 deletions real_agents/adapters/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,18 @@

from real_agents.adapters.models.anthropic import ChatAnthropic
from real_agents.adapters.models.openai import ChatOpenAI
from real_agents.adapters.models.azure_openai import AzureChatOpenAI

# Public names exported by the models adapter package.
__all__ = [
    "ChatOpenAI",
    "ChatAnthropic",
    "ChatGooglePalm",
    "AzureChatOpenAI",
]

# Maps a model-type identifier string to its chat-model class —
# presumably used to look classes up from serialized configuration;
# confirm against callers. Keys must stay stable: they are part of
# whatever persisted/config format references them.
type_to_cls_dict = {
    "chat_anthropic": ChatAnthropic,
    "chat_google_palm": ChatGooglePalm,
    "chat_openai": ChatOpenAI,
    "azure_chat_openai": AzureChatOpenAI,
}
131 changes: 131 additions & 0 deletions real_agents/adapters/models/azure_openai.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
"""Azure OpenAI chat wrapper."""
from __future__ import annotations

import logging
from typing import Any, Dict, Mapping

from pydantic import root_validator

from real_agents.adapters.models.openai import ChatOpenAI
from langchain.schema import ChatResult
from langchain.utils import get_from_dict_or_env

logger = logging.getLogger(__name__)


class AzureChatOpenAI(ChatOpenAI):
    """Wrapper around the Azure OpenAI Chat Completion API.

    To use this class you must have a deployed model on Azure OpenAI. Use
    ``deployment_name`` in the constructor to refer to the "Model deployment
    name" in the Azure portal.

    In addition, you should have the ``openai`` python package installed, and
    the following environment variables set or passed in the constructor in
    lower case:

    - ``OPENAI_API_TYPE`` (default: ``azure``)
    - ``OPENAI_API_KEY``
    - ``OPENAI_API_BASE``
    - ``OPENAI_API_VERSION``

    For example, if you have ``gpt-35-turbo`` deployed, with the deployment
    name ``35-turbo-dev``, the constructor should look like:

    .. code-block:: python

        AzureChatOpenAI(
            deployment_name="35-turbo-dev",
            openai_api_version="2023-03-15-preview",
        )

    Be aware the API version may change.

    Any parameters that are valid to be passed to the openai.create call can
    be passed in, even if not explicitly saved on this class.
    """

    # Azure "Model deployment name"; sent to the API as the `engine` param.
    deployment_name: str = ""
    # Azure-specific connection settings. Each may alternatively come from
    # the matching OPENAI_* environment variable (see validate_environment).
    openai_api_type: str = "azure"
    openai_api_base: str = ""
    openai_api_version: str = ""
    openai_api_key: str = ""
    openai_organization: str = ""

    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment.

        Resolves each connection setting from the constructor kwargs or the
        corresponding environment variable, then configures the ``openai``
        module with them. NOTE(review): this mutates *module-level* openai
        state (``openai.api_type`` etc.), so it affects every other user of
        the ``openai`` package in the process — not just this instance.
        """
        openai_api_key = get_from_dict_or_env(
            values,
            "openai_api_key",
            "OPENAI_API_KEY",
        )
        openai_api_base = get_from_dict_or_env(
            values,
            "openai_api_base",
            "OPENAI_API_BASE",
        )
        openai_api_version = get_from_dict_or_env(
            values,
            "openai_api_version",
            "OPENAI_API_VERSION",
        )
        openai_api_type = get_from_dict_or_env(
            values,
            "openai_api_type",
            "OPENAI_API_TYPE",
        )
        # Organization is optional — default to empty rather than raising.
        openai_organization = get_from_dict_or_env(
            values,
            "openai_organization",
            "OPENAI_ORGANIZATION",
            default="",
        )
        try:
            import openai

            # Point the (pre-1.0 style) openai client at the Azure endpoint.
            openai.api_type = openai_api_type
            openai.api_base = openai_api_base
            openai.api_version = openai_api_version
            openai.api_key = openai_api_key
            if openai_organization:
                openai.organization = openai_organization
        except ImportError:
            raise ValueError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            )
        try:
            # ChatCompletion only exists in openai<1.0; newer releases
            # removed it, which this surfaces as a version error below.
            values["client"] = openai.ChatCompletion
        except AttributeError:
            raise ValueError(
                "`openai` has no `ChatCompletion` attribute, this is likely "
                "due to an old version of the openai package. Try upgrading it "
                "with `pip install --upgrade openai`."
            )
        # `n` and `streaming` are fields inherited from ChatOpenAI —
        # presumably number of completions and streaming flag; the API
        # cannot stream more than one choice at a time.
        if values["n"] < 1:
            raise ValueError("n must be at least 1.")
        if values["n"] > 1 and values["streaming"]:
            raise ValueError("n must be 1 when streaming.")
        return values

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling OpenAI API.

        Azure routes requests by deployment, passed as ``engine`` on top of
        the parent class's parameters.
        """
        return {
            **super()._default_params,
            "engine": self.deployment_name,
        }

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {**self._default_params}

    @property
    def _llm_type(self) -> str:
        # Identifier used by langchain's serialization/telemetry.
        return "azure-openai-chat"

    def _create_chat_result(self, response: Mapping[str, Any]) -> ChatResult:
        # Azure returns finish_reason == "content_filter" (with no content)
        # when its moderation layer blocks the response; fail loudly rather
        # than returning an empty message.
        for res in response["choices"]:
            if res.get("finish_reason", None) == "content_filter":
                raise ValueError(
                    "Azure has not provided the response due to a content"
                    " filter being triggered"
                )
        return super()._create_chat_result(response)

0 comments on commit fe73ac4

Please sign in to comment.