Skip to content

Commit

Permalink
Add and document ability to use LiteLLM Logging Observability tools
Browse files Browse the repository at this point in the history
  • Loading branch information
MarkRx committed Aug 19, 2024
1 parent aa87bc6 commit 260b22b
Show file tree
Hide file tree
Showing 3 changed files with 65 additions and 6 deletions.
21 changes: 21 additions & 0 deletions docs/docs/usage-guide/additional_configurations.md
Original file line number Diff line number Diff line change
Expand Up @@ -91,3 +91,24 @@ user="""
"""
```
Note that the new prompt will need to generate an output compatible with the relevant [post-process function](https://github.com/Codium-ai/pr-agent/blob/main/pr_agent/tools/pr_description.py#L137).

## Integrating with Logging Observability Platforms

Various logging observability tools can be used out of the box when using the default LiteLLM AI Handler. Simply configure the LiteLLM callback settings in `configuration.toml` and set environment variables according to the LiteLLM [documentation](https://docs.litellm.ai/docs/).

For example, to use [LangSmith](https://www.langchain.com/langsmith) you can add the following to your `configuration.toml` file:
```
[litellm]
...
success_callback = ["langsmith"]
failure_callback = ["langsmith"]
service_callback = []
```

Then set the following environment variables:

```
LANGSMITH_API_KEY=<api_key>
LANGSMITH_PROJECT=<project>
LANGSMITH_BASE_URL=<url>
```
47 changes: 41 additions & 6 deletions pr_agent/algo/ai_handlers/litellm_ai_handler.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import os
import requests
import boto3
import litellm
import openai
from litellm import acompletion
from tenacity import retry, retry_if_exception_type, stop_after_attempt

from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler
from pr_agent.config_loader import get_settings
from pr_agent.log import get_logger
Expand Down Expand Up @@ -44,6 +44,12 @@ def __init__(self):
litellm.use_client = True
if get_settings().get("LITELLM.DROP_PARAMS", None):
litellm.drop_params = get_settings().litellm.drop_params
if get_settings().get("LITELLM.SUCCESS_CALLBACK", None):
litellm.success_callback = get_settings().litellm.success_callback
if get_settings().get("LITELLM.FAILURE_CALLBACK", None):
litellm.failure_callback = get_settings().litellm.failure_callback
if get_settings().get("LITELLM.SERVICE_CALLBACK", None):
litellm.service_callback = get_settings().litellm.service_callback
if get_settings().get("OPENAI.ORG", None):
litellm.organization = get_settings().openai.org
if get_settings().get("OPENAI.API_TYPE", None):
Expand Down Expand Up @@ -90,27 +96,56 @@ def prepare_logs(self, response, system, user, resp, finish_reason):
return response_log

def add_litellm_callbacks(self, kwargs) -> dict:  # fixed typo: was `selfs`
    """Enrich litellm completion kwargs with callback observability metadata.

    Captures the current command and PR URL from the most recently logged
    record's bound ``extra`` context (via a temporary Loguru sink), then
    builds callback-specific metadata for the configured Langfuse/LangSmith
    callbacks and attaches it as ``kwargs["metadata"]``.

    Args:
        kwargs: The keyword-argument dict about to be passed to
            ``litellm.acompletion``; mutated in place.

    Returns:
        The same ``kwargs`` dict with a ``"metadata"`` entry added.
    """
    captured_extra = []

    def capture_logs(message):
        # Pull command / pr_url out of the Loguru record's bound context.
        record = message.record
        log_entry = {}
        # Default to {} so a record without 'extra' cannot raise
        # AttributeError on the chained .get() (the original used
        # record.get('extra', None).get(...), which breaks when absent).
        extra = record.get('extra', {}) or {}
        if extra.get('command', None) is not None:
            log_entry.update({"command": extra["command"]})
        if extra.get('pr_url', None) is not None:
            log_entry.update({"pr_url": extra["pr_url"]})

        # Append the log entry to the captured_extra list
        captured_extra.append(log_entry)

    # Temporarily add the custom sink to Loguru so the debug call below
    # flows through capture_logs carrying the current bound context.
    handler_id = get_logger().add(capture_logs)
    get_logger().debug("Capturing logs for litellm callbacks")
    get_logger().remove(handler_id)

    # Guard: if nothing was captured, fall back to an empty context
    # instead of calling .get() on None (original could raise here).
    context = captured_extra[0] if captured_extra else {}

    command = context.get("command", "unknown")
    pr_url = context.get("pr_url", "unknown")
    git_provider = get_settings().config.git_provider

    metadata = dict()
    callbacks = litellm.success_callback + litellm.failure_callback + litellm.service_callback
    if "langfuse" in callbacks:
        # Langfuse expects trace_name / trace_metadata keys.
        metadata.update({
            "trace_name": command,
            "tags": [git_provider, command],
            "trace_metadata": {
                "command": command,
                "pr_url": pr_url,
            },
        })
    if "langsmith" in callbacks:
        # LangSmith expects run_name and nested extra.metadata keys.
        metadata.update({
            "run_name": command,
            "tags": [git_provider, command],
            "extra": {
                "metadata": {
                    "command": command,
                    "pr_url": pr_url,
                }
            },
        })

    # Attach the assembled metadata for the litellm callbacks.
    kwargs["metadata"] = metadata

    return kwargs

Expand All @@ -125,7 +160,7 @@ def deployment_id(self):
retry=retry_if_exception_type((openai.APIError, openai.APIConnectionError, openai.APITimeoutError)), # No retry on RateLimitError
stop=stop_after_attempt(OPENAI_RETRIES)
)
async def chat_completion(self, model: str, system: str, user: str, temperature: float = 0.2, img_path: str = None):
async def chat_completion(self, model: str, system: str, user: str, metadata:dict = None, temperature: float = 0.2, img_path: str = None):
try:
resp, finish_reason = None, None
deployment_id = self.deployment_id
Expand Down
3 changes: 3 additions & 0 deletions pr_agent/settings/configuration.toml
Original file line number Diff line number Diff line change
Expand Up @@ -265,6 +265,9 @@ pr_commands = [
# use_client = false
# drop_params = false
enable_callbacks = false
success_callback = []
failure_callback = []
service_callback = []

[pr_similar_issue]
skip_comments = false
Expand Down

0 comments on commit 260b22b

Please sign in to comment.