Skip to content

Commit

Permalink
update examples
Browse files Browse the repository at this point in the history
  • Loading branch information
phact committed Jun 19, 2024
1 parent 99b818a commit b727a8f
Show file tree
Hide file tree
Showing 3 changed files with 129 additions and 20 deletions.
73 changes: 73 additions & 0 deletions examples/python/function_calling/astra_data_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
import time
from openai import OpenAI
from dotenv import load_dotenv
from astra_assistants import patch
from openai.lib.streaming import AssistantEventHandler
from typing_extensions import override
from openai.types.beta.threads.runs import ToolCall
import logging

from astra_assistants.astra_assistants_event_handler import AstraEventHandler
from astra_assistants.tools.astra_data_api import AstraDataAPITool

logger = logging.getLogger(__name__)

# Load provider credentials (OpenAI / Astra keys, etc.) from the local .env file.
load_dotenv("./.env")

# Patch the OpenAI client so requests are routed through astra-assistants.
client = patch(OpenAI())

# Model used for this example.  Whichever entry you pick, make sure the
# matching provider credentials are present in your environment/.env file.
model = "gpt-4-1106-preview"

# Alternative models — uncomment one line to switch providers:
# model = "anthropic/claude-3-opus-20240229"
# model = "anthropic/claude-3-sonnet-20240229"
# model = "gpt-3.5-turbo"
# model = "cohere_chat/command-r"
# model = "perplexity/mixtral-8x7b-instruct"
# model = "perplexity/pplx-70b-online"
# model = "anthropic.claude-v2"
# model = "gemini/gemini-1.5-pro-latest"
# model = "meta.llama2-13b-chat-v1"

print(f"making assistant for model {model}")

# Connection details for the Astra DB Data API collection the tool will query.
# Replace the placeholders below with the real values shown in the Astra DB UI.
db_url = "https://<db_id>-<region>.apps.astra.datastax.com"
collection_name = "movie_reviews"
namespace = "default"

# Tool the assistant can call to search the collection.
# NOTE(review): vectorize=False presumably means embeddings are computed
# client-side via openai_client/embedding_model rather than by Astra's
# server-side vectorize service — confirm against AstraDataAPITool docs.
data_api_tool = AstraDataAPITool(
    db_url=db_url,
    collection_name=collection_name,
    namespace=namespace,
    vectorize=False,
    openai_client=client,
    embedding_model="text-embedding-ada-002",
)

# Create the assistant using the model selected above.
# Fix: the original hard-coded model="gpt-3.5-turbo" here, which contradicted
# the `model` variable chosen earlier and the "making assistant for model ..."
# banner already printed — the `model` variable was otherwise dead.
assistant = client.beta.assistants.create(
    name="Smart bot",
    instructions="You are a bot. Use the provided functions to answer questions about movies.",
    model=model,
    tools=[data_api_tool.to_function()],
)

# Event handler that executes the registered tool when the assistant emits a
# matching tool call during a streamed run.
event_handler = AstraEventHandler(client)
event_handler.register_tool(data_api_tool)


# Start a fresh conversation thread and seed it with the user's question.
thread = client.beta.threads.create()
client.beta.threads.messages.create(
    thread.id,
    content="What's a good, short kids movie?",
    role="user",
)

# Stream the run, forcing the assistant to call the Data API tool, and echo
# the generated text as it arrives.
with client.beta.threads.runs.create_and_stream(
    thread_id=thread.id,
    assistant_id=assistant.id,
    event_handler=event_handler,
    tool_choice=data_api_tool.tool_choice_object(),
) as stream:
    for delta in stream.text_deltas:
        print(delta, end="", flush=True)
    print()
print(f"tool_outputs: {event_handler.tool_outputs}")
24 changes: 16 additions & 8 deletions examples/python/retrieval/basic.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
import time
from openai import OpenAI
from dotenv import load_dotenv
Expand All @@ -6,23 +7,30 @@
load_dotenv("./.env")

def run_with_assistant(assistant, client):
print(f"created assistant: {assistant.name}")
print(f"using assistant: {assistant}")
print("Uploading file:")
# Upload the file
file = client.files.create(
file=open(
"./examples/python/language_models_are_unsupervised_multitask_learners.pdf",
"./tests/fixtures/language_models_are_unsupervised_multitask_learners.pdf",
"rb",
),
purpose="assistants",
)
print("adding file id to assistant")

vector_store = client.beta.vector_stores.create(
name="papers",
file_ids=[file.id]
)

print("adding vector_store id to assistant")
# Update Assistant
assistant = client.beta.assistants.update(
assistant.id,
tools=[{"type": "retrieval"}],
file_ids=[file.id],
tools=[{"type": "file_search"}],
tool_resources={"file_search": {"vector_store_ids": [vector_store.id]}},
)
print(f"updated assistant: {assistant}")
user_message = "What are some cool math concepts behind this ML paper pdf? Explain in two sentences."
print("creating persistent thread and message")
thread = client.beta.threads.create()
Expand All @@ -49,10 +57,10 @@ def run_with_assistant(assistant, client):
)
time.sleep(0.5)

print("-->", end="")
print(f"thread.id {thread.id}")
print(f"{assistant.model} =>")
response = client.beta.threads.messages.list(thread_id=thread.id)
print(f"{response.data[0].content[0].text.value}", end="")
print("\n")
print(response.data[0].content[0].text.value)


client = patch(OpenAI())
Expand Down
52 changes: 40 additions & 12 deletions examples/python/streaming_retrieval/basic.py
Original file line number Diff line number Diff line change
@@ -1,29 +1,42 @@
import json
import logging

from openai import OpenAI
from dotenv import load_dotenv
from openai.lib.streaming import AssistantEventHandler
from typing_extensions import override

from astra_assistants import patch


load_dotenv("./.env")
load_dotenv("../../../.env")

def run_with_assistant(assistant, client):
print(f"created assistant: {assistant.name}")
print(f"using assistant: {assistant}")
print("Uploading file:")
# Upload the file
file = client.files.create(
file=open(
"./examples/python/language_models_are_unsupervised_multitask_learners.pdf",
"./tests/fixtures/language_models_are_unsupervised_multitask_learners.pdf",
"rb",
),
purpose="assistants",
)
print("adding file id to assistant")

vector_store = client.beta.vector_stores.create(
name="papers",
file_ids=[file.id]
)

print("adding vector_store id to assistant")
# Update Assistant
assistant = client.beta.assistants.update(
assistant.id,
tools=[{"type": "retrieval"}],
file_ids=[file.id],
tools=[{"type": "file_search"}],
tool_resources={"file_search": {"vector_store_ids": [vector_store.id]}},
)
print(f"updated assistant: {assistant}")
user_message = "What are some cool math concepts behind this ML paper pdf? Explain in two sentences."
print("creating persistent thread and message")
thread = client.beta.threads.create()
Expand All @@ -32,16 +45,31 @@ def run_with_assistant(assistant, client):
)
print(f"> {user_message}")

class EventHandler(AssistantEventHandler):
def __init__(self):
super().__init__()
self.on_text_created_count = 0
self.on_text_delta_count = 0

@override
def on_run_step_done(self, run_step) -> None:
print("file_search")
matches = []
for tool_call in run_step.step_details.tool_calls:
matches = tool_call.file_search
print(json.dumps(tool_call.file_search))
assert len(matches) > 0, "No matches found"

event_handler = EventHandler()

print(f"creating run")
with client.beta.threads.runs.create_and_stream(
thread_id=thread.id,
assistant_id=assistant.id,
thread_id=thread.id,
assistant_id=assistant.id,
event_handler=event_handler,
) as stream:
for text in stream.text_deltas:
print(text, end="", flush=True)
print()

print("\n")
for part in stream.text_deltas:
print(part, end="", flush=True)


client = patch(OpenAI())
Expand Down

0 comments on commit b727a8f

Please sign in to comment.