Skip to content

Commit

Permalink
added publishing and improved rag example
Browse files Browse the repository at this point in the history
  • Loading branch information
Anton Kulaga committed Jun 8, 2024
1 parent 3f3649d commit 90289b6
Show file tree
Hide file tree
Showing 10 changed files with 67 additions and 24 deletions.
5 changes: 5 additions & 0 deletions .pypirc
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
[distutils]
index-servers=pypi
[pypi]
repository=https://upload.pypi.org/legacy/
username=antonkulaga
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -65,4 +65,4 @@ for _ in range(exchanges):

All prompts that we use are stored in yaml files that you can easily overload.

The only complex dependency that we use is Mako for prompt templates.
The only complex (but not mandatory) dependency that we use is Mako for prompt templates.
6 changes: 3 additions & 3 deletions environment.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,10 @@ dependencies:
- python=3.10
- pip
- loguru
- click
- requests
- pytest
- typer
- pytest #for tests
- typer #for CLI
- twine #for publishing
- pip:
- litellm>=1.40.6
- jupyter
Expand Down
53 changes: 37 additions & 16 deletions examples/rag.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import pprint
import sys
from pathlib import Path
from typing import Dict, Any

Expand All @@ -15,6 +16,11 @@

app = typer.Typer(no_args_is_help=True)

def configure_logger(level: str) -> None:
    """Replace all existing loguru sinks with a single stdout sink at *level*."""
    # Clear any sinks registered earlier so log levels do not accumulate.
    logger.remove()
    destination = sys.stdout
    logger.add(destination, level=level)

@app.command()
def search(query: str, limit: int = 10):
"""
Expand All @@ -31,35 +37,50 @@ def search(query: str, limit: int = 10):


@app.command()
def rapamycin():
def rapamycin(prompt_name: str = "rapamycin_case", sub_prompt: str = "with_requirements", log_level: str = "INFO"):
configure_logger(log_level)
logger.add("logs/rag_rapamycin.txt", rotation="1 MB")
load_dotenv()


# setting up relative paths to define output and load prompts
current_folder: Path = Path(__file__).parent
example_prompts = current_folder / "example_prompts.yaml"
output = Path(__file__).parent.parent / "output" / "examples"
prompts = yaml.safe_load(example_prompts.open("r"))
question = prompts[prompt_name][sub_prompt]


scientist: ChatAgent = ChatAgent(llm_options = LLAMA3,
role = "scientist",
goal = "Research the topics in the most comprehensive way, using search in academic literature and providing sources",
task="Address the research question in the most comprehensive way",
tools = [literature_search])

# ADDING LOGGER HANDLERS:
scientist.memory.add_on_message(lambda m: logger.debug(f"SCIENTIST MESSAGE: {m}"))
scientist.memory.add_on_tool_call(lambda f: logger.debug(f"SCIENTIST FUNCTION: {f}"))
scientist.memory.add_on_tool_result(lambda m: logger.debug(f"SCIENTIST TOOL result from {m.name} with tool call id {m.tool_call_id} is {m.content}"))


critic: ChatAgent = ChatAgent(llm_options = LLAMA3,
role = "critic",
goal = "Evaluate the answer according to the criteria provided",
task="Evaluate the answer according to the criteria provided and make recommendations to improve")
#TODO: write down criticism
role = "critic",
goal = "Evaluate the answer according to the criteria provided",
task="Evaluate the answer according to the criteria provided and make recommendations to improve")

scientist.memory.add_on_message(lambda m: logger.info(f"MESSAGE: {m}"))
scientist.memory.add_on_tool_call(lambda t: logger.info(f"FUNCTION: {t}"))
# ADDING CRITICS HANDLERS:
critic.memory.add_on_message(lambda m: logger.debug(f"CRITIC MESSAGE: {m}"))

# setting up relative paths to define output and load prompts
current_folder: Path = Path(__file__).parent
example_prompts = current_folder / "example_prompts.yaml"
output = Path(__file__).parent.parent / "output" / "examples"
prompts = yaml.safe_load(example_prompts.open("r"))
answer = scientist.query(question, output=output / prompt_name / f"{sub_prompt}_initial_answer.txt")
logger.info(f"INITIAL ANSWER: {answer}")

rapamycin = prompts["rapamycin_case"]["with_requirements"]
for_review = f"""
The question that scientist asked was: {question}
The answer that she gave was: {answer}
"""

result = scientist.query(rapamycin, output=output / "rapamycin" / "with_requirements.txt")
print("RESULT IS:")
pprint.pprint(result)
review_results = critic.query(for_review, output=output / prompt_name / f"{sub_prompt}_answer_review.txt")
logger.info(f"REVIEW RESULTS: {review_results}")



Expand Down
1 change: 0 additions & 1 deletion examples/two_agents_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ def purchase_example():

exchanges: int = 3
customer.memory.add_on_message(lambda m: logger.info(f"Customer: {m}") if m.role == "user" else logger.info(f"Storekeeper: {m}"))

customer_reply = "Hi."
for _ in range(exchanges):
storekeeper_reply = storekeeper.query(customer_reply)
Expand Down
2 changes: 2 additions & 0 deletions just_agents/llm_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,6 +191,8 @@ def _query(self, run_callbacks: bool = True, output: Optional[Path] = None) -> s
result: str = self.memory.last_message.content if self.memory.last_message is not None and self.memory.last_message.content is not None else str(
self.memory.last_message)
if output is not None:
if not output.parent.exists():
output.parent.mkdir(parents=True, exist_ok=True)
output.write_text(result)
return result

Expand Down
9 changes: 9 additions & 0 deletions just_agents/memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,9 @@ class Memory:
def add_on_message(self, handler: OnMessageCallable):
    """Register *handler* to be invoked for every message added to this memory."""
    self.on_message.append(handler)

def add_on_tool_result(self, handler: OnMessageCallable):
    """Register *handler* that fires only for tool-result messages.

    Wraps *handler* so it is invoked just when the message role is "tool";
    messages with any other role are ignored (the wrapper returns None).
    """
    self.add_on_message(lambda m: handler(m) if m.role == "tool" else None)

def add_on_tool_call(self, fun: OnFunctionCallable):
"""
Adds handler only to function calls to track what exactly was called
Expand All @@ -34,6 +37,12 @@ def tool_handler(message: Message) -> None:
def remove_on_message(self, handler: OnMessageCallable):
    """Detach a previously registered message callback.

    :param handler: the exact callable that was passed to add_on_message.

    Bug fix: the original comprehension filtered with ``m == handler``,
    which KEPT only the handler being removed and silently discarded every
    other registered callback. Removal must keep the handlers that are
    *not* equal to the one being detached.
    """
    self.on_message = [m for m in self.on_message if m != handler]

def add_system_message(self, prompt: str, run_callbacks: bool = True):
    """Append *prompt* to memory as a system-role message.

    :param prompt: text to store as the system message content.
    :param run_callbacks: forwarded to add_message; when True the registered
        on_message handlers are invoked for the new message.
    :return: whatever add_message returns for the stored message.
    """
    return self.add_message(Message(role="system", content=prompt), run_callbacks=run_callbacks)

def add_user_message(self, prompt: str, run_callbacks: bool = True):
    """Append *prompt* to memory as a user-role message.

    :param prompt: text to store as the user message content.
    :param run_callbacks: forwarded to add_message; when True the registered
        on_message handlers are invoked for the new message.
    :return: whatever add_message returns for the stored message.
    """
    return self.add_message(Message(role="user", content=prompt), run_callbacks=run_callbacks)


def add_message(self, message: Message, run_callbacks: bool = True):
"""
Expand Down
2 changes: 1 addition & 1 deletion just_agents/tools/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,6 @@ def get_semantic_paper(query: str,
return results



def hybrid_search(text: str,
collections = ["aging_papers_paragraphs_bge_base_en_v1.5", "aging_papers_paragraphs_specter2"],
limit: int = 10,
Expand Down Expand Up @@ -112,6 +111,7 @@ def hybrid_search(text: str,
results = response.json()
return ".".join(results).replace("\\n", "\n") if string else results


def literature_search(query: str, limit: int = 20):
"""
Search in the academic literature
Expand Down
4 changes: 4 additions & 0 deletions publish.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/bash
# Build the sdist + universal wheel and upload them to PyPI.
# Bug fix: the original first line was "##!/bin/bash" — the doubled '#'
# turns the shebang into an ordinary comment, so the kernel never selects
# bash as the interpreter.
set -euo pipefail  # abort on the first failing step so a broken build is never uploaded

rm -rf dist                                    # drop artifacts from previous builds
python setup.py sdist bdist_wheel --universal  # source distribution + py2/py3 wheel
twine upload --verbose dist/* --config-file ~/.pypirc
7 changes: 5 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
with codecs.open(os.path.join(here, "README.md"), encoding="utf-8") as fh:
long_description = "\n" + fh.read()

VERSION = '0.0.3'
VERSION = '0.0.4'
DESCRIPTION = 'Just Agents'
LONG_DESCRIPTION = 'LLM Agents that are implemented without unnecessary complexity'

Expand All @@ -21,11 +21,14 @@
long_description_content_type="text/markdown",
long_description=long_description,
packages=find_packages(),
install_requires=["litellm", "pydantic", "numpydoc", "loguru", "requests"],
install_requires=["litellm>=1.40.6", "numpydoc", "loguru", "requests"],
extras_require={
'tools': [
# some default tools
'semanticscholar>=0.8.1'
],
'templates': [
'mako'
]
},
keywords=['python', 'llm', 'science', 'review', 'agents', 'AI'],
Expand Down

0 comments on commit 90289b6

Please sign in to comment.