Skip to content

Commit

Permalink
add: citations
Browse files Browse the repository at this point in the history
  • Loading branch information
glorenzo972 committed Aug 31, 2024
1 parent 53859f8 commit aaeaa94
Show file tree
Hide file tree
Showing 9 changed files with 549 additions and 78 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,10 @@
*Andrea Sponziello*
### **Copyright**: *Tiledesk SRL*

## [2024-08-31]
### 0.2.12
- add: citations

## [2024-07-31]
### 0.2.11
- fix: log
Expand Down
4 changes: 3 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "tilellm"
version = "0.2.11"
version = "0.2.12"
description = "tiledesk for RAG"
authors = ["Gianluca Lorenzo <gianluca.lorenzo@gmail.com>"]
repository = "https://github.com/Tiledesk/tiledesk-llm"
Expand Down Expand Up @@ -41,6 +41,8 @@ docx2txt= "0.8"
wikipedia= "1.4.0"
html2text= "2024.2.26"
psutil= "6.0.0"
httpx= "0.27.0"
gql= "3.5.0"


[tool.poetry.dependencies.uvicorn]
Expand Down
22 changes: 20 additions & 2 deletions tilellm/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,8 @@
ScrapeStatusReq,
ScrapeStatusResponse,
PineconeIndexingResult, RetrievalResult, PineconeNamespaceResult,
PineconeDescNamespaceResult, PineconeItems, QuestionToLLM, SimpleAnswer)
PineconeDescNamespaceResult, PineconeItems, QuestionToLLM, SimpleAnswer,
QuestionToAgent)

from tilellm.store.redis_repository import redis_xgroup_create
from tilellm.controller.controller import (ask_with_memory,
Expand All @@ -36,7 +37,7 @@
get_desc_namespace,
get_list_namespace,
get_sources_namespace,
ask_to_llm)
ask_to_llm, ask_to_agent)

import logging

Expand Down Expand Up @@ -350,6 +351,23 @@ async def post_ask_with_memory_main(question_answer: QuestionAnswer):
return JSONResponse(content=result.model_dump())


@app.post("/api/agent", response_model=SimpleAnswer)
async def post_ask_to_agent_main(question_to_agent: QuestionToAgent):
    """
    Answer a question by delegating to an agent (e.g. the Shopify agent).

    :param question_to_agent: QuestionToAgent payload with the question,
        model configuration and the tool definitions the agent may use.
    :return: SimpleAnswer content wrapped in a JSONResponse.
    """
    # Log the incoming payload at debug level only; the stray print() that
    # duplicated this line was removed (it bypassed the logging config).
    logger.debug(question_to_agent)

    result = await ask_to_agent(question_to_agent)

    logger.debug(result)
    return JSONResponse(content=result.model_dump())



@app.post("/api/ask", response_model=SimpleAnswer)
async def post_ask_to_llm_main(question: QuestionToLLM):
"""
Expand Down
130 changes: 130 additions & 0 deletions tilellm/agents/shopify_agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
from langchain_core.prompts import PromptTemplate



from langchain.agents import create_react_agent, AgentExecutor
from langchain_core.tools import Tool
from langchain_openai import ChatOpenAI

from tilellm.tools.shopify_tool import get_graphql_answer
from tilellm.shared.const import react_prompt_template
from functools import partial


def lookup(question_to_agent, chat_model, chat_history: str):
    """
    Answer a Shopify-related question through a ReAct agent.

    The agent is prompted to build a GraphQL query for the Shopify Admin API,
    run it via a tool, and summarize the result in the question's language.

    :param question_to_agent: request object exposing ``question`` and a
        ``tools`` list; one entry must hold a ``'shopify'`` mapping
        (RootModel-style, read through ``.root``) with ``api_key`` and ``url``.
    :param chat_model: LangChain chat model that drives the agent.
    :param chat_history: serialized chat history forwarded to the agent.
    :return: the dict produced by ``AgentExecutor.invoke`` (contains "output").
    :raises ValueError: if no 'shopify' tool configuration is present.
    """
    template = """
        Follow these instructions exactly to answer the question: {question}
        1. Create a GraphQL query for Shopify Admin client that answers the question.
        - Use ONLY GraphQL format, no comments.
        - Use parameters in the same language as the question.
        - Use this schema: {schema}
        - Limit results to a maximum of 10 items.
        2. Query format:
        query {{
        // Your code here
        }}
        3. Examples of valid queries:
        - query {{ products(first: 10) {{ edges {{ node {{ id title price }} }} }} }}
        - query {{ products(first: 10, query: "price:<50") {{ edges {{ node {{ title variants(first: 1) {{ edges {{ node {{ price }} }} }} }} }} }} }}
        4. Present ONLY the GraphQL query, nothing else.
        5. After receiving the query results, provide the final answer:
        - Use the same language as the original question.
        - Interpret and summarize key information from the query results.
        - Be clear and concise.
        - If there are no products that answer the question, state this explicitly.
        Remember: Follow these instructions to the letter. Do not add explanations or comments that are not requested.
        """
    question = question_to_agent.question

    # Find the Shopify credentials among the configured tools. Initialize to
    # None so a missing configuration raises a clear error below instead of
    # a NameError when the names are first used.
    api_key = None
    url = None
    for tool in question_to_agent.tools:
        if 'shopify' in tool:
            shopify_tool = tool['shopify']

            # Safely access the root dictionary
            api_key = shopify_tool.root.get('api_key')
            url = shopify_tool.root.get('url')
            break  # Exit the loop once 'shopify' is found

    if url is None and api_key is None:
        raise ValueError("No 'shopify' tool configuration found in question_to_agent.tools")

    # Bind the credentials so the agent tool only needs the GraphQL query.
    get_graphql_answer_with_key = partial(get_graphql_answer, url=url, api_key=api_key)
    tools_for_agent_shopify = [
        Tool(
            name="Retrieve content from Shopify given GraphQL query",
            func=get_graphql_answer_with_key,
            description="useful when you need get the useful information from shopify"

        ),
    ]

    # Subset of the Shopify Admin product schema the model is allowed to use.
    schema = """
         products(first: 10, query: "") {
            edges {
              node {
                title
                variants(first: 1, last: 10) {
                  edges {
                    node {
                      price
                      availableForSale
                      barcode
                      displayName
                      id
                    }
                  }
                }
                bodyHtml
                descriptionHtml
                id
                productType
                tags
                totalInventory
              }
            }
          }
        """
    prompt_template = PromptTemplate(
        input_variables=["question", "schema"], template=template
    )

    # react_prompt = hub.pull("hwchase17/react")
    react_prompt = PromptTemplate.from_template(react_prompt_template)

    agent = create_react_agent(
        llm=chat_model, tools=tools_for_agent_shopify, prompt=react_prompt
    )

    # Capped iterations plus forced stop keep a confused agent from looping;
    # handle_parsing_errors lets it recover from malformed LLM tool calls.
    agent_executor = AgentExecutor(
        agent=agent,
        tools=tools_for_agent_shopify,
        verbose=True,
        max_iterations=4,
        early_stopping_method="force",
        handle_parsing_errors=True
    )

    result = agent_executor.invoke(
        input={"input": prompt_template.format_prompt(question=question, schema=schema),
               "chat_history": chat_history}
    )

    return result
Loading

0 comments on commit aaeaa94

Please sign in to comment.