Skip to content

Commit

Permalink
modify: source
Browse files Browse the repository at this point in the history
  • Loading branch information
glorenzo972 committed Sep 4, 2024
1 parent aaeaa94 commit 34e2109
Show file tree
Hide file tree
Showing 4 changed files with 17 additions and 8 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,10 @@
*Andrea Sponziello*
### **Copyright**: *Tiledesk SRL*

## [2024-09-04]
### 0.2.13
- modify: source on qa

## [2024-08-31]
### 0.2.12
- add: citations
Expand Down
4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "tilellm"
version = "0.2.12"
version = "0.2.13"
description = "tiledesk for RAG"
authors = ["Gianluca Lorenzo <gianluca.lorenzo@gmail.com>"]
repository = "https://github.com/Tiledesk/tiledesk-llm"
Expand All @@ -18,7 +18,7 @@ jsonschema= "4.23.0"
redis= "^5.0.7"
aioredis= "2.0.1"
#redismutex = "^1.0.0"
langchain = "0.2.11"
langchain = "0.2.16"
jq = "1.7.0"
openai = "1.37.1"
langchain-openai = "0.1.19"
Expand Down
15 changes: 10 additions & 5 deletions tilellm/controller/controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -345,6 +345,7 @@ def get_session_history(session_id: str) -> BaseChatMessageHistory:


if question_answer.citations:

rag_chain_from_docs = (
RunnablePassthrough.assign(context=(lambda x: format_docs_with_id(x["context"])))
| qa_prompt
Expand All @@ -355,23 +356,26 @@ def get_session_history(session_id: str) -> BaseChatMessageHistory:

chain_w_citations = RunnablePassthrough.assign(context=retrieve_docs).assign(
answer=rag_chain_from_docs
)
).assign(only_answer=lambda text: text["answer"].answer)

conversational_rag_chain = RunnableWithMessageHistory(
chain_w_citations,
get_session_history,
input_messages_key="input",
history_messages_key="chat_history",
output_messages_key="answer",
output_messages_key="only_answer",

)

result = conversational_rag_chain.invoke(
{"input": question_answer.question, }, # 'chat_history': chat_history_list},
{"input": question_answer.question }, # 'chat_history': chat_history_list},
config={"configurable": {"session_id": uuid.uuid4().hex}
} # constructs a key "abc123" in `store`.
)

# print(result.keys())
# from pprint import pprint
# pprint(result["answer"])
# print(f"===== {result['only_ans']} =====")
citations = result['answer'].citations
result['answer'], success = verify_answer(result['answer'].answer)

Expand Down Expand Up @@ -412,7 +416,8 @@ def get_session_history(session_id: str) -> BaseChatMessageHistory:

ids = list(set(ids))
sources = list(set(sources))
source = " ".join(sources)
# source = " ".join(sources)
source = " ".join([cit.source_name for cit in citations])
metadata_id = ids[0]

logger.info(f"input: {result['input']}")
Expand Down
2 changes: 1 addition & 1 deletion tilellm/models/item_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ class QuestionAnswer(BaseModel):
embedding: str = Field(default_factory=lambda: "text-embedding-ada-002")
similarity_threshold: float = Field(default_factory=lambda: 1.0)
debug: bool = Field(default_factory=lambda: False)
citations: bool = Field(default_factory=lambda: False)
citations: bool = Field(default_factory=lambda: True)
system_context: Optional[str] = None
search_type: str = Field(default_factory=lambda: "similarity")
chat_history_dict: Optional[Dict[str, ChatEntry]] = None
Expand Down

0 comments on commit 34e2109

Please sign in to comment.