
Commit

fix: logging
glorenzo972 committed Jul 31, 2024
1 parent 6be4f91 commit 53859f8
Showing 3 changed files with 32 additions and 15 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,10 @@
*Andrea Sponziello*
### **Copyrigth**: *Tiledesk SRL*

+ ## [2024-07-31]
+ ### 0.2.11
+ - fix: log

## [2024-07-31]
### 0.2.10
- fix: write log
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "tilellm"
version = "0.2.10"
version = "0.2.11"
description = "tiledesk for RAG"
authors = ["Gianluca Lorenzo <gianluca.lorenzo@gmail.com>"]
repository = "https://github.com/Tiledesk/tiledesk-llm"
41 changes: 27 additions & 14 deletions tilellm/controller/controller.py
@@ -199,7 +199,7 @@ async def ask_to_llm(question, chat_model=None):
qa_prompt = ChatPromptTemplate.from_messages(
[
("system", question.system_context),
MessagesPlaceholder("chat_history_a", n_messages=question.n_messages),
MessagesPlaceholder("chat_history", n_messages=question.n_messages),
("human", "{input}"),
]
)
@@ -208,7 +208,7 @@ async def ask_to_llm(question, chat_model=None):

def get_session_history(session_id: str) -> BaseChatMessageHistory:
if session_id not in store:
- store[session_id] = ChatMessageHistory()
+ store[session_id] = load_session_history(question.chat_history_dict) #ChatMessageHistory()
return store[session_id]

runnable = qa_prompt | chat_model
@@ -217,15 +217,16 @@ def get_session_history(session_id: str) -> BaseChatMessageHistory:
runnable,
get_session_history,
input_messages_key="input",
history_messages_key="chat_history"

)

result = await runnable_with_history.ainvoke(
{"input": question.question, 'chat_history_a': chat_history_list},
{"input": question.question},# 'chat_history_a': chat_history_list},
config={"configurable": {"session_id": uuid.uuid4().hex}
},
)

# logger.info(result)
if not question.chat_history_dict:
question.chat_history_dict = {}

@@ -259,11 +260,14 @@ async def ask_with_memory(question_answer, repo=None) -> RetrievalResult:
# chat_history_dict : Dict[str, ChatEntry]

question_answer_list = []
chat_history_list = []
if question_answer.chat_history_dict is not None:
for key, entry in question_answer.chat_history_dict.items():
chat_history_list.append(HumanMessage(content=entry.question)) # ('human', entry.question))
chat_history_list.append(AIMessage(content=entry.answer))

question_answer_list.append((entry.question, entry.answer))

logger.info(question_answer_list)
openai_callback_handler = OpenAICallbackHandler()

llm = ChatOpenAI(model_name=question_answer.model,
@@ -324,7 +328,7 @@ async def ask_with_memory(question_answer, repo=None) -> RetrievalResult:

def get_session_history(session_id: str) -> BaseChatMessageHistory:
if session_id not in store:
- store[session_id] = ChatMessageHistory()
+ store[session_id] = load_session_history(question_answer.chat_history_dict)
return store[session_id]

conversational_rag_chain = RunnableWithMessageHistory(
@@ -336,7 +340,7 @@ def get_session_history(session_id: str) -> BaseChatMessageHistory:
)

result = conversational_rag_chain.invoke(
{"input": question_answer.question, 'chat_history': question_answer_list},
{"input": question_answer.question, }, #'chat_history': chat_history_list},
config={"configurable": {"session_id": uuid.uuid4().hex}
}, # constructs a key "abc123" in `store`.
)
@@ -351,6 +355,7 @@ def get_session_history(session_id: str) -> BaseChatMessageHistory:
("human", "{input}"),
]
)
+ # logger.info(contextualize_q_prompt)
history_aware_retriever = create_history_aware_retriever(
llm, retriever, contextualize_q_prompt
)
@@ -373,7 +378,7 @@ def get_session_history(session_id: str) -> BaseChatMessageHistory:

def get_session_history(session_id: str) -> BaseChatMessageHistory:
if session_id not in store:
- store[session_id] = ChatMessageHistory()
+ store[session_id] = load_session_history(question_answer.chat_history_dict)
return store[session_id]

conversational_rag_chain = RunnableWithMessageHistory(
@@ -385,14 +390,11 @@ def get_session_history(session_id: str) -> BaseChatMessageHistory:
)

result = conversational_rag_chain.invoke(
{"input": question_answer.question, 'chat_history': question_answer_list},
{"input": question_answer.question}, #'chat_history': chat_history_list},
config={"configurable": {"session_id": uuid.uuid4().hex}
}, # constructs a key "abc123" in `store`.
)

- # print(store)
- # print(question_answer_list)

docs = result["context"]
# from pprint import pprint
# pprint(docs)
@@ -416,8 +418,9 @@ def get_session_history(session_id: str) -> BaseChatMessageHistory:
source = " ".join(sources)
metadata_id = ids[0]

- #logger.info(result)
- #print(result['answer'])
+ logger.info(f"input: {result['input']}")
+ logger.info(f"chat_history: {result['chat_history']}")
+ logger.info(f"answer: {result['answer']}")

result['answer'], success = verify_answer(result['answer'])

@@ -658,6 +661,7 @@ async def delete_chunk_id_from_namespace(chunk_id:str, namespace: str, repo=None
logger.error(ex)
raise ex


@inject_repo
async def get_list_namespace(repo=None) -> PineconeNamespaceResult:
"""
@@ -747,3 +751,12 @@ def verify_answer(s):
else:
success = True
return s, success


+ def load_session_history(history) -> BaseChatMessageHistory:
+     chat_history = ChatMessageHistory()
+     if history is not None:
+         for key, entry in history.items():
+             chat_history.add_message(HumanMessage(content=entry.question)) # ('human', entry.question))
+             chat_history.add_message(AIMessage(content=entry.answer))
+     return chat_history
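
Taken together, the controller.py changes stop passing prior turns inside the `invoke`/`ainvoke` input dict and instead seed the per-session store from `chat_history_dict` through the new `load_session_history` helper, so `RunnableWithMessageHistory` injects them via the `chat_history` placeholder (`history_messages_key="chat_history"`). Below is a minimal, self-contained sketch of that pattern under the same assumptions the commit makes (history entries expose `.question` and `.answer`, as the repository's chat-entry model does); the `build_chain` helper name and the usage lines are illustrative, not part of the commit.

```python
import uuid

from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory

store = {}  # session_id -> ChatMessageHistory, as in controller.py


def load_session_history(history) -> BaseChatMessageHistory:
    # Rebuild an in-memory history from {key: entry} pairs carrying .question/.answer.
    chat_history = ChatMessageHistory()
    if history is not None:
        for _, entry in history.items():
            chat_history.add_message(HumanMessage(content=entry.question))
            chat_history.add_message(AIMessage(content=entry.answer))
    return chat_history


def build_chain(chat_model, system_context, chat_history_dict, n_messages=None):
    # The prompt pulls prior turns from the "chat_history" placeholder, not the input dict.
    qa_prompt = ChatPromptTemplate.from_messages(
        [
            ("system", system_context),
            MessagesPlaceholder("chat_history", n_messages=n_messages),
            ("human", "{input}"),
        ]
    )

    def get_session_history(session_id: str) -> BaseChatMessageHistory:
        # The first lookup for a session seeds the store with the caller-supplied history.
        if session_id not in store:
            store[session_id] = load_session_history(chat_history_dict)
        return store[session_id]

    return RunnableWithMessageHistory(
        qa_prompt | chat_model,
        get_session_history,
        input_messages_key="input",
        history_messages_key="chat_history",  # feeds MessagesPlaceholder("chat_history")
    )


# Hypothetical usage with any LangChain chat model (e.g. ChatOpenAI):
# chain = build_chain(chat_model, "You are a helpful assistant.", previous_turns)
# result = chain.invoke({"input": "And in JSON format?"},
#                       config={"configurable": {"session_id": uuid.uuid4().hex}})
```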
