Commit

add taipy study and code

jbcodeforce committed May 11, 2024
1 parent ab4482a commit 531a571
Showing 8 changed files with 345 additions and 9 deletions.
8 changes: 0 additions & 8 deletions docs/techno/paity/index.md

This file was deleted.

23 changes: 23 additions & 0 deletions docs/techno/taipy/index.md
@@ -0,0 +1,23 @@
# Taipy

[Taipy](https://docs.taipy.io/) is an open-source Python library designed for the easy development of data-driven web applications.

It generates web pages from a Flask server. The main class is `Gui`.

* Supports multiple pages.
* Keeps the state of application variables, with dynamic binding between variables and visual elements (see the sketch after this list).
* User interactions are event-driven.
* Pages may be defined with HTML or Markdown templates, or built in code. Each page has a name used for navigation.
* Includes a CLI to create and run applications.
* Blocks let you organize controls (or other blocks) within pages.
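
Below is a minimal sketch of a single page with dynamic binding, assuming only the public `Gui` API; the variable name `value` and the page content are illustrative, not from this repository:

```python
# Minimal Taipy sketch: a Markdown page whose controls are dynamically
# bound to a module-level variable. Moving the slider updates the text.
from taipy.gui import Gui

value = 50  # illustrative variable, bound by the page below

page = """
# Hello Taipy
Current value: <|{value}|text|>
<|{value}|slider|min=0|max=100|>
"""

if __name__ == "__main__":
    Gui(page).run(title="Minimal demo")
```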

## Some how-tos

* When pages are created in different modules, the variables they bind to visual elements may have a scope limited to their defining module (see the sketch after this list).
* For a single-page application, associate one page with the "/" URL.
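
A sketch of both points, with hypothetical module and variable names (`page_module`, `counter`): a page defined in its own module binds to that module's variables, and the "/" page provides the shared root layout.

```python
# --- page_module.py (hypothetical) ---
# The page binds to `counter`, which lives in this module; Taipy resolves
# the binding against the module where the page object was created.
from taipy.gui import Markdown

counter = 0

page = Markdown("""
# Counter page
Value: <|{counter}|text|>
""")

# --- main.py (hypothetical) ---
# The "/" entry is the root layout shared by all pages; <|content|> marks
# where the selected page is rendered.
from taipy.gui import Gui
from page_module import page

pages = {
    "/": "<|navbar|>\n<|content|>",
    "counter": page,
}

if __name__ == "__main__":
    Gui(pages=pages).run()
```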

## Code

* [1st UI](https://github.com/jbcodeforce/ML-studies/blob/master/techno/taipy/1st_ui.py)
* [Markdown, HTML, and navbar-based pages](https://github.com/jbcodeforce/ML-studies/blob/master/techno/taipy/md_ui.py)
* [A chatbot UI integrating the LangGraph prompt builder](https://github.com/jbcodeforce/ML-studies/blob/master/llm-langchain/langgraph/chatbot_graph_ui.py)
79 changes: 79 additions & 0 deletions llm-langchain/langgraph/chatbot_graph_ui.py
@@ -0,0 +1,79 @@

from taipy.gui import Gui, State, notify
from prompt_builder_grapth import send_user_msg

context = "Hi"
conversation = {
    "Conversation": []
}
current_user_message = ""



def invoke_model(state: State, user: str) -> str:
    """
    Send a message to the graph for processing.
    Args:
        - state: The current conversation state.
        - user: The message to send to the graph.
    Returns:
        The content of the last node's response.
    """
    answer = ""
    for output in send_user_msg(user):
        # stream() yields one dictionary per node, keyed by node name
        for key, value in output.items():
            if key == "__end__":
                continue
            print(f"Output from node '{key}':")
            print("---")
            print(value.content)
            answer = value.content  # keep the latest node output as the answer
    return answer

def send_message(state: State) -> None:
    """
    Send the user's message to the graph and update the conversation.
    Args:
        - state: The current state.
    """
    # Add the user's message to the context
    state.context += state.current_user_message
    # Send the accumulated context to the graph and get the response
    answer = invoke_model(state, state.context)
    # Add the response to the context for future messages
    state.context += answer
    # Update the conversation
    conv = state.conversation._dict.copy()
    conv["Conversation"] += [state.current_user_message, answer]
    state.conversation = conv
    # Clear the input field
    state.current_user_message = ""


def style_conv(state: State, idx: int, row: int) -> str:
    """
    Apply a style to the conversation table depending on the message's author.
    Args:
        - state: The current state of the app.
        - idx: The index of the message in the table.
        - row: The row of the message in the table.
    Returns:
        The style to apply to the message.
    """
    if idx is None:
        return None
    elif idx % 2 == 0:
        return "user_message"
    else:
        return "bot_message"

page = """
<|{conversation}|table|show_all|style=style_conv|>
<|{current_user_message}|input|label=Write your message here...|on_action=send_message|class_name=fullwidth|>
"""



if __name__ == "__main__":
    Gui(page, css_file="./main.css").run(title="Prompt Builder Chat")
28 changes: 28 additions & 0 deletions llm-langchain/langgraph/main.css
@@ -0,0 +1,28 @@
.bot_message td {
    margin-left: 30px;
    margin-bottom: 20px;
    margin-top: 20px;
    position: relative;
    display: inline-block;
    padding: 20px;
    background-color: #2773ca;
    border-radius: 20px;
    max-width: 80%;
    box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.2), 0 6px 20px 0 rgba(0, 0, 0, 0.19);
    font-size: large;
}

.user_message td {
    margin-right: 30px;
    margin-bottom: 20px;
    margin-top: 20px;
    position: relative;
    display: inline-block;
    padding: 20px;
    background-color: #864cc0;
    border-radius: 20px;
    max-width: 80%;
    float: right;
    box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.2), 0 6px 20px 0 rgba(0, 0, 0, 0.19);
    font-size: large;
}
138 changes: 138 additions & 0 deletions llm-langchain/langgraph/prompt_builder_grapth.py
@@ -0,0 +1,138 @@

from typing import Annotated, List
from typing_extensions import TypedDict
import uuid

from dotenv import load_dotenv
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.pydantic_v1 import BaseModel
from langchain_openai import ChatOpenAI
from langgraph.graph import MessageGraph, END
from langgraph.graph.message import add_messages


load_dotenv("../../.env")

"""
A chat bot that helps a user generate a prompt.
There are two separate states, gather requirements and Generate Prompt and the LLM decides when to transition between them.
Based on this tutorial: https://github.com/langchain-ai/langgraph/blob/main/examples/chatbots/information-gather-prompting.ipynb
"""
class PromptInstructions(BaseModel):
    """Instructions on how to prompt the LLM."""
    objective: str
    variables: List[str]
    constraints: List[str]
    requirements: List[str]

class GraphState(TypedDict):
    messages: Annotated[list, add_messages]


def _is_tool_call(msg):
    return hasattr(msg, "additional_kwargs") and "tool_calls" in msg.additional_kwargs

def define_gather_info_prompt():
    template = """Your job is to get information from a user about what type of prompt template they want to create.
You should get the following information from them:
- What the objective of the prompt is
- What variables will be passed into the prompt template
- Any constraints for what the output should NOT do
- Any requirements that the output MUST adhere to
If you are not able to discern this info, ask them to clarify! Do not attempt to wildly guess.
After you are able to discern all the information, call the relevant tool."""
    return template

def get_messages_info(messages):
    return [SystemMessage(content=define_gather_info_prompt())] + messages

# Get the messages for the prompt-generation node.
# Will only get messages AFTER the tool call.
def get_prompt_messages(messages):
    prompt_system = """Based on the following requirements, write a good prompt template:
{reqs}"""

    tool_call = None
    other_msgs = []
    for m in messages:
        if _is_tool_call(m):
            tool_call = m.additional_kwargs["tool_calls"][0]["function"]["arguments"]
        elif tool_call is not None:
            other_msgs.append(m)
    return [SystemMessage(content=prompt_system.format(reqs=tool_call))] + other_msgs

def build_chains():
    """
    There are two nodes, so there are two chains to support this processing.
    """
    llm = ChatOpenAI(temperature=0)
    llm_with_tool = llm.bind_tools([PromptInstructions])
    gather_info_chain = get_messages_info | llm_with_tool
    prompt_gen_chain = get_prompt_messages | llm
    return gather_info_chain, prompt_gen_chain

def get_state(messages):
    if _is_tool_call(messages[-1]):
        return "prompt"
    elif not isinstance(messages[-1], HumanMessage):
        return END
    for m in messages:
        if _is_tool_call(m):
            return "prompt"
    return "info"

def define_graph():
    nodes = {k: k for k in ["info", "prompt", END]}
    workflow = MessageGraph()
    gather_info_chain, prompt_gen_chain = build_chains()
    workflow.add_node("info", gather_info_chain)
    workflow.add_node("prompt", prompt_gen_chain)
    workflow.add_conditional_edges("info", get_state, nodes)
    workflow.add_conditional_edges("prompt", get_state, nodes)
    workflow.set_entry_point("info")
    graph = workflow.compile()
    return graph


_graph = None

def get_or_build_graph():
    global _graph
    if not _graph:
        _graph = define_graph()
    return _graph

def send_user_msg(user_msg):
    config = {"configurable": {"thread_id": str(uuid.uuid4())}}
    graph = get_or_build_graph()
    return graph.stream([HumanMessage(content=user_msg)], config=config)


def text_chat():
    config = {"configurable": {"thread_id": str(uuid.uuid4())}}
    graph = get_or_build_graph()  # the module-level graph is built lazily
    while True:
        user = input("User (q/Q to quit): ")
        if user in {"q", "Q"}:
            print("AI: Byebye")
            break
        for output in graph.stream([HumanMessage(content=user)], config=config):
            print(output)  # raw graph output, useful for debugging
            if "__end__" in output:
                continue
            # stream() yields dictionaries with output keyed by node name
            for key, value in output.items():
                print(f"Output from node '{key}':")
                print("---")
                print(value.content)
            print("\n---\n")


if __name__ == "__main__":
    text_chat()
2 changes: 1 addition & 1 deletion mkdocs.yml
@@ -37,7 +37,6 @@ nav:
 - Feature Store: techno/feature_store.md/
 - Kaggle: kaggle.md
 - LangChain: coding/langchain.md
-
 - OpenSearch: techno/opensearch.md
 - Pandas: coding/pandas.md
 - Python studies: https://jbcodeforce.github.io/python-code/
@@ -47,6 +46,7 @@ nav:
 - Spark studies: https://jbcodeforce.github.io/spark-studies/
 - Gradio: techno/gradio/index.md
 - Streamlit: techno/streamlit.md
+- TaiPy: techno/taipy/index.md
 - NiceGUI: techno/nicegui.md
 - WatsonX: techno/watsonx.md
 - Solutions:
40 changes: 40 additions & 0 deletions techno/taipy/dialog_ui.py
@@ -0,0 +1,40 @@
from taipy.gui import Gui

name = ""
show_dialog = False

def button_action(state):
    state.show_dialog = True

def dialog_action(state, id, payload):
    with state as st:
        # Button index 0 is "Save"; the bound `name` is already up to date in
        # the state, so only reset it when the dialog is cancelled.
        # (Resetting on cancel is an assumption; the original body was a no-op.)
        if payload["args"][0] != 0:
            st.name = ""
        st.show_dialog = False

# Currently unused; kept as an alternative set of dialog properties.
dialog_props = {
    "title": "Select an item in the list",
    "labels": "Cancel;Validate",
    "page_id": "page",
    "close_label": "Cancel"
}

root_md = """
# Page title
<|{name}|>
<|Open Me|button|on_action=button_action|class_name=plain|>
<|{show_dialog}|dialog|labels=Save;Cancel|on_action=dialog_action|
Enter a name:
<|{name}|input|>
|>
"""

if __name__ == "__main__":
    Gui(page=root_md).run(title="Dialog demo", debug=True, use_reloader=True)
36 changes: 36 additions & 0 deletions techno/taipy/md_ui.py
@@ -0,0 +1,36 @@
from taipy.gui import Gui, Html, Markdown

root_md = """
<|navbar|>
# Multi-page application
<|content|>
This application was created with [Taipy](https://www.taipy.io/).
"""

page2 = Html("""
<h1>Html Page 2</h1>
Go to <a href="/page1"><i> First Page </i></a> for more information
""")

page1 = Markdown("""
# MD Page 1
Any [*Markdown*](https://en.wikipedia.org/wiki/Markdown) content can be used here.
Go to [Second Page](/page2) for more information
""")

pages = {
    "/": root_md,
    "page1": page1,
    "page2": page2
}

if __name__ == "__main__":
    Gui(pages=pages).run(title="Multi-page demo", debug=True, use_reloader=True)
