diff --git a/LOADBALANCE.md b/LOADBALANCE.md
new file mode 100644
index 0000000..bc6cf5b
--- /dev/null
+++ b/LOADBALANCE.md
@@ -0,0 +1,54 @@
+# Load Balancing and Routing
+
+Langflow (at least as of version 1.0.15) expects all URLs to be rooted at `/`: there is no straightforward way to configure a `base_url` or similar.
+This is a challenge for load balancers and other application routers. The goal of this page is to share some tricks you can use
+to configure your own system; the examples use the popular Nginx reverse proxy but should be easy to translate to whatever proxy you use.
+
+## Server Proxy
+
+Consider this Nginx `server` configuration, which allows Langflow developers to access the UI at `langflow.dev.internal` on port `80`,
+whilst routing requests to `langflow.dev.service` on port `7860`:
+
+```
+ server {
+ listen 80;
+
+ server_name langflow.dev.internal;
+
+ location / {
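+             # forward all traffic to the Langflow service; the headers preserve the original host, client IP and scheme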
+ proxy_pass http://langflow.dev.service:7860;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ }
+ }
+```
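+
+With this in place, developers browse to `http://langflow.dev.internal/` and Nginx forwards every request, unchanged, to the Langflow service; no `base_url` support is needed because the URL space is still rooted at `/`.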
+
+## Server Proxy with Routing
+
+To add application routing based on a URL prefix (in this case `/chat`), use the prefix to select the destination `location`, but
+strip it from the URL before the request is passed to Langflow:
+
+```
+ server {
+ listen 80;
+
+ server_name langflow.test.internal;
+
+ location /chat {
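+             # strip the leading /chat before proxying (note: a bare /chat with no trailing slash is forwarded unchanged)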
+ rewrite ^/chat/(.*)$ /$1 break;
+ proxy_pass http://langflow.test.service:7860;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ }
+ }
+```
+
+Two important things to note about this routing approach:
+
+1. The Langflow frontend will not work without a `/` location, but backend API calls will. You can combine both locations within the same `server` configuration, as shown in the sketch below.
+2. Some returned content (such as when an API is called with `stream=true`) includes URLs that are not prefixed (e.g. `stream_url` will begin with `/api/v1/...`); you will need to re-add the prefix within your application logic, as in the sketch at the end of this page.
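+
+As a minimal sketch of the first point, the two locations can live side by side within one `server` block (service names follow the examples above):
+
+```
+     server {
+         listen 80;
+
+         server_name langflow.test.internal;
+
+         # un-prefixed traffic, including the Langflow frontend
+         location / {
+             proxy_pass http://langflow.test.service:7860;
+             proxy_set_header Host $host;
+             proxy_set_header X-Real-IP $remote_addr;
+             proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+             proxy_set_header X-Forwarded-Proto $scheme;
+         }
+
+         # prefixed backend API traffic
+         location /chat {
+             rewrite ^/chat/(.*)$ /$1 break;
+             proxy_pass http://langflow.test.service:7860;
+             proxy_set_header Host $host;
+             proxy_set_header X-Real-IP $remote_addr;
+             proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+             proxy_set_header X-Forwarded-Proto $scheme;
+         }
+     }
+```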
+
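+To address the second point, application logic that consumes `stream_url` might re-add the prefix like this (a hypothetical helper, assuming the `/chat` route above; adapt the name and prefix to your setup):
+
+```
+// Re-add the proxy route prefix to a stream_url returned by the Langflow API.
+function withRoutePrefix(streamUrl) {
+    const ROUTE_PREFIX = "/chat"; // must match the Nginx location
+    return streamUrl.startsWith(ROUTE_PREFIX)
+        ? streamUrl                  // already prefixed; leave as-is
+        : ROUTE_PREFIX + streamUrl;  // "/api/v1/..." becomes "/chat/api/v1/..."
+}
+
+// Usage, wherever your code reads stream_url from a response body:
+// const eventSource = new EventSource(withRoutePrefix(streamUrl));
+```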
diff --git a/README.md b/README.md
index c346821..4e46136 100644
--- a/README.md
+++ b/README.md
@@ -33,7 +33,7 @@ apply your own Enterprise architucture patterns to it.
## Langflow Version
-This repo is currently based on Langflow v1.0.14; future versions of Langflow may change
+This repo is currently based on Langflow v1.0.17; future versions of Langflow may change
the advice.
## Contributions
diff --git a/example.html b/example.html
index f61f05b..c602caf 100644
--- a/example.html
+++ b/example.html
@@ -289,8 +289,10 @@
Document Summary
output_type: "chat",
input_type: "chat",
"tweaks": {
- "TextInput-eYtbq": {"input_value": base64Content },
- "TextInput-AlrlI": {"input_value": filename }
+ "Parameter Input": {
+ "base64_file": base64Content,
+ "filename": filename
+ }
}
})
})
diff --git a/flows/AstraMemoryChatbot.json b/flows/AstraMemoryChatbot.json
index 484c6ef..e49f379 100644
--- a/flows/AstraMemoryChatbot.json
+++ b/flows/AstraMemoryChatbot.json
@@ -1 +1 @@
-{"id":"19ac2fc8-afac-4613-9a55-214aa48d738c","data":{"nodes":[{"data":{"description":"Create a prompt template with dynamic variables.","display_name":"Prompt","id":"Prompt-yHUvF","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":"Continue the following conversation:\n\n{context}\n\nUser: {user_message}\nAI: ","name":"template","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","load_from_db":false},"context":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"context","display_name":"context","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"user_message":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"user_message","display_name":"user_message","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic 
variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Prompt","documentation":"","custom_fields":{"template":["context","user_message"]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":null,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"error":null,"edited":false},"type":"Prompt"},"dragging":false,"height":525,"id":"Prompt-yHUvF","position":{"x":1913.4489065776725,"y":646.0296877481713},"positionAbsolute":{"x":1913.4489065776725,"y":646.0296877481713},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Get chat inputs from the Playground.","display_name":"Chat Input","id":"ChatInput-ZgIR1","node":{"template":{"_type":"Component","files":{"trace_as_metadata":true,"file_path":"","fileTypes":["txt","md","mdx","csv","json","yaml","yml","xml","html","htm","pdf","docx","py","sh","sql","js","ts","tsx","jpg","jpeg","png","bmp","image"],"list":true,"required":false,"placeholder":"","show":true,"value":"","name":"files","display_name":"Files","advanced":true,"dynamic":false,"info":"Files to be sent with the message.","title_case":false,"type":"file","_input_type":"FileInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"what is my name","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as input.","title_case":false,"type":"str","_input_type":"MultilineInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"User","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Get chat inputs from the Playground.","icon":"ChatInput","base_classes":["Message"],"display_name":"Chat Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","files"],"beta":false,"edited":false},"type":"ChatInput"},"dragging":false,"height":317,"id":"ChatInput-ZgIR1","position":{"x":818.0456657708498,"y":811.6396906055986},"positionAbsolute":{"x":818.0456657708498,"y":811.6396906055986},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Display a chat message in the Playground.","display_name":"Chat Output","id":"ChatOutput-Z8YU1","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"data_template","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"Machine","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"AI","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false},"type":"ChatOutput"},"height":411,"id":"ChatOutput-Z8YU1","position":{"x":2905.211251193657,"y":1298.5481160975671},"selected":false,"type":"genericNode","width":384,"dragging":false,"positionAbsolute":{"x":2905.211251193657,"y":1298.5481160975671}},{"id":"AstraDBChatMemory-Ij2ox","type":"genericNode","position":{"x":846.5819719179892,"y":25.823793813293406},"data":{"type":"AstraDBChatMemory","node":{"template":{"_type":"Component","api_endpoint":{"load_from_db":true,"required":true,"placeholder":"","show":true,"value":"","name":"api_endpoint","display_name":"API Endpoint","advanced":false,"input_types":[],"dynamic":false,"info":"API endpoint URL for the Astra DB service.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.memory.model import LCChatMemoryComponent\nfrom langflow.inputs import MessageTextInput, StrInput, SecretStrInput\nfrom langflow.field_typing import BaseChatMessageHistory\n\n\nclass AstraDBChatMemory(LCChatMemoryComponent):\n display_name = \"Astra DB Chat Memory\"\n description = \"Retrieves and store chat messages from Astra DB.\"\n name = \"AstraDBChatMemory\"\n icon: str = \"AstraDB\"\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n required=True,\n ),\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n required=True,\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n required=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n ]\n\n def build_message_history(self) -> BaseChatMessageHistory:\n try:\n from langchain_astradb.chat_message_histories import AstraDBChatMessageHistory\n except ImportError:\n raise ImportError(\n \"Could not import langchain Astra DB integration package. 
\"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n\n memory = AstraDBChatMessageHistory(\n session_id=self.session_id,\n collection_name=self.collection_name,\n token=self.token,\n api_endpoint=self.api_endpoint,\n namespace=self.namespace or None,\n )\n return memory\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"collection_name":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"","show":true,"value":"memories","name":"collection_name","display_name":"Collection Name","advanced":false,"dynamic":false,"info":"The name of the collection within Astra DB where the vectors will be stored.","title_case":false,"type":"str","_input_type":"StrInput"},"namespace":{"trace_as_metadata":true,"load_from_db":true,"list":false,"required":false,"placeholder":"","show":true,"value":"ASTRA_DB_NAMESPACE","name":"namespace","display_name":"Namespace","advanced":true,"dynamic":false,"info":"Optional namespace within Astra DB to use for the collection.","title_case":false,"type":"str","_input_type":"StrInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"token":{"load_from_db":true,"required":true,"placeholder":"","show":true,"value":"","name":"token","display_name":"Astra DB Application Token","advanced":false,"input_types":[],"dynamic":false,"info":"Authentication token for accessing Astra DB.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"Retrieves and store chat messages from Astra DB.","icon":"AstraDB","base_classes":["BaseChatMessageHistory"],"display_name":"Astra DB Chat Memory","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["BaseChatMessageHistory"],"selected":"BaseChatMessageHistory","name":"memory","display_name":"Memory","method":"build_message_history","value":"__UNDEFINED__","cache":true}],"field_order":["collection_name","token","api_endpoint","namespace","session_id"],"beta":false,"edited":false},"id":"AstraDBChatMemory-Ij2ox","description":"Retrieves and store chat messages from Astra DB.","display_name":"Astra DB Chat Memory"},"selected":false,"width":384,"height":619,"positionAbsolute":{"x":846.5819719179892,"y":25.823793813293406},"dragging":false},{"id":"StoreMessage-egqaz","type":"genericNode","position":{"x":1382.3668535221948,"y":552.1130092608814},"data":{"type":"StoreMessage","node":{"template":{"_type":"Component","memory":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"memory","display_name":"External Memory","advanced":false,"input_types":["BaseChatMessageHistory"],"dynamic":false,"info":"The external memory to store the message. 
If empty, it will use the Langflow tables.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.inputs import MessageInput, StrInput, HandleInput\nfrom langflow.schema.message import Message\nfrom langflow.template import Output\nfrom langflow.memory import get_messages, store_message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI\n\n\nclass StoreMessageComponent(Component):\n display_name = \"Store Message\"\n description = \"Stores a chat message or text into Langflow tables or an external memory.\"\n icon = \"save\"\n name = \"StoreMessage\"\n\n inputs = [\n MessageInput(name=\"message\", display_name=\"Message\", info=\"The chat message to be stored.\", required=True),\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"The external memory to store the message. If empty, it will use the Langflow tables.\",\n ),\n StrInput(\n name=\"sender\",\n display_name=\"Sender\",\n info=\"The sender of the message.\",\n advanced=True,\n ),\n StrInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"The name of the sender.\",\n advanced=True,\n ),\n StrInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n value=\"\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Stored Messages\", name=\"stored_messages\", method=\"store_message\"),\n ]\n\n def store_message(self) -> Message:\n message = self.message\n\n message.session_id = self.session_id or message.session_id\n message.sender = self.sender or message.sender or MESSAGE_SENDER_AI\n message.sender_name = self.sender_name or message.sender_name or MESSAGE_SENDER_NAME_AI\n\n if self.memory:\n # override session_id\n self.memory.session_id = message.session_id\n lc_message = message.to_lc_message()\n self.memory.add_messages([lc_message])\n stored = self.memory.messages\n stored = [Message.from_lc_message(m) for m in stored]\n if message.sender:\n stored = [m for m in stored if m.sender == message.sender]\n else:\n store_message(message, flow_id=self.graph.flow_id)\n stored = get_messages(session_id=message.session_id, sender_name=message.sender_name, sender=message.sender)\n self.status = stored\n return stored\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"","show":true,"value":"","name":"message","display_name":"Message","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The chat message to be stored.","title_case":false,"type":"str","_input_type":"MessageInput"},"sender":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender","display_name":"Sender","advanced":true,"dynamic":false,"info":"The sender of the message.","title_case":false,"type":"str","_input_type":"StrInput"},"sender_name":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender_name","display_name":"Sender Name","advanced":true,"dynamic":false,"info":"The name of the 
sender.","title_case":false,"type":"str","_input_type":"StrInput"},"session_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"StrInput"}},"description":"Stores a chat message or text into Langflow tables or an external memory.","icon":"save","base_classes":["Message"],"display_name":"Store Message","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"stored_messages","display_name":"Stored Messages","method":"store_message","value":"__UNDEFINED__","cache":true}],"field_order":["message","memory","sender","sender_name","session_id"],"beta":false,"edited":true},"id":"StoreMessage-egqaz","description":"Stores a chat message or text into Langflow tables or an external memory.","display_name":"Store Message"},"selected":false,"width":384,"height":487,"positionAbsolute":{"x":1382.3668535221948,"y":552.1130092608814},"dragging":false},{"id":"StoreMessage-fkHzK","type":"genericNode","position":{"x":3390.7241079317982,"y":585.4542713719517},"data":{"type":"StoreMessage","node":{"template":{"_type":"Component","memory":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"memory","display_name":"External Memory","advanced":false,"input_types":["BaseChatMessageHistory"],"dynamic":false,"info":"The external memory to store the message. If empty, it will use the Langflow tables.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.inputs import MessageInput, StrInput, HandleInput\nfrom langflow.schema.message import Message\nfrom langflow.template import Output\nfrom langflow.memory import get_messages, store_message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI\n\n\nclass StoreMessageComponent(Component):\n display_name = \"Store Message\"\n description = \"Stores a chat message or text into Langflow tables or an external memory.\"\n icon = \"save\"\n name = \"StoreMessage\"\n\n inputs = [\n MessageInput(name=\"message\", display_name=\"Message\", info=\"The chat message to be stored.\", required=True),\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"The external memory to store the message. If empty, it will use the Langflow tables.\",\n ),\n StrInput(\n name=\"sender\",\n display_name=\"Sender\",\n info=\"The sender of the message.\",\n advanced=True,\n ),\n StrInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"The name of the sender.\",\n advanced=True,\n ),\n StrInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n value=\"\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Stored Messages\", name=\"stored_messages\", method=\"store_message\"),\n ]\n\n def store_message(self) -> Message:\n message = self.message\n\n message.session_id = self.session_id or message.session_id\n message.sender = self.sender or message.sender or MESSAGE_SENDER_AI\n message.sender_name = self.sender_name or message.sender_name or MESSAGE_SENDER_NAME_AI\n \n if self.memory:\n # override session_id\n self.memory.session_id = message.session_id\n lc_message = message.to_lc_message()\n self.memory.add_messages([lc_message])\n stored = self.memory.messages\n stored = [Message.from_lc_message(m) for m in stored]\n if message.sender:\n stored = [m for m in stored if m.sender == message.sender]\n else:\n store_message(message, flow_id=self.graph.flow_id)\n stored = get_messages(session_id=message.session_id, sender_name=message.sender_name, sender=message.sender)\n self.status = stored\n return stored\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"","show":true,"value":"","name":"message","display_name":"Message","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The chat message to be stored.","title_case":false,"type":"str","_input_type":"MessageInput"},"sender":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender","display_name":"Sender","advanced":true,"dynamic":false,"info":"The sender of the message.","title_case":false,"type":"str","_input_type":"StrInput"},"sender_name":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender_name","display_name":"Sender Name","advanced":true,"dynamic":false,"info":"The name of the sender.","title_case":false,"type":"str","_input_type":"StrInput"},"session_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"StrInput"}},"description":"Stores a chat message or text into Langflow tables or an external memory.","icon":"save","base_classes":["Message"],"display_name":"Store Message","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"stored_messages","display_name":"Stored Messages","method":"store_message","value":"__UNDEFINED__","cache":true}],"field_order":["message","memory","sender","sender_name","session_id"],"beta":false,"edited":true},"id":"StoreMessage-fkHzK","description":"Stores a chat message or text into Langflow tables or an external memory.","display_name":"Store Message"},"selected":false,"width":384,"height":487,"positionAbsolute":{"x":3390.7241079317982,"y":585.4542713719517},"dragging":false},{"id":"OpenAIModel-phOaO","type":"genericNode","position":{"x":2394.8044551303005,"y":967.4304980247605},"data":{"type":"OpenAIModel","node":{"template":{"_type":"Component","api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"value":"","name":"api_key","display_name":"OpenAI API Key","advanced":false,"input_types":[],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"json_mode","display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"max_tokens","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":{},"name":"model_kwargs","display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"combobox":false,"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"required":false,"placeholder":"","show":true,"value":"gpt-4o-mini","name":"model_name","display_name":"Model Name","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput"},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"openai_api_base","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"value":{},"name":"output_schema","display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":1,"name":"seed","display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"stream","display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"system_message","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":0.1,"name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed"],"beta":false,"edited":false},"id":"OpenAIModel-phOaO","description":"Generates text using OpenAI LLMs.","display_name":"OpenAI"},"selected":false,"width":384,"height":639,"positionAbsolute":{"x":2394.8044551303005,"y":967.4304980247605},"dragging":false},{"id":"Memory-r2fFC","type":"genericNode","position":{"x":1375.373727378358,"y":1259.3743794430238},"data":{"type":"Memory","node":{"template":{"_type":"Component","memory":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"memory","display_name":"External Memory","advanced":false,"input_types":["BaseChatMessageHistory"],"dynamic":false,"info":"Retrieve messages from an external memory. If empty, it will use the Langflow tables.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import get_messages, LCBuiltinChatMemory\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import BaseChatMemory\nfrom langchain.memory import ConversationBufferMemory\n\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.graph.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"n_messages":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":100,"name":"n_messages","display_name":"Number of Messages","advanced":true,"dynamic":false,"info":"Number of messages to retrieve.","title_case":false,"type":"int","_input_type":"IntInput"},"order":{"trace_as_metadata":true,"options":["Ascending","Descending"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"Ascending","name":"order","display_name":"Order","advanced":true,"dynamic":false,"info":"Order of the 
messages.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User","Machine and User"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"Machine and User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Filter by sender type.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Filter by sender name.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{sender_name}: {text}","name":"template","display_name":"Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Retrieves stored chat messages from Langflow tables or an external memory.","icon":"message-square-more","base_classes":["BaseChatMemory","Data","Message"],"display_name":"Chat Memory","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"messages","display_name":"Messages (Data)","method":"retrieve_messages","value":"__UNDEFINED__","cache":true},{"types":["Message"],"selected":"Message","name":"messages_text","display_name":"Messages (Text)","method":"retrieve_messages_as_text","value":"__UNDEFINED__","cache":true},{"types":["BaseChatMemory"],"selected":"BaseChatMemory","name":"lc_memory","display_name":"Memory","method":"build_lc_memory","value":"__UNDEFINED__","cache":true}],"field_order":["memory","sender","sender_name","n_messages","session_id","order","template"],"beta":false,"edited":true},"id":"Memory-r2fFC","description":"Retrieves stored chat messages from Langflow tables or an external memory.","display_name":"Chat Memory"},"selected":false,"width":384,"height":513,"positionAbsolute":{"x":1375.373727378358,"y":1259.3743794430238},"dragging":false},{"id":"Memory-jnOwI","type":"genericNode","position":{"x":813.7066906701654,"y":1263.930599810258},"data":{"type":"Memory","node":{"template":{"_type":"Component","memory":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"memory","display_name":"External Memory","advanced":false,"input_types":["BaseChatMessageHistory"],"dynamic":false,"info":"Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import get_messages, LCBuiltinChatMemory\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import BaseChatMemory\nfrom langchain.memory import ConversationBufferMemory\n\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n if order == \"ASC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.graph.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"n_messages":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":100,"name":"n_messages","display_name":"Number of Messages","advanced":true,"dynamic":false,"info":"Number of messages to retrieve.","title_case":false,"type":"int","_input_type":"IntInput"},"order":{"trace_as_metadata":true,"options":["Ascending","Descending"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"Ascending","name":"order","display_name":"Order","advanced":true,"dynamic":false,"info":"Order of the messages.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User","Machine and User"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"Machine and User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Filter by sender type.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Filter by sender name.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{sender_name}: {text}","name":"template","display_name":"Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Retrieves stored chat messages from Langflow tables or an external memory.","icon":"message-square-more","base_classes":["BaseChatMemory","Data","Message"],"display_name":"Chat Memory","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"messages","display_name":"Messages (Data)","method":"retrieve_messages","value":"__UNDEFINED__","cache":true},{"types":["Message"],"selected":"Message","name":"messages_text","display_name":"Messages (Text)","method":"retrieve_messages_as_text","value":"__UNDEFINED__","cache":true},{"types":["BaseChatMemory"],"selected":"BaseChatMemory","name":"lc_memory","display_name":"Memory","method":"build_lc_memory","value":"__UNDEFINED__","cache":true}],"field_order":["memory","sender","sender_name","n_messages","session_id","order","template"],"beta":false,"edited":false},"id":"Memory-jnOwI"},"selected":false,"width":384,"height":419,"positionAbsolute":{"x":813.7066906701654,"y":1263.930599810258},"dragging":true}],"edges":[{"className":"","data":{"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-ZgIR1","name":"message","output_types":["Message"]},"targetHandle":{"fieldName":"user_message","id":"Prompt-yHUvF","inputTypes":["Message","Text"],"type":"str"}},"id":"reactflow__edge-ChatInput-ZgIR1{œdataTypeœ:œChatInputœ,œidœ:œChatInput-ZgIR1œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-yHUvF{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-yHUvFœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","source":"ChatInput-ZgIR1","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-ZgIR1œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-yHUvF","targetHandle":"{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-yHUvFœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","selected":false},{"source":"ChatInput-ZgIR1","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-ZgIR1œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"StoreMessage-egqaz","targetHandle":"{œfieldNameœ:œmessageœ,œidœ:œStoreMessage-egqazœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"message","id":"StoreMessage-egqaz","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-ZgIR1","name":"message","output_types":["Message"]}},"id":"reactflow__edge-ChatInput-ZgIR1{œdataTypeœ:œChatInputœ,œidœ:œChatInput-ZgIR1œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-StoreMessage-egqaz{œfieldNameœ:œmessageœ,œidœ:œStoreMessage-egqazœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"ChatOutput-Z8YU1","sourceHandle":"{œdataTypeœ:œChatOutputœ,œidœ:œChatOutput-Z8YU1œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"StoreMessage-fkHzK","targetHandle":"{œfieldNameœ:œmessageœ,œidœ:œStoreMessage-fkHzKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"message","id":"StoreMessage-fkHzK
","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"ChatOutput","id":"ChatOutput-Z8YU1","name":"message","output_types":["Message"]}},"id":"reactflow__edge-ChatOutput-Z8YU1{œdataTypeœ:œChatOutputœ,œidœ:œChatOutput-Z8YU1œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-StoreMessage-fkHzK{œfieldNameœ:œmessageœ,œidœ:œStoreMessage-fkHzKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"Prompt-yHUvF","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-yHUvFœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"OpenAIModel-phOaO","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-phOaOœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"OpenAIModel-phOaO","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-yHUvF","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-Prompt-yHUvF{œdataTypeœ:œPromptœ,œidœ:œPrompt-yHUvFœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-phOaO{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-phOaOœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"OpenAIModel-phOaO","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-phOaOœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-Z8YU1","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Z8YU1œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-Z8YU1","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"OpenAIModel","id":"OpenAIModel-phOaO","name":"text_output","output_types":["Message"]}},"id":"reactflow__edge-OpenAIModel-phOaO{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-phOaOœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-Z8YU1{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Z8YU1œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"AstraDBChatMemory-Ij2ox","sourceHandle":"{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-Ij2oxœ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}","target":"StoreMessage-fkHzK","targetHandle":"{œfieldNameœ:œmemoryœ,œidœ:œStoreMessage-fkHzKœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"memory","id":"StoreMessage-fkHzK","inputTypes":["BaseChatMessageHistory"],"type":"other"},"sourceHandle":{"dataType":"AstraDBChatMemory","id":"AstraDBChatMemory-Ij2ox","name":"memory","output_types":["BaseChatMessageHistory"]}},"id":"reactflow__edge-AstraDBChatMemory-Ij2ox{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-Ij2oxœ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}-StoreMessage-fkHzK{œfieldNameœ:œmemoryœ,œidœ:œStoreMessage-fkHzKœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","className":""},{"source":"AstraDBChatMemory-Ij2ox","sourceHandle":"{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-Ij2oxœ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}","target":"StoreMessage-egqaz","targetHandle":"{œfieldNameœ:œmemoryœ,œidœ:œStoreMessage-egqazœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"memory","id":"StoreMessage-egqaz","inputTypes":["BaseChatMessageHistory"],"type":"other"},"sourceHandle":{"dataType":"AstraDBChatMemory","id":"AstraDBChatMemory-Ij2ox","name":"memory","output_types":["BaseChatMessageHistory"]}},"selected":false,"id":"reactflow__edge-AstraDBChatMemory-Ij2ox{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-Ij2oxœ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseCha
tMessageHistoryœ]}-StoreMessage-egqaz{œfieldNameœ:œmemoryœ,œidœ:œStoreMessage-egqazœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","className":""},{"source":"Memory-r2fFC","sourceHandle":"{œdataTypeœ:œMemoryœ,œidœ:œMemory-r2fFCœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-yHUvF","targetHandle":"{œfieldNameœ:œcontextœ,œidœ:œPrompt-yHUvFœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"context","id":"Prompt-yHUvF","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"Memory","id":"Memory-r2fFC","name":"messages_text","output_types":["Message"]}},"className":"","id":"reactflow__edge-Memory-r2fFC{œdataTypeœ:œMemoryœ,œidœ:œMemory-r2fFCœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-yHUvF{œfieldNameœ:œcontextœ,œidœ:œPrompt-yHUvFœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"source":"AstraDBChatMemory-Ij2ox","sourceHandle":"{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-Ij2oxœ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}","target":"Memory-r2fFC","targetHandle":"{œfieldNameœ:œmemoryœ,œidœ:œMemory-r2fFCœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"memory","id":"Memory-r2fFC","inputTypes":["BaseChatMessageHistory"],"type":"other"},"sourceHandle":{"dataType":"AstraDBChatMemory","id":"AstraDBChatMemory-Ij2ox","name":"memory","output_types":["BaseChatMessageHistory"]}},"id":"reactflow__edge-AstraDBChatMemory-Ij2ox{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-Ij2oxœ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}-Memory-r2fFC{œfieldNameœ:œmemoryœ,œidœ:œMemory-r2fFCœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","className":""}],"viewport":{"x":-333.2349241185582,"y":-169.1736561660755,"zoom":0.5225899319671766}},"description":"This project can be used as a starting point for building a Chat experience with user specific memory. You can set a different Session ID to start a new message history.","name":"Astra Memory Chatbot","last_tested_version":"1.0.14","endpoint_name":"astra-memory-chatbot","is_component":false}
\ No newline at end of file
+{"id":"b82a60f2-ca42-42ff-a61a-fde2f9d32375","data":{"nodes":[{"data":{"description":"Create a prompt template with dynamic variables.","display_name":"Prompt","id":"Prompt-g0DH3","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"Continue the following conversation:\n\n{context}\n\nUser: {user_message}\nAI: 
","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput","load_from_db":false},"context":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"context","display_name":"context","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"user_message":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"user_message","display_name":"user_message","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","base_classes":["Message"],"display_name":"Prompt","documentation":"","custom_fields":{"template":["context","user_message"]},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"edited":false},"type":"Prompt"},"dragging":false,"height":498,"id":"Prompt-g0DH3","position":{"x":1890.4863516833816,"y":724.4850836369983},"positionAbsolute":{"x":1890.4863516833816,"y":724.4850836369983},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Get chat inputs from the Playground.","display_name":"Chat Input","id":"ChatInput-7fpJZ","node":{"template":{"_type":"Component","files":{"trace_as_metadata":true,"file_path":"","fileTypes":["txt","md","mdx","csv","json","yaml","yml","xml","html","htm","pdf","docx","py","sh","sql","js","ts","tsx","jpg","jpeg","png","bmp","image"],"list":true,"required":false,"placeholder":"","show":true,"value":"","name":"files","display_name":"Files","advanced":true,"dynamic":false,"info":"Files to be sent with the message.","title_case":false,"type":"file","_input_type":"FileInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n 
advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"what is my name","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as input.","title_case":false,"type":"str","_input_type":"MultilineInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"User","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Get chat inputs from the Playground.","icon":"ChatInput","base_classes":["Message"],"display_name":"Chat Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","files"],"beta":false,"edited":false},"type":"ChatInput"},"dragging":false,"height":298,"id":"ChatInput-7fpJZ","position":{"x":818.0456657708498,"y":811.6396906055986},"positionAbsolute":{"x":818.0456657708498,"y":811.6396906055986},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Display a chat message in the Playground.","display_name":"Chat Output","id":"ChatOutput-wxcw1","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"data_template","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"Machine","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"AI","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false},"type":"ChatOutput"},"height":384,"id":"ChatOutput-wxcw1","position":{"x":2905.211251193657,"y":1298.5481160975671},"selected":false,"type":"genericNode","width":384,"dragging":false,"positionAbsolute":{"x":2905.211251193657,"y":1298.5481160975671}},{"id":"AstraDBChatMemory-G14Y9","type":"genericNode","position":{"x":837.0142407120347,"y":-81.90885956575445},"data":{"type":"AstraDBChatMemory","node":{"template":{"_type":"Component","api_endpoint":{"load_from_db":true,"required":true,"placeholder":"","show":true,"value":"","name":"api_endpoint","display_name":"API Endpoint","advanced":false,"input_types":[],"dynamic":false,"info":"API endpoint URL for the Astra DB service.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.memory.model import LCChatMemoryComponent\nfrom langflow.inputs import MessageTextInput, StrInput, SecretStrInput\nfrom langflow.field_typing import BaseChatMessageHistory\n\n\nclass AstraDBChatMemory(LCChatMemoryComponent):\n display_name = \"Astra DB Chat Memory\"\n description = \"Retrieves and store chat messages from Astra DB.\"\n name = \"AstraDBChatMemory\"\n icon: str = \"AstraDB\"\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n required=True,\n ),\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n required=True,\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n required=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n ]\n\n def build_message_history(self) -> BaseChatMessageHistory:\n try:\n from langchain_astradb.chat_message_histories import AstraDBChatMessageHistory\n except ImportError:\n raise ImportError(\n \"Could not import langchain Astra DB integration package. 
\"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n\n memory = AstraDBChatMessageHistory(\n session_id=self.session_id,\n collection_name=self.collection_name,\n token=self.token,\n api_endpoint=self.api_endpoint,\n namespace=self.namespace or None,\n )\n return memory\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"collection_name":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"","show":true,"value":"memories","name":"collection_name","display_name":"Collection Name","advanced":false,"dynamic":false,"info":"The name of the collection within Astra DB where the vectors will be stored.","title_case":false,"type":"str","_input_type":"StrInput"},"namespace":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"namespace","display_name":"Namespace","advanced":true,"dynamic":false,"info":"Optional namespace within Astra DB to use for the collection.","title_case":false,"type":"str","_input_type":"StrInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"token":{"load_from_db":true,"required":true,"placeholder":"","show":true,"value":"","name":"token","display_name":"Astra DB Application Token","advanced":false,"input_types":[],"dynamic":false,"info":"Authentication token for accessing Astra DB.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"Retrieves and store chat messages from Astra DB.","icon":"AstraDB","base_classes":["BaseChatMessageHistory"],"display_name":"Astra DB Chat Memory","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["BaseChatMessageHistory"],"selected":"BaseChatMessageHistory","name":"memory","display_name":"Memory","method":"build_message_history","value":"__UNDEFINED__","cache":true}],"field_order":["collection_name","token","api_endpoint","namespace","session_id"],"beta":false,"edited":false},"id":"AstraDBChatMemory-G14Y9","description":"Retrieves and store chat messages from Astra DB.","display_name":"Astra DB Chat Memory"},"selected":false,"width":384,"height":584,"positionAbsolute":{"x":837.0142407120347,"y":-81.90885956575445},"dragging":false},{"id":"StoreMessage-ZmqdD","type":"genericNode","position":{"x":1382.3668535221948,"y":552.1130092608814},"data":{"type":"StoreMessage","node":{"template":{"_type":"Component","memory":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"memory","display_name":"External Memory","advanced":false,"input_types":["BaseChatMessageHistory"],"dynamic":false,"info":"The external memory to store the message. 
If empty, it will use the Langflow tables.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.inputs import MessageInput, StrInput, HandleInput\nfrom langflow.schema.message import Message\nfrom langflow.template import Output\nfrom langflow.memory import get_messages, store_message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI\n\n\nclass StoreMessageComponent(Component):\n display_name = \"Store Message\"\n description = \"Stores a chat message or text into Langflow tables or an external memory.\"\n icon = \"save\"\n name = \"StoreMessage\"\n\n inputs = [\n MessageInput(name=\"message\", display_name=\"Message\", info=\"The chat message to be stored.\", required=True),\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"The external memory to store the message. If empty, it will use the Langflow tables.\",\n ),\n StrInput(\n name=\"sender\",\n display_name=\"Sender\",\n info=\"The sender of the message.\",\n advanced=True,\n ),\n StrInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"The name of the sender.\",\n advanced=True,\n ),\n StrInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n value=\"\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Stored Messages\", name=\"stored_messages\", method=\"store_message\"),\n ]\n\n def store_message(self) -> Message:\n message = self.message\n\n message.session_id = self.session_id or message.session_id\n message.sender = self.sender or message.sender or MESSAGE_SENDER_AI\n message.sender_name = self.sender_name or message.sender_name or MESSAGE_SENDER_NAME_AI\n\n if self.memory:\n # override session_id\n self.memory.session_id = message.session_id\n lc_message = message.to_lc_message()\n self.memory.add_messages([lc_message])\n stored = self.memory.messages\n stored = [Message.from_lc_message(m) for m in stored]\n if message.sender:\n stored = [m for m in stored if m.sender == message.sender]\n else:\n store_message(message, flow_id=self.graph.flow_id)\n stored = get_messages(session_id=message.session_id, sender_name=message.sender_name, sender=message.sender)\n self.status = stored\n return stored\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"","show":true,"value":"","name":"message","display_name":"Message","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The chat message to be stored.","title_case":false,"type":"str","_input_type":"MessageInput"},"sender":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender","display_name":"Sender","advanced":true,"dynamic":false,"info":"The sender of the message.","title_case":false,"type":"str","_input_type":"StrInput"},"sender_name":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender_name","display_name":"Sender Name","advanced":true,"dynamic":false,"info":"The name of the 
sender.","title_case":false,"type":"str","_input_type":"StrInput"},"session_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"StrInput"}},"description":"Stores a chat message or text into Langflow tables or an external memory.","icon":"save","base_classes":["Message"],"display_name":"Store Message","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"stored_messages","display_name":"Stored Messages","method":"store_message","value":"__UNDEFINED__","cache":true}],"field_order":["message","memory","sender","sender_name","session_id"],"beta":false,"edited":true},"id":"StoreMessage-ZmqdD","description":"Stores a chat message or text into Langflow tables or an external memory.","display_name":"Store Message"},"selected":false,"width":384,"height":460,"positionAbsolute":{"x":1382.3668535221948,"y":552.1130092608814},"dragging":false},{"id":"StoreMessage-uqpdF","type":"genericNode","position":{"x":3390.7241079317982,"y":585.4542713719517},"data":{"type":"StoreMessage","node":{"template":{"_type":"Component","memory":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"memory","display_name":"External Memory","advanced":false,"input_types":["BaseChatMessageHistory"],"dynamic":false,"info":"The external memory to store the message. If empty, it will use the Langflow tables.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.inputs import MessageInput, StrInput, HandleInput\nfrom langflow.schema.message import Message\nfrom langflow.template import Output\nfrom langflow.memory import get_messages, store_message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI\n\n\nclass StoreMessageComponent(Component):\n display_name = \"Store Message\"\n description = \"Stores a chat message or text into Langflow tables or an external memory.\"\n icon = \"save\"\n name = \"StoreMessage\"\n\n inputs = [\n MessageInput(name=\"message\", display_name=\"Message\", info=\"The chat message to be stored.\", required=True),\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"The external memory to store the message. If empty, it will use the Langflow tables.\",\n ),\n StrInput(\n name=\"sender\",\n display_name=\"Sender\",\n info=\"The sender of the message.\",\n advanced=True,\n ),\n StrInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"The name of the sender.\",\n advanced=True,\n ),\n StrInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n value=\"\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Stored Messages\", name=\"stored_messages\", method=\"store_message\"),\n ]\n\n def store_message(self) -> Message:\n message = self.message\n\n message.session_id = self.session_id or message.session_id\n message.sender = self.sender or message.sender or MESSAGE_SENDER_AI\n message.sender_name = self.sender_name or message.sender_name or MESSAGE_SENDER_NAME_AI\n \n if self.memory:\n # override session_id\n self.memory.session_id = message.session_id\n lc_message = message.to_lc_message()\n self.memory.add_messages([lc_message])\n stored = self.memory.messages\n stored = [Message.from_lc_message(m) for m in stored]\n if message.sender:\n stored = [m for m in stored if m.sender == message.sender]\n else:\n store_message(message, flow_id=self.graph.flow_id)\n stored = get_messages(session_id=message.session_id, sender_name=message.sender_name, sender=message.sender)\n self.status = stored\n return stored\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"","show":true,"value":"","name":"message","display_name":"Message","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The chat message to be stored.","title_case":false,"type":"str","_input_type":"MessageInput"},"sender":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender","display_name":"Sender","advanced":true,"dynamic":false,"info":"The sender of the message.","title_case":false,"type":"str","_input_type":"StrInput"},"sender_name":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender_name","display_name":"Sender Name","advanced":true,"dynamic":false,"info":"The name of the sender.","title_case":false,"type":"str","_input_type":"StrInput"},"session_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"StrInput"}},"description":"Stores a chat message or text into Langflow tables or an external memory.","icon":"save","base_classes":["Message"],"display_name":"Store Message","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"stored_messages","display_name":"Stored Messages","method":"store_message","value":"__UNDEFINED__","cache":true}],"field_order":["message","memory","sender","sender_name","session_id"],"beta":false,"edited":true},"id":"StoreMessage-uqpdF","description":"Stores a chat message or text into Langflow tables or an external memory.","display_name":"Store Message"},"selected":false,"width":384,"height":460,"positionAbsolute":{"x":3390.7241079317982,"y":585.4542713719517},"dragging":false},{"id":"OpenAIModel-TL1ta","type":"genericNode","position":{"x":2394.8044551303005,"y":967.4304980247605},"data":{"type":"OpenAIModel","node":{"template":{"_type":"Component","api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"name":"api_key","value":"","display_name":"OpenAI API Key","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"json_mode","value":false,"display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"name":"max_tokens","value":"","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"model_kwargs","value":{},"display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"model_name","value":"gpt-4o-mini","display_name":"Model Name","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput"},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"openai_api_base","value":"","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"name":"output_schema","value":{},"display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"seed","value":1,"display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"stream","value":false,"display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_message","value":"","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"temperature","value":0.1,"display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed"],"beta":false,"edited":false},"id":"OpenAIModel-TL1ta","description":"Generates text using OpenAI LLMs.","display_name":"OpenAI"},"selected":false,"width":384,"height":601,"positionAbsolute":{"x":2394.8044551303005,"y":967.4304980247605},"dragging":false},{"id":"Memory-mgRkE","type":"genericNode","position":{"x":1375.373727378358,"y":1259.3743794430238},"data":{"type":"Memory","node":{"template":{"_type":"Component","memory":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"memory","display_name":"External Memory","advanced":false,"input_types":["BaseChatMessageHistory"],"dynamic":false,"info":"Retrieve messages from an external memory. If empty, it will use the Langflow tables.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import get_messages, LCBuiltinChatMemory\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import BaseChatMemory\nfrom langchain.memory import ConversationBufferMemory\n\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.graph.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"n_messages":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":100,"name":"n_messages","display_name":"Number of Messages","advanced":true,"dynamic":false,"info":"Number of messages to retrieve.","title_case":false,"type":"int","_input_type":"IntInput"},"order":{"trace_as_metadata":true,"options":["Ascending","Descending"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"Ascending","name":"order","display_name":"Order","advanced":true,"dynamic":false,"info":"Order of the 
messages.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User","Machine and User"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"Machine and User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Filter by sender type.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Filter by sender name.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{sender_name}: {text}","name":"template","display_name":"Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Retrieves stored chat messages from Langflow tables or an external memory.","icon":"message-square-more","base_classes":["BaseChatMemory","Data","Message"],"display_name":"Chat Memory","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"messages","display_name":"Messages (Data)","method":"retrieve_messages","value":"__UNDEFINED__","cache":true},{"types":["Message"],"selected":"Message","name":"messages_text","display_name":"Messages (Text)","method":"retrieve_messages_as_text","value":"__UNDEFINED__","cache":true},{"types":["BaseChatMemory"],"selected":"BaseChatMemory","name":"lc_memory","display_name":"Memory","method":"build_lc_memory","value":"__UNDEFINED__","cache":true}],"field_order":["memory","sender","sender_name","n_messages","session_id","order","template"],"beta":false,"edited":true},"id":"Memory-mgRkE","description":"Retrieves stored chat messages from Langflow tables or an external memory.","display_name":"Chat 
Memory"},"selected":false,"width":384,"height":464,"positionAbsolute":{"x":1375.373727378358,"y":1259.3743794430238},"dragging":false}],"edges":[{"className":"","data":{"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-7fpJZ","name":"message","output_types":["Message"]},"targetHandle":{"fieldName":"user_message","id":"Prompt-g0DH3","inputTypes":["Message","Text"],"type":"str"}},"id":"reactflow__edge-ChatInput-7fpJZ{œdataTypeœ:œChatInputœ,œidœ:œChatInput-7fpJZœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-g0DH3{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-g0DH3œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","source":"ChatInput-7fpJZ","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-7fpJZœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-g0DH3","targetHandle":"{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-g0DH3œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","selected":false},{"source":"ChatInput-7fpJZ","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-7fpJZœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"StoreMessage-ZmqdD","targetHandle":"{œfieldNameœ:œmessageœ,œidœ:œStoreMessage-ZmqdDœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"message","id":"StoreMessage-ZmqdD","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-7fpJZ","name":"message","output_types":["Message"]}},"id":"reactflow__edge-ChatInput-7fpJZ{œdataTypeœ:œChatInputœ,œidœ:œChatInput-7fpJZœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-StoreMessage-ZmqdD{œfieldNameœ:œmessageœ,œidœ:œStoreMessage-ZmqdDœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"ChatOutput-wxcw1","sourceHandle":"{œdataTypeœ:œChatOutputœ,œidœ:œChatOutput-wxcw1œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"StoreMessage-uqpdF","targetHandle":"{œfieldNameœ:œmessageœ,œidœ:œStoreMessage-uqpdFœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"message","id":"StoreMessage-uqpdF","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"ChatOutput","id":"ChatOutput-wxcw1","name":"message","output_types":["Message"]}},"id":"reactflow__edge-ChatOutput-wxcw1{œdataTypeœ:œChatOutputœ,œidœ:œChatOutput-wxcw1œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-StoreMessage-uqpdF{œfieldNameœ:œmessageœ,œidœ:œStoreMessage-uqpdFœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"Prompt-g0DH3","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-g0DH3œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"OpenAIModel-TL1ta","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-TL1taœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"OpenAIModel-TL1ta","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-g0DH3","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-Prompt-g0DH3{œdataTypeœ:œPromptœ,œidœ:œPrompt-g0DH3œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-TL1ta{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-TL1taœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"OpenAIModel-TL1ta","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-TL1taœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-wxcw1","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-wxcw1œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-wxcw1","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Open
AIModel","id":"OpenAIModel-TL1ta","name":"text_output","output_types":["Message"]}},"id":"reactflow__edge-OpenAIModel-TL1ta{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-TL1taœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-wxcw1{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-wxcw1œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"AstraDBChatMemory-G14Y9","sourceHandle":"{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-G14Y9œ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}","target":"StoreMessage-uqpdF","targetHandle":"{œfieldNameœ:œmemoryœ,œidœ:œStoreMessage-uqpdFœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"memory","id":"StoreMessage-uqpdF","inputTypes":["BaseChatMessageHistory"],"type":"other"},"sourceHandle":{"dataType":"AstraDBChatMemory","id":"AstraDBChatMemory-G14Y9","name":"memory","output_types":["BaseChatMessageHistory"]}},"id":"reactflow__edge-AstraDBChatMemory-G14Y9{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-G14Y9œ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}-StoreMessage-uqpdF{œfieldNameœ:œmemoryœ,œidœ:œStoreMessage-uqpdFœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","className":""},{"source":"AstraDBChatMemory-G14Y9","sourceHandle":"{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-G14Y9œ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}","target":"StoreMessage-ZmqdD","targetHandle":"{œfieldNameœ:œmemoryœ,œidœ:œStoreMessage-ZmqdDœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"memory","id":"StoreMessage-ZmqdD","inputTypes":["BaseChatMessageHistory"],"type":"other"},"sourceHandle":{"dataType":"AstraDBChatMemory","id":"AstraDBChatMemory-G14Y9","name":"memory","output_types":["BaseChatMessageHistory"]}},"selected":false,"id":"reactflow__edge-AstraDBChatMemory-G14Y9{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-G14Y9œ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}-StoreMessage-ZmqdD{œfieldNameœ:œmemoryœ,œidœ:œStoreMessage-ZmqdDœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","className":""},{"source":"Memory-mgRkE","sourceHandle":"{œdataTypeœ:œMemoryœ,œidœ:œMemory-mgRkEœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-g0DH3","targetHandle":"{œfieldNameœ:œcontextœ,œidœ:œPrompt-g0DH3œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"context","id":"Prompt-g0DH3","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"Memory","id":"Memory-mgRkE","name":"messages_text","output_types":["Message"]}},"className":"","id":"reactflow__edge-Memory-mgRkE{œdataTypeœ:œMemoryœ,œidœ:œMemory-mgRkEœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-g0DH3{œfieldNameœ:œcontextœ,œidœ:œPrompt-g0DH3œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"source":"AstraDBChatMemory-G14Y9","sourceHandle":"{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMemory-G14Y9œ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}","target":"Memory-mgRkE","targetHandle":"{œfieldNameœ:œmemoryœ,œidœ:œMemory-mgRkEœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"memory","id":"Memory-mgRkE","inputTypes":["BaseChatMessageHistory"],"type":"other"},"sourceHandle":{"dataType":"AstraDBChatMemory","id":"AstraDBChatMemory-G14Y9","name":"memory","output_types":["BaseChatMessageHistory"]}},"id":"reactflow__edge-AstraDBChatMemory-G14Y9{œdataTypeœ:œAstraDBChatMemoryœ,œidœ:œAstraDBChatMem
ory-G14Y9œ,œnameœ:œmemoryœ,œoutput_typesœ:[œBaseChatMessageHistoryœ]}-Memory-mgRkE{œfieldNameœ:œmemoryœ,œidœ:œMemory-mgRkEœ,œinputTypesœ:[œBaseChatMessageHistoryœ],œtypeœ:œotherœ}","className":""}],"viewport":{"x":-398.23492411855796,"y":-74.87365616607542,"zoom":0.5225899319671766}},"description":"This project can be used as a starting point for building a Chat experience with user specific memory. You can set a different Session ID to start a new message history.","name":"AstraMemoryChatbot","last_tested_version":"1.0.17","endpoint_name":"astra-memory-chatbot","is_component":false}
\ No newline at end of file
diff --git a/flows/BasicMemoryChatbot.json b/flows/BasicMemoryChatbot.json
index 7406ffe..751659f 100644
--- a/flows/BasicMemoryChatbot.json
+++ b/flows/BasicMemoryChatbot.json
@@ -1 +1 @@
-{"id":"a01c4246-5f32-438a-ae32-2804abea19d0","data":{"nodes":[{"data":{"description":"Create a prompt template with dynamic variables.","display_name":"Prompt","id":"Prompt-dRigH","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":"{context}\n\nUser: {user_message}\nAI: ","name":"template","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","load_from_db":false},"context":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"context","display_name":"context","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"user_message":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"user_message","display_name":"user_message","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","base_classes":["Message"],"display_name":"Prompt","documentation":"","custom_fields":{"template":["context","user_message"]},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","display_name":"Prompt 
Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"edited":false},"type":"Prompt"},"dragging":false,"height":522,"id":"Prompt-dRigH","position":{"x":1880.8227904110583,"y":625.8049209882275},"positionAbsolute":{"x":1880.8227904110583,"y":625.8049209882275},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Get chat inputs from the Playground.","display_name":"Chat Input","id":"ChatInput-cmpQG","node":{"template":{"_type":"Component","files":{"trace_as_metadata":true,"file_path":"","fileTypes":["txt","md","mdx","csv","json","yaml","yml","xml","html","htm","pdf","docx","py","sh","sql","js","ts","tsx","jpg","jpeg","png","bmp","image"],"list":true,"required":false,"placeholder":"","show":true,"value":"","name":"files","display_name":"Files","advanced":true,"dynamic":false,"info":"Files to be sent with the message.","title_case":false,"type":"file","_input_type":"FileInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"what is my name","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as input.","title_case":false,"type":"str","_input_type":"MultilineInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"User","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Get chat inputs from the Playground.","icon":"ChatInput","base_classes":["Message"],"display_name":"Chat Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","files"],"beta":false,"edited":false},"type":"ChatInput"},"dragging":false,"height":315,"id":"ChatInput-cmpQG","position":{"x":1283.580404331952,"y":1071.4889933561165},"positionAbsolute":{"x":1283.580404331952,"y":1071.4889933561165},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Generates text using OpenAI LLMs.","display_name":"OpenAI","id":"OpenAIModel-vmvpM","node":{"template":{"_type":"Component","api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"value":"","name":"api_key","display_name":"OpenAI API Key","advanced":false,"input_types":[],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"json_mode","display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"max_tokens","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":{},"name":"model_kwargs","display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"combobox":false,"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"required":false,"placeholder":"","show":true,"value":"gpt-4o","name":"model_name","display_name":"Model Name","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput","load_from_db":false},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"openai_api_base","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"value":{},"name":"output_schema","display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":1,"name":"seed","display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"stream","display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"system_message","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":0.1,"name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed"],"beta":false,"edited":false},"type":"OpenAIModel"},"dragging":true,"height":635,"id":"OpenAIModel-vmvpM","position":{"x":2530.2018592056684,"y":372.54142059595915},"positionAbsolute":{"x":2530.2018592056684,"y":372.54142059595915},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Display a chat message in the Playground.","display_name":"Chat Output","id":"ChatOutput-caYfT","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"data_template","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"Machine","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"AI","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false},"type":"ChatOutput"},"height":315,"id":"ChatOutput-caYfT","position":{"x":3083.1710516244116,"y":701.521688846004},"selected":false,"type":"genericNode","width":384,"dragging":false},{"data":{"description":"Retrieves stored chat messages from Langflow tables or an external memory.","display_name":"Chat Memory","id":"Memory-wcuYr","node":{"template":{"_type":"Component","memory":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"memory","display_name":"External Memory","advanced":false,"input_types":["BaseChatMessageHistory"],"dynamic":false,"info":"Retrieve messages from an external memory. If empty, it will use the Langflow tables.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import get_messages, LCBuiltinChatMemory\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import BaseChatMemory\nfrom langchain.memory import ConversationBufferMemory\n\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n if order == \"ASC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.graph.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"n_messages":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":100,"name":"n_messages","display_name":"Number of Messages","advanced":true,"dynamic":false,"info":"Number of messages to retrieve.","title_case":false,"type":"int","_input_type":"IntInput"},"order":{"combobox":false,"trace_as_metadata":true,"options":["Ascending","Descending"],"required":false,"placeholder":"","show":true,"value":"Ascending","name":"order","display_name":"Order","advanced":true,"dynamic":false,"info":"Order of the messages.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User","Machine and User"],"required":false,"placeholder":"","show":true,"value":"Machine and User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Filter by sender type.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Filter by sender 
name.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{sender_name}: {text}","name":"template","display_name":"Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Retrieves stored chat messages from Langflow tables or an external memory.","icon":"message-square-more","base_classes":["BaseChatMemory","Data","Message"],"display_name":"Chat Memory","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"messages","display_name":"Messages (Data)","method":"retrieve_messages","value":"__UNDEFINED__","cache":true},{"types":["Message"],"selected":"Message","name":"messages_text","display_name":"Messages (Text)","method":"retrieve_messages_as_text","value":"__UNDEFINED__","cache":true},{"types":["BaseChatMemory"],"selected":"BaseChatMemory","name":"lc_memory","display_name":"Memory","method":"build_lc_memory","value":"__UNDEFINED__","cache":true}],"field_order":["memory","sender","sender_name","n_messages","session_id","order","template"],"beta":false,"edited":false},"type":"Memory"},"dragging":false,"height":511,"id":"Memory-wcuYr","position":{"x":1275.1936550508672,"y":420.4251098153348},"positionAbsolute":{"x":1275.1936550508672,"y":420.4251098153348},"selected":true,"type":"genericNode","width":384},{"id":"GoogleGenerativeAIModel-wVHD4","type":"genericNode","position":{"x":2527.5499536627904,"y":1089.6495372148831},"data":{"type":"GoogleGenerativeAIModel","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput\n\n\nclass GoogleGenerativeAIComponent(LCModelComponent):\n display_name = \"Google Generative AI\"\n description = \"Generate text using Google Generative AI.\"\n icon = \"GoogleGenerativeAI\"\n name = \"GoogleGenerativeAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_output_tokens\",\n display_name=\"Max Output Tokens\",\n info=\"The maximum number of tokens to generate.\",\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n info=\"The name of the model to use.\",\n options=[\"gemini-1.5-pro\", \"gemini-1.5-flash\", \"gemini-1.0-pro\", \"gemini-1.0-pro-vision\"],\n value=\"gemini-1.5-pro\",\n ),\n SecretStrInput(\n name=\"google_api_key\",\n display_name=\"Google API Key\",\n info=\"The Google API Key to use for the Google Generative AI.\",\n ),\n FloatInput(\n name=\"top_p\",\n display_name=\"Top P\",\n 
info=\"The maximum cumulative probability of tokens to consider when sampling.\",\n advanced=True,\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"n\",\n display_name=\"N\",\n info=\"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.\",\n advanced=True,\n ),\n IntInput(\n name=\"top_k\",\n display_name=\"Top K\",\n info=\"Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.\",\n advanced=True,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_google_genai import ChatGoogleGenerativeAI\n except ImportError:\n raise ImportError(\"The 'langchain_google_genai' package is required to use the Google Generative AI model.\")\n\n google_api_key = self.google_api_key\n model = self.model\n max_output_tokens = self.max_output_tokens\n temperature = self.temperature\n top_k = self.top_k\n top_p = self.top_p\n n = self.n\n\n output = ChatGoogleGenerativeAI( # type: ignore\n model=model,\n max_output_tokens=max_output_tokens or None,\n temperature=temperature,\n top_k=top_k or None,\n top_p=top_p or None,\n n=n or 1,\n google_api_key=SecretStr(google_api_key),\n )\n\n return output # type: ignore\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"google_api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"value":"","name":"google_api_key","display_name":"Google API Key","advanced":false,"input_types":[],"dynamic":false,"info":"The Google API Key to use for the Google Generative AI.","title_case":false,"password":true,"type":"str"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str"},"max_output_tokens":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"max_output_tokens","display_name":"Max Output Tokens","advanced":false,"dynamic":false,"info":"The maximum number of tokens to generate.","title_case":false,"type":"int"},"model":{"trace_as_metadata":true,"options":["gemini-1.5-pro","gemini-1.5-flash","gemini-1.0-pro","gemini-1.0-pro-vision"],"required":false,"placeholder":"","show":true,"value":"gemini-1.5-flash","name":"model","display_name":"Model","advanced":false,"dynamic":false,"info":"The name of the model to use.","title_case":false,"type":"str","load_from_db":false},"n":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"n","display_name":"N","advanced":true,"dynamic":false,"info":"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.","title_case":false,"type":"int"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"stream","display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"system_message","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":0.1,"name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float"},"top_k":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"top_k","display_name":"Top K","advanced":true,"dynamic":false,"info":"Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.","title_case":false,"type":"int"},"top_p":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"top_p","display_name":"Top P","advanced":true,"dynamic":false,"info":"The maximum cumulative probability of tokens to consider when sampling.","title_case":false,"type":"float"}},"description":"Generate text using Google Generative AI.","icon":"GoogleGenerativeAI","base_classes":["LanguageModel","Message"],"display_name":"Google Generative AI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_output_tokens","model","google_api_key","top_p","temperature","n","top_k"],"beta":false,"edited":false},"id":"GoogleGenerativeAIModel-wVHD4","description":"Generate text using Google Generative AI.","display_name":"Google Generative 
AI"},"selected":false,"width":384,"height":705,"positionAbsolute":{"x":2527.5499536627904,"y":1089.6495372148831},"dragging":false}],"edges":[{"className":"","data":{"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-cmpQG","name":"message","output_types":["Message"]},"targetHandle":{"fieldName":"user_message","id":"Prompt-dRigH","inputTypes":["Message","Text"],"type":"str"}},"id":"reactflow__edge-ChatInput-cmpQG{œdataTypeœ:œChatInputœ,œidœ:œChatInput-cmpQGœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-dRigH{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-dRigHœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","source":"ChatInput-cmpQG","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-cmpQGœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-dRigH","targetHandle":"{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-dRigHœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"Memory","id":"Memory-wcuYr","name":"messages_text","output_types":["Message"]},"targetHandle":{"fieldName":"context","id":"Prompt-dRigH","inputTypes":["Message","Text"],"type":"str"}},"id":"reactflow__edge-Memory-wcuYr{œdataTypeœ:œMemoryœ,œidœ:œMemory-wcuYrœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-dRigH{œfieldNameœ:œcontextœ,œidœ:œPrompt-dRigHœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","source":"Memory-wcuYr","sourceHandle":"{œdataTypeœ:œMemoryœ,œidœ:œMemory-wcuYrœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-dRigH","targetHandle":"{œfieldNameœ:œcontextœ,œidœ:œPrompt-dRigHœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"Prompt","id":"Prompt-dRigH","name":"prompt","output_types":["Message"]},"targetHandle":{"fieldName":"input_value","id":"OpenAIModel-vmvpM","inputTypes":["Message"],"type":"str"}},"source":"Prompt-dRigH","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-dRigHœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"OpenAIModel-vmvpM","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-vmvpMœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-Prompt-dRigH{œdataTypeœ:œPromptœ,œidœ:œPrompt-dRigHœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-vmvpM{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-vmvpMœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"OpenAIModel","id":"OpenAIModel-vmvpM","name":"text_output","output_types":["Message"]},"targetHandle":{"fieldName":"input_value","id":"ChatOutput-caYfT","inputTypes":["Message"],"type":"str"}},"source":"OpenAIModel-vmvpM","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-vmvpMœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-caYfT","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-caYfTœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-OpenAIModel-vmvpM{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-vmvpMœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-caYfT{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-caYfTœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"}],"viewport":{"x":-408.8721583282727,"y":-49.61696339765376,"zoom":0.5225899319671745}},"description":"This project can be used as a starting point for building a Chat experience with user specific memory. You can set a different Session ID to start a new message history.","name":"Basic Memory Chatbot","last_tested_version":"1.0.14","endpoint_name":"basic-memory-chatbot","is_component":false}
\ No newline at end of file
+{"id":"84e55d35-ffd9-4f02-8e63-d33d25cb65d3","data":{"nodes":[{"data":{"description":"Create a prompt template with dynamic variables.","display_name":"Prompt","id":"Prompt-ZnsWa","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"{context}\n\nUser: {user_message}\nAI: 
","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput","load_from_db":false},"context":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"context","display_name":"context","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"user_message":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"user_message","display_name":"user_message","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","base_classes":["Message"],"display_name":"Prompt","documentation":"","custom_fields":{"template":["context","user_message"]},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"edited":false},"type":"Prompt"},"dragging":false,"height":498,"id":"Prompt-ZnsWa","position":{"x":1880.8227904110583,"y":625.8049209882275},"positionAbsolute":{"x":1880.8227904110583,"y":625.8049209882275},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Get chat inputs from the Playground.","display_name":"Chat Input","id":"ChatInput-302RQ","node":{"template":{"_type":"Component","files":{"trace_as_metadata":true,"file_path":"","fileTypes":["txt","md","mdx","csv","json","yaml","yml","xml","html","htm","pdf","docx","py","sh","sql","js","ts","tsx","jpg","jpeg","png","bmp","image"],"list":true,"required":false,"placeholder":"","show":true,"value":"","name":"files","display_name":"Files","advanced":true,"dynamic":false,"info":"Files to be sent with the message.","title_case":false,"type":"file","_input_type":"FileInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n 
advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"what is my name","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as input.","title_case":false,"type":"str","_input_type":"MultilineInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"User","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Get chat inputs from the Playground.","icon":"ChatInput","base_classes":["Message"],"display_name":"Chat Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","files"],"beta":false,"edited":false},"type":"ChatInput"},"dragging":false,"height":298,"id":"ChatInput-302RQ","position":{"x":1287.4074968143339,"y":1071.4889933561165},"positionAbsolute":{"x":1287.4074968143339,"y":1071.4889933561165},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Generates text using OpenAI LLMs.","display_name":"OpenAI","id":"OpenAIModel-irqkh","node":{"template":{"_type":"Component","api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"name":"api_key","value":"","display_name":"OpenAI API Key","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"json_mode","value":false,"display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"name":"max_tokens","value":"","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"model_kwargs","value":{},"display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"model_name","value":"gpt-4o","display_name":"Model Name","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput","load_from_db":false},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"openai_api_base","value":"","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"name":"output_schema","value":{},"display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"seed","value":1,"display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"stream","value":false,"display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_message","value":"","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"temperature","value":0.1,"display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed"],"beta":false,"edited":false},"type":"OpenAIModel"},"dragging":false,"height":601,"id":"OpenAIModel-irqkh","position":{"x":2530.2018592056684,"y":372.54142059595915},"positionAbsolute":{"x":2530.2018592056684,"y":372.54142059595915},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Display a chat message in the Playground.","display_name":"Chat Output","id":"ChatOutput-Y7mBW","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"data_template","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"Machine","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"AI","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false},"type":"ChatOutput"},"height":298,"id":"ChatOutput-Y7mBW","position":{"x":3086.9981441067935,"y":701.521688846004},"selected":false,"type":"genericNode","width":384,"dragging":false,"positionAbsolute":{"x":3086.9981441067935,"y":701.521688846004}},{"data":{"description":"Retrieves stored chat messages from Langflow tables or an external memory.","display_name":"Chat Memory","id":"Memory-HZ9Jm","node":{"template":{"_type":"Component","memory":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"memory","value":"","display_name":"External Memory","advanced":false,"input_types":["BaseChatMessageHistory"],"dynamic":false,"info":"Retrieve messages from an external memory. If empty, it will use the Langflow tables.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langchain.memory import ConversationBufferMemory\n\nfrom langflow.custom import Component\nfrom langflow.field_typing import BaseChatMemory\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import LCBuiltinChatMemory, get_messages\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n # langchain memories are supposed to return messages in ascending order\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"n_messages":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"n_messages","value":100,"display_name":"Number of Messages","advanced":true,"dynamic":false,"info":"Number of messages to retrieve.","title_case":false,"type":"int","_input_type":"IntInput"},"order":{"trace_as_metadata":true,"options":["Ascending","Descending"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"order","value":"Ascending","display_name":"Order","advanced":true,"dynamic":false,"info":"Order of the messages.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User","Machine and User"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sender","value":"Machine and User","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Filter by sender type.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sender_name","value":"","display_name":"Sender 
Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Filter by sender name.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"session_id","value":"","display_name":"Session ID","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"{sender_name}: {text}","display_name":"Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Retrieves stored chat messages from Langflow tables or an external memory.","icon":"message-square-more","base_classes":["BaseChatMemory","Data","Message"],"display_name":"Chat Memory","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"messages","display_name":"Messages (Data)","method":"retrieve_messages","value":"__UNDEFINED__","cache":true},{"types":["Message"],"selected":"Message","name":"messages_text","display_name":"Messages (Text)","method":"retrieve_messages_as_text","value":"__UNDEFINED__","cache":true},{"types":["BaseChatMemory"],"selected":"BaseChatMemory","name":"lc_memory","display_name":"Memory","method":"build_lc_memory","value":"__UNDEFINED__","cache":true}],"field_order":["memory","sender","sender_name","n_messages","session_id","order","template"],"beta":false,"edited":false},"type":"Memory"},"dragging":false,"height":464,"id":"Memory-HZ9Jm","position":{"x":1275.1936550508672,"y":420.4251098153348},"positionAbsolute":{"x":1275.1936550508672,"y":420.4251098153348},"selected":false,"type":"genericNode","width":384},{"id":"GoogleGenerativeAIModel-kOuJJ","type":"genericNode","position":{"x":2527.5499536627904,"y":1089.6495372148831},"data":{"type":"GoogleGenerativeAIModel","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput\n\n\nclass GoogleGenerativeAIComponent(LCModelComponent):\n display_name = \"Google Generative AI\"\n description = \"Generate text using Google Generative AI.\"\n icon = \"GoogleGenerativeAI\"\n name = \"GoogleGenerativeAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_output_tokens\",\n display_name=\"Max Output Tokens\",\n info=\"The maximum number of tokens to generate.\",\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n info=\"The name of the model to use.\",\n options=[\"gemini-1.5-pro\", \"gemini-1.5-flash\", \"gemini-1.0-pro\", \"gemini-1.0-pro-vision\"],\n value=\"gemini-1.5-pro\",\n ),\n SecretStrInput(\n name=\"google_api_key\",\n display_name=\"Google API Key\",\n info=\"The Google API Key to use for the 
Google Generative AI.\",\n ),\n FloatInput(\n name=\"top_p\",\n display_name=\"Top P\",\n info=\"The maximum cumulative probability of tokens to consider when sampling.\",\n advanced=True,\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"n\",\n display_name=\"N\",\n info=\"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.\",\n advanced=True,\n ),\n IntInput(\n name=\"top_k\",\n display_name=\"Top K\",\n info=\"Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.\",\n advanced=True,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_google_genai import ChatGoogleGenerativeAI\n except ImportError:\n raise ImportError(\"The 'langchain_google_genai' package is required to use the Google Generative AI model.\")\n\n google_api_key = self.google_api_key\n model = self.model\n max_output_tokens = self.max_output_tokens\n temperature = self.temperature\n top_k = self.top_k\n top_p = self.top_p\n n = self.n\n\n output = ChatGoogleGenerativeAI( # type: ignore\n model=model,\n max_output_tokens=max_output_tokens or None,\n temperature=temperature,\n top_k=top_k or None,\n top_p=top_p or None,\n n=n or 1,\n google_api_key=SecretStr(google_api_key),\n )\n\n return output # type: ignore\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"google_api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"value":"","name":"google_api_key","display_name":"Google API Key","advanced":false,"input_types":[],"dynamic":false,"info":"The Google API Key to use for the Google Generative AI.","title_case":false,"password":true,"type":"str"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str"},"max_output_tokens":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"max_output_tokens","display_name":"Max Output Tokens","advanced":false,"dynamic":false,"info":"The maximum number of tokens to generate.","title_case":false,"type":"int"},"model":{"trace_as_metadata":true,"options":["gemini-1.5-pro","gemini-1.5-flash","gemini-1.0-pro","gemini-1.0-pro-vision"],"required":false,"placeholder":"","show":true,"value":"gemini-1.5-flash","name":"model","display_name":"Model","advanced":false,"dynamic":false,"info":"The name of the model to use.","title_case":false,"type":"str","load_from_db":false},"n":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"n","display_name":"N","advanced":true,"dynamic":false,"info":"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.","title_case":false,"type":"int"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"stream","display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"system_message","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":0.1,"name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float"},"top_k":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"top_k","display_name":"Top K","advanced":true,"dynamic":false,"info":"Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.","title_case":false,"type":"int"},"top_p":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"top_p","display_name":"Top P","advanced":true,"dynamic":false,"info":"The maximum cumulative probability of tokens to consider when sampling.","title_case":false,"type":"float"}},"description":"Generate text using Google Generative AI.","icon":"GoogleGenerativeAI","base_classes":["LanguageModel","Message"],"display_name":"Google Generative AI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_output_tokens","model","google_api_key","top_p","temperature","n","top_k"],"beta":false,"edited":false},"id":"GoogleGenerativeAIModel-kOuJJ","description":"Generate text using Google Generative AI.","display_name":"Google Generative 
AI"},"selected":false,"width":384,"height":687,"positionAbsolute":{"x":2527.5499536627904,"y":1089.6495372148831},"dragging":false}],"edges":[{"className":"","data":{"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-302RQ","name":"message","output_types":["Message"]},"targetHandle":{"fieldName":"user_message","id":"Prompt-ZnsWa","inputTypes":["Message","Text"],"type":"str"}},"id":"reactflow__edge-ChatInput-302RQ{œdataTypeœ:œChatInputœ,œidœ:œChatInput-302RQœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-ZnsWa{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-ZnsWaœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","source":"ChatInput-302RQ","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-302RQœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-ZnsWa","targetHandle":"{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-ZnsWaœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"Memory","id":"Memory-HZ9Jm","name":"messages_text","output_types":["Message"]},"targetHandle":{"fieldName":"context","id":"Prompt-ZnsWa","inputTypes":["Message","Text"],"type":"str"}},"id":"reactflow__edge-Memory-HZ9Jm{œdataTypeœ:œMemoryœ,œidœ:œMemory-HZ9Jmœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-ZnsWa{œfieldNameœ:œcontextœ,œidœ:œPrompt-ZnsWaœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","source":"Memory-HZ9Jm","sourceHandle":"{œdataTypeœ:œMemoryœ,œidœ:œMemory-HZ9Jmœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-ZnsWa","targetHandle":"{œfieldNameœ:œcontextœ,œidœ:œPrompt-ZnsWaœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"Prompt","id":"Prompt-ZnsWa","name":"prompt","output_types":["Message"]},"targetHandle":{"fieldName":"input_value","id":"OpenAIModel-irqkh","inputTypes":["Message"],"type":"str"}},"source":"Prompt-ZnsWa","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-ZnsWaœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"OpenAIModel-irqkh","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-irqkhœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-Prompt-ZnsWa{œdataTypeœ:œPromptœ,œidœ:œPrompt-ZnsWaœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-irqkh{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-irqkhœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"OpenAIModel","id":"OpenAIModel-irqkh","name":"text_output","output_types":["Message"]},"targetHandle":{"fieldName":"input_value","id":"ChatOutput-Y7mBW","inputTypes":["Message"],"type":"str"}},"source":"OpenAIModel-irqkh","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-irqkhœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-Y7mBW","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Y7mBWœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-OpenAIModel-irqkh{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-irqkhœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-Y7mBW{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Y7mBWœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"}],"viewport":{"x":-749.2514477078926,"y":-191.67643906569356,"zoom":0.6895615493209557}},"description":"This project can be used as a starting point for building a Chat experience with user specific memory. You can set a different Session ID to start a new message history.","name":"BasicMemoryChatbot","last_tested_version":"1.0.17","endpoint_name":"basic-memory-chatbot","is_component":false}
\ No newline at end of file
diff --git a/flows/PiratePrompt.json b/flows/PiratePrompt.json
index 09e26fe..ddac183 100644
--- a/flows/PiratePrompt.json
+++ b/flows/PiratePrompt.json
@@ -1 +1 @@
-{"id":"53f0905d-13d4-49a7-a6e7-d452e0cc129d","data":{"nodes":[{"data":{"description":"Get chat inputs from the Playground.","display_name":"Chat Input","id":"ChatInput-Dbm3h","node":{"template":{"_type":"Component","files":{"trace_as_metadata":true,"file_path":"","fileTypes":["txt","md","mdx","csv","json","yaml","yml","xml","html","htm","pdf","docx","py","sh","sql","js","ts","tsx","jpg","jpeg","png","bmp","image"],"list":true,"required":false,"placeholder":"","show":true,"value":"","name":"files","display_name":"Files","advanced":true,"dynamic":false,"info":"Files to be sent with the message.","title_case":false,"type":"file","_input_type":"FileInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"hello","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as input.","title_case":false,"type":"str","_input_type":"MultilineInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"User","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Get chat inputs from the Playground.","icon":"ChatInput","base_classes":["Message"],"display_name":"Chat Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","files"],"beta":false,"edited":false},"type":"ChatInput"},"dragging":false,"height":315,"id":"ChatInput-Dbm3h","position":{"x":-493.6459512396177,"y":1083.200545525551},"positionAbsolute":{"x":-493.6459512396177,"y":1083.200545525551},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Create a prompt template with dynamic variables.","display_name":"Prompt","id":"Prompt-6FsmA","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: 
","name":"template","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","load_from_db":false},"user_input":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"user_input","display_name":"user_input","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","base_classes":["Message"],"display_name":"Prompt","documentation":"","custom_fields":{"template":["user_input"]},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"edited":false},"type":"Prompt"},"dragging":false,"height":421,"id":"Prompt-6FsmA","position":{"x":56.354011530798516,"y":1157.2005405164796},"positionAbsolute":{"x":56.354011530798516,"y":1157.2005405164796},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Display a chat message in the Playground.","display_name":"Chat Output","id":"ChatOutput-q0SlV","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"data_template","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"Machine","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"AI","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Session ID for the message.","title_case":false,"type":"str"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false},"type":"ChatOutput"},"dragging":false,"height":307,"id":"ChatOutput-q0SlV","position":{"x":1219.477374823274,"y":1200.950216973985},"positionAbsolute":{"x":1219.477374823274,"y":1200.950216973985},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Generates text 
using OpenAI LLMs.","display_name":"OpenAI","id":"OpenAIModel-gg1Ma","node":{"template":{"_type":"Component","api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"value":"","name":"api_key","display_name":"OpenAI API Key","advanced":false,"input_types":[],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"json_mode","display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"max_tokens","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":{},"name":"model_kwargs","display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"combobox":false,"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"required":false,"placeholder":"","show":true,"value":"gpt-4o","name":"model_name","display_name":"Model Name","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput","load_from_db":false},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"openai_api_base","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"value":{},"name":"output_schema","display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":1,"name":"seed","display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"stream","display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"system_message","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":0.1,"name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed"],"beta":false,"edited":false},"type":"OpenAIModel"},"dragging":false,"height":619,"id":"OpenAIModel-gg1Ma","position":{"x":664.0296638933031,"y":1026.5966174731725},"positionAbsolute":{"x":664.0296638933031,"y":1026.5966174731725},"selected":true,"type":"genericNode","width":384}],"edges":[{"className":"","data":{"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-Dbm3h","name":"message","output_types":["Message"]},"targetHandle":{"fieldName":"user_input","id":"Prompt-6FsmA","inputTypes":["Message","Text"],"type":"str"}},"id":"reactflow__edge-ChatInput-Dbm3h{œdataTypeœ:œChatInputœ,œidœ:œChatInput-Dbm3hœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-6FsmA{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-6FsmAœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","source":"ChatInput-Dbm3h","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-Dbm3hœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-6FsmA","targetHandle":"{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-6FsmAœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"Prompt","id":"Prompt-6FsmA","name":"prompt","output_types":["Message"]},"targetHandle":{"fieldName":"input_value","id":"OpenAIModel-gg1Ma","inputTypes":["Message"],"type":"str"}},"id":"reactflow__edge-Prompt-6FsmA{œdataTypeœ:œPromptœ,œidœ:œPrompt-6FsmAœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-gg1Ma{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-gg1Maœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","source":"Prompt-6FsmA","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-6FsmAœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"OpenAIModel-gg1Ma","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-gg1Maœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"OpenAIModel","id":"OpenAIModel-gg1Ma","name":"text_output","output_types":["Message"]},"targetHandle":{"fieldName":"input_value","id":"ChatOutput-q0SlV","inputTypes":["Message"],"type":"str"}},"id":"reactflow__edge-OpenAIModel-gg1Ma{œdataTypeœ:œOpenAIModelœ,œidœ
:œOpenAIModel-gg1Maœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-q0SlV{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-q0SlVœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","source":"OpenAIModel-gg1Ma","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-gg1Maœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-q0SlV","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-q0SlVœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"}],"viewport":{"x":427.12410642709597,"y":-361.39815091467085,"zoom":0.5562299357713679}},"description":"This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. \nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ","name":"Pirate Prompt","last_tested_version":"1.0.14","endpoint_name":"pirate-prompt","is_component":false}
\ No newline at end of file
+{"id":"ebc064b8-24a0-42bc-82d7-052ff8494566","data":{"nodes":[{"data":{"description":"Get chat inputs from the Playground.","display_name":"Chat Input","id":"ChatInput-SfwHw","node":{"template":{"_type":"Component","files":{"trace_as_metadata":true,"file_path":"","fileTypes":["txt","md","mdx","csv","json","yaml","yml","xml","html","htm","pdf","docx","py","sh","sql","js","ts","tsx","jpg","jpeg","png","bmp","image"],"list":true,"required":false,"placeholder":"","show":true,"value":"","name":"files","display_name":"Files","advanced":true,"dynamic":false,"info":"Files to be sent with the message.","title_case":false,"type":"file","_input_type":"FileInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"hello","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as input.","title_case":false,"type":"str","_input_type":"MultilineInput"},"sender":{"combobox":false,"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"User","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Get chat inputs from the Playground.","icon":"ChatInput","base_classes":["Message"],"display_name":"Chat Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","files"],"beta":false,"edited":false},"type":"ChatInput"},"dragging":false,"height":298,"id":"ChatInput-SfwHw","position":{"x":-493.6459512396177,"y":1083.200545525551},"positionAbsolute":{"x":-493.6459512396177,"y":1083.200545525551},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Create a prompt template with dynamic variables.","display_name":"Prompt","id":"Prompt-umh7I","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, 
previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: ","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput","load_from_db":false},"user_input":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"user_input","display_name":"user_input","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","base_classes":["Message"],"display_name":"Prompt","documentation":"","custom_fields":{"template":["user_input"]},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"edited":false},"type":"Prompt"},"dragging":false,"height":412,"id":"Prompt-umh7I","position":{"x":56.354011530798516,"y":1157.2005405164796},"positionAbsolute":{"x":56.354011530798516,"y":1157.2005405164796},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Display a chat message in the Playground.","display_name":"Chat Output","id":"ChatOutput-EmFLK","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"data_template","value":"{text}","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sender","value":"Machine","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sender_name","value":"AI","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"session_id","value":"","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"should_store_message","value":true,"display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false},"type":"ChatOutput"},"dragging":false,"height":298,"id":"ChatOutput-EmFLK","position":{"x":1219.477374823274,"y":1200.950216973985},"positionAbsolute":{"x":1219.477374823274,"y":1200.950216973985},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Generates text using OpenAI LLMs.","display_name":"OpenAI","id":"OpenAIModel-nHTBr","node":{"template":{"_type":"Component","api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"name":"api_key","value":"","display_name":"OpenAI API Key","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"json_mode","value":false,"display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"name":"max_tokens","value":"","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"model_kwargs","value":{},"display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"model_name","value":"gpt-4o","display_name":"Model Name","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput","load_from_db":false},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"openai_api_base","value":"","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"name":"output_schema","value":{},"display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"seed","value":1,"display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"stream","value":false,"display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_message","value":"","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"temperature","value":0.1,"display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed"],"beta":false,"edited":false},"type":"OpenAIModel"},"dragging":false,"height":603,"id":"OpenAIModel-nHTBr","position":{"x":664.0296638933031,"y":1026.5966174731725},"positionAbsolute":{"x":664.0296638933031,"y":1026.5966174731725},"selected":true,"type":"genericNode","width":384}],"edges":[{"className":"","data":{"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-SfwHw","name":"message","output_types":["Message"]},"targetHandle":{"fieldName":"user_input","id":"Prompt-umh7I","inputTypes":["Message","Text"],"type":"str"}},"id":"reactflow__edge-ChatInput-SfwHw{œdataTypeœ:œChatInputœ,œidœ:œChatInput-SfwHwœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-umh7I{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-umh7Iœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","source":"ChatInput-SfwHw","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-SfwHwœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-umh7I","targetHandle":"{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-umh7Iœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"Prompt","id":"Prompt-umh7I","name":"prompt","output_types":["Message"]},"targetHandle":{"fieldName":"input_value","id":"OpenAIModel-nHTBr","inputTypes":["Message"],"type":"str"}},"id":"reactflow__edge-Prompt-umh7I{œdataTypeœ:œPromptœ,œidœ:œPrompt-umh7Iœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-nHTBr{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-nHTBrœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","source":"Prompt-umh7I","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-umh7Iœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"OpenAIModel-nHTBr","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-nHTBrœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"OpenAIModel","id":"OpenAIModel-nHTBr","name":"text_output","output_types":["Message"]},"targetHandle":{"fieldName":"input_value","id":"ChatOutput-EmFLK","inputTypes":["Message"],"type":"str"}},"id":"reactflow__edge-OpenAIModel-nHTBr{œdataTypeœ:œOpenAIModelœ,œidœ
:œOpenAIModel-nHTBrœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-EmFLK{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-EmFLKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","source":"OpenAIModel-nHTBr","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-nHTBrœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-EmFLK","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-EmFLKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"}],"viewport":{"x":428.12410642709597,"y":-361.39815091467085,"zoom":0.5562299357713679}},"description":"This flow gets you experimenting with the basics of the UI, the Chat and the Prompt components. \n\nTry changing the Template to see how the model behaves. \nFor example, change it to the following, with a Text Input connected to the `type_of_person` variable: \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ","name":"PiratePrompt","last_tested_version":"1.0.17","endpoint_name":"pirate-prompt","is_component":false}
\ No newline at end of file
diff --git a/flows/ResearchPaperDigest.json b/flows/ResearchPaperDigest.json
index a273ef0..83c7071 100644
--- a/flows/ResearchPaperDigest.json
+++ b/flows/ResearchPaperDigest.json
@@ -1 +1 @@
-{"id":"1d6e70c6-ffea-476b-8033-c0227b4c8d53","data":{"nodes":[{"id":"GoogleGenerativeAIModel-YZ8QZ","type":"genericNode","position":{"x":2340.847503609555,"y":1275.7611409417955},"data":{"type":"GoogleGenerativeAIModel","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput\n\n\nclass GoogleGenerativeAIComponent(LCModelComponent):\n display_name = \"Google Generative AI\"\n description = \"Generate text using Google Generative AI.\"\n icon = \"GoogleGenerativeAI\"\n name = \"GoogleGenerativeAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_output_tokens\",\n display_name=\"Max Output Tokens\",\n info=\"The maximum number of tokens to generate.\",\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n info=\"The name of the model to use.\",\n options=[\"gemini-1.5-pro\", \"gemini-1.5-flash\", \"gemini-1.0-pro\", \"gemini-1.0-pro-vision\"],\n value=\"gemini-1.5-pro\",\n ),\n SecretStrInput(\n name=\"google_api_key\",\n display_name=\"Google API Key\",\n info=\"The Google API Key to use for the Google Generative AI.\",\n ),\n FloatInput(\n name=\"top_p\",\n display_name=\"Top P\",\n info=\"The maximum cumulative probability of tokens to consider when sampling.\",\n advanced=True,\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"n\",\n display_name=\"N\",\n info=\"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.\",\n advanced=True,\n ),\n IntInput(\n name=\"top_k\",\n display_name=\"Top K\",\n info=\"Decode using top-k sampling: consider the set of top_k most probable tokens. 
Must be positive.\",\n advanced=True,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_google_genai import ChatGoogleGenerativeAI\n except ImportError:\n raise ImportError(\"The 'langchain_google_genai' package is required to use the Google Generative AI model.\")\n\n google_api_key = self.google_api_key\n model = self.model\n max_output_tokens = self.max_output_tokens\n temperature = self.temperature\n top_k = self.top_k\n top_p = self.top_p\n n = self.n\n\n output = ChatGoogleGenerativeAI( # type: ignore\n model=model,\n max_output_tokens=max_output_tokens or None,\n temperature=temperature,\n top_k=top_k or None,\n top_p=top_p or None,\n n=n or 1,\n google_api_key=SecretStr(google_api_key),\n )\n\n return output # type: ignore\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"google_api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"value":"","name":"google_api_key","display_name":"Google API Key","advanced":false,"input_types":[],"dynamic":false,"info":"The Google API Key to use for the Google Generative AI.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"max_output_tokens":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"8192","name":"max_output_tokens","display_name":"Max Output Tokens","advanced":false,"dynamic":false,"info":"The maximum number of tokens to generate.","title_case":false,"type":"int","_input_type":"IntInput"},"model":{"trace_as_metadata":true,"options":["gemini-1.5-pro","gemini-1.5-flash","gemini-1.0-pro","gemini-1.0-pro-vision"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"gemini-1.5-flash","name":"model","display_name":"Model","advanced":false,"dynamic":false,"info":"The name of the model to use.","title_case":false,"type":"str","_input_type":"DropdownInput"},"n":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"n","display_name":"N","advanced":true,"dynamic":false,"info":"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"stream","display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"system_message","display_name":"System Message","advanced":false,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"0.0","name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"},"top_k":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"top_k","display_name":"Top K","advanced":true,"dynamic":false,"info":"Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.","title_case":false,"type":"int","_input_type":"IntInput"},"top_p":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":".95","name":"top_p","display_name":"Top P","advanced":false,"dynamic":false,"info":"The maximum cumulative probability of tokens to consider when sampling.","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generate text using Google Generative AI.","icon":"GoogleGenerativeAI","base_classes":["LanguageModel","Message"],"display_name":"Google Generative AI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_output_tokens","model","google_api_key","top_p","temperature","n","top_k"],"beta":false,"edited":false},"id":"GoogleGenerativeAIModel-YZ8QZ"},"selected":true,"width":384,"height":903,"dragging":false,"positionAbsolute":{"x":2340.847503609555,"y":1275.7611409417955}},{"id":"TextOutput-vdHQL","type":"genericNode","position":{"x":2896.9291358458267,"y":1880.9626202552622},"data":{"type":"TextOutput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.text import TextComponent\nfrom langflow.io import MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextOutputComponent(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n name = \"TextOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as output.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n message = Message(\n text=self.input_value,\n )\n self.status = self.input_value\n return 
message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Text to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"}},"description":"Display a text output in the Playground.","icon":"type","base_classes":["Message"],"display_name":"Text Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value"],"beta":false,"edited":false},"id":"TextOutput-vdHQL"},"selected":false,"width":384,"height":317,"positionAbsolute":{"x":2896.9291358458267,"y":1880.9626202552622},"dragging":false},{"id":"Prompt-BmJjb","type":"genericNode","position":{"x":1726.1040189342011,"y":1696.1116782972658},"data":{"type":"Prompt","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":"You are the top editor at a company that creates digests of research papers, similar to what Readers' Digest does with books.\n\nAfter the title, include the author names, the publication name and year of publication, and any \nhyperlink to the original paper. 
If no hyperlink is provided, include unique identifying information \nsuch as DOI, arXiv, PMID, etc.\n\nFollow with a digest of the research paper, being sure to include all sections and sub-sections in the \noriginal paper, including the abstract but exclude appendixes and bibliography. \n\nThe writing should be in the same style as the original authors; imagine their editor gave them the task to \ncondense the paper down to 10% of the original size, and that is the sort of digest you are creating.\n\nExample:\n----\n# This is the Title of an Amazing Research Paper\n\nAuthors: Alice Jones, Bob Smith\n\nPublication: The Research Journal (2023)\n\nURL: https://doi.org/10.1186/s1234-566-032-3\n\n[digest of research paper]\n\n----\n\nOutput should be in English (regardless of the paper's language), and in Markdown format.\n","name":"template","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Prompt","documentation":"","custom_fields":{"template":[]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":null,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"error":null,"edited":false},"id":"Prompt-BmJjb"},"selected":false,"width":384,"height":337},{"id":"Prompt-polxO","type":"genericNode","position":{"x":1709.9593707440326,"y":1161.4212576598093},"data":{"type":"Prompt","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return 
frontend_node\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":"Parsed Research Paper:\n\n{paper_text}","name":"template","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput"},"paper_text":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"paper_text","display_name":"paper_text","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Prompt","documentation":"","custom_fields":{"template":["paper_text"]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":null,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"error":null,"edited":false},"id":"Prompt-polxO"},"selected":false,"width":384,"height":431},{"id":"ParseData-Lb2jD","type":"genericNode","position":{"x":1201.1329167993918,"y":1112.6108635262467},"data":{"type":"ParseData","node":{"template":{"_type":"Component","data":{"trace_as_input":true,"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"data","display_name":"Data","advanced":false,"input_types":["Data"],"dynamic":false,"info":"The data to convert to text.","title_case":false,"type":"other","_input_type":"DataInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"sep":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"\n","name":"sep","display_name":"Separator","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"StrInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"template","display_name":"Template","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Convert Data into plain text following a specified template.","icon":"braces","base_classes":["Message"],"display_name":"Parse Data","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"parse_data","value":"__UNDEFINED__","cache":true}],"field_order":["data","template","sep"],"beta":false,"edited":false},"id":"ParseData-Lb2jD"},"selected":false,"width":384,"height":401},{"id":"base64 File-gNcQS","type":"genericNode","position":{"x":662.4798684352011,"y":992.1170697179748},"data":{"type":"base64 File","node":{"template":{"_type":"Component","base64_string":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"Enter base64 encoded file string here...","show":true,"value":"","name":"base64_string","display_name":"base64 Encoded File","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageTextInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import base64\r\nimport os\r\nimport tempfile\r\nfrom langflow.custom import Component\r\nfrom langflow.schema import Data\r\nfrom langflow.base.data.utils import parse_text_file_to_data\r\nfrom langflow.io import BoolInput, MessageTextInput, Output\r\n\r\nclass Base64FileLoader(Component):\r\n display_name = \"base64 File\"\r\n description = \"Loads a file from a base64 encoded string, temporarily stores it, and processes similar to FileComponent.\"\r\n icon = \"file-text\"\r\n name = \"base64 File\"\r\n\r\n inputs = [\r\n MessageTextInput(name=\"base64_string\", display_name=\"base64 Encoded File\", required=True, placeholder=\"Enter base64 encoded file string here...\"),\r\n MessageTextInput(name=\"filename\", display_name=\"Filename\", required=True, placeholder=\"Enter filename with extension...\"),\r\n BoolInput(\r\n name=\"silent_errors\",\r\n display_name=\"Silent Errors\",\r\n advanced=True,\r\n info=\"If 
true, errors will not raise an exception.\",\r\n ),\r\n ]\r\n\r\n outputs = [\r\n Output(display_name=\"Data\", name=\"data\", method=\"decode_and_load_file\"),\r\n ]\r\n\r\n def decode_and_load_file(self) -> Data:\r\n\r\n if not self.base64_string:\r\n raise ValueError(\"Base64 string is empty. Please provide a valid base64 encoded file.\")\r\n if not self.filename:\r\n raise ValueError(\"Filename is required.\")\r\n\r\n base64_val = self.base64_string\r\n filename_val = self.filename \r\n \r\n # Decode the base64 string to bytes\r\n try:\r\n file_bytes = base64.b64decode(base64_val)\r\n except Exception as e:\r\n raise ValueError(f\"Failed to decode base64 string: {str(e)}\")\r\n\r\n # Create a temporary directory to store the file\r\n with tempfile.TemporaryDirectory() as tmpdirname:\r\n temp_file_path = os.path.join(tmpdirname, filename_val)\r\n\r\n # Write bytes to a temporary file\r\n with open(temp_file_path, 'wb') as temp_file:\r\n temp_file.write(file_bytes)\r\n\r\n # Use the existing utility to parse the text file\r\n try:\r\n data = parse_text_file_to_data(temp_file_path, silent_errors=self.silent_errors)\r\n except Exception as e:\r\n raise ValueError(f\"Failed to parse the file: {str(e)}\")\r\n\r\n # Ensure file is deleted after processing if required\r\n os.remove(temp_file_path)\r\n\r\n self.status = \"File processed successfully.\"\r\n return data\r\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"filename":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"Enter filename with extension...","show":true,"value":"","name":"filename","display_name":"Filename","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageTextInput"},"silent_errors":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"silent_errors","display_name":"Silent Errors","advanced":true,"dynamic":false,"info":"If true, errors will not raise an exception.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Loads a file from a base64 encoded string, temporarily stores it, and processes similar to FileComponent.","icon":"file-text","base_classes":["Data"],"display_name":"base64 File","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"data","display_name":"Data","method":"decode_and_load_file","value":"__UNDEFINED__","cache":true}],"field_order":["base64_string","filename","silent_errors"],"beta":false,"edited":true,"official":false},"id":"base64 File-gNcQS"},"selected":false,"width":384,"height":467,"positionAbsolute":{"x":662.4798684352011,"y":992.1170697179748},"dragging":false},{"id":"TextInput-AlrlI","type":"genericNode","position":{"x":92.29661360789726,"y":1366.4538119353083},"data":{"type":"TextInput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.text import TextComponent\nfrom langflow.io import MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MessageTextInput(\n 
name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n message = Message(\n text=self.input_value,\n )\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Text to be passed as input.","title_case":false,"type":"str","_input_type":"MessageTextInput"}},"description":"Get text inputs from the Playground.","icon":"type","base_classes":["Message"],"display_name":"Text Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value"],"beta":false,"edited":false},"id":"TextInput-AlrlI"},"selected":false,"width":384,"height":317,"positionAbsolute":{"x":92.29661360789726,"y":1366.4538119353083},"dragging":false},{"id":"TextInput-eYtbq","type":"genericNode","position":{"x":94.03771473448944,"y":835.4179683246732},"data":{"type":"TextInput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.text import TextComponent\nfrom langflow.io import MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n message = Message(\n text=self.input_value,\n )\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Text to be passed as input.","title_case":false,"type":"str","_input_type":"MessageTextInput"}},"description":"Get text inputs from the Playground.","icon":"type","base_classes":["Message"],"display_name":"Text 
Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value"],"beta":false,"edited":false},"id":"TextInput-eYtbq"},"selected":false,"width":384,"height":317,"positionAbsolute":{"x":94.03771473448944,"y":835.4179683246732},"dragging":false}],"edges":[{"source":"GoogleGenerativeAIModel-YZ8QZ","target":"TextOutput-vdHQL","sourceHandle":"{œdataTypeœ:œGoogleGenerativeAIModelœ,œidœ:œGoogleGenerativeAIModel-YZ8QZœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-vdHQLœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-GoogleGenerativeAIModel-YZ8QZ{œdataTypeœ:œGoogleGenerativeAIModelœ,œidœ:œGoogleGenerativeAIModel-YZ8QZœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-TextOutput-vdHQL{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-vdHQLœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"TextOutput-vdHQL","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"GoogleGenerativeAIModel","id":"GoogleGenerativeAIModel-YZ8QZ","name":"text_output","output_types":["Message"]}},"selected":false,"className":""},{"source":"Prompt-BmJjb","target":"GoogleGenerativeAIModel-YZ8QZ","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-BmJjbœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","targetHandle":"{œfieldNameœ:œsystem_messageœ,œidœ:œGoogleGenerativeAIModel-YZ8QZœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-Prompt-BmJjb{œdataTypeœ:œPromptœ,œidœ:œPrompt-BmJjbœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-GoogleGenerativeAIModel-YZ8QZ{œfieldNameœ:œsystem_messageœ,œidœ:œGoogleGenerativeAIModel-YZ8QZœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"system_message","id":"GoogleGenerativeAIModel-YZ8QZ","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-BmJjb","name":"prompt","output_types":["Message"]}},"selected":false,"className":""},{"source":"Prompt-polxO","target":"GoogleGenerativeAIModel-YZ8QZ","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-polxOœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œGoogleGenerativeAIModel-YZ8QZœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-Prompt-polxO{œdataTypeœ:œPromptœ,œidœ:œPrompt-polxOœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-GoogleGenerativeAIModel-YZ8QZ{œfieldNameœ:œinput_valueœ,œidœ:œGoogleGenerativeAIModel-YZ8QZœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"GoogleGenerativeAIModel-YZ8QZ","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-polxO","name":"prompt","output_types":["Message"]}},"selected":false,"className":""},{"source":"ParseData-Lb2jD","target":"Prompt-polxO","sourceHandle":"{œdataTypeœ:œParseDataœ,œidœ:œParseData-Lb2jDœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","targetHandle":"{œfieldNameœ:œpaper_textœ,œidœ:œPrompt-polxOœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","id":"reactflow__edge-ParseData-Lb2jD{œdataTypeœ:œParseDataœ,œidœ:œParseData-Lb2jDœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-polxO{œfieldNameœ:œpaper_textœ,œidœ:œPrompt-polxOœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"paper_text","id":"Prompt-polxO","input
Types":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"ParseData","id":"ParseData-Lb2jD","name":"text","output_types":["Message"]}},"selected":false,"className":""},{"source":"base64 File-gNcQS","sourceHandle":"{œdataTypeœ:œbase64 Fileœ,œidœ:œbase64 File-gNcQSœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}","target":"ParseData-Lb2jD","targetHandle":"{œfieldNameœ:œdataœ,œidœ:œParseData-Lb2jDœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"data","id":"ParseData-Lb2jD","inputTypes":["Data"],"type":"other"},"sourceHandle":{"dataType":"base64 File","id":"base64 File-gNcQS","name":"data","output_types":["Data"]}},"id":"reactflow__edge-base64 File-gNcQS{œdataTypeœ:œbase64 Fileœ,œidœ:œbase64 File-gNcQSœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-Lb2jD{œfieldNameœ:œdataœ,œidœ:œParseData-Lb2jDœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}"},{"source":"TextInput-AlrlI","sourceHandle":"{œdataTypeœ:œTextInputœ,œidœ:œTextInput-AlrlIœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","target":"base64 File-gNcQS","targetHandle":"{œfieldNameœ:œfilenameœ,œidœ:œbase64 File-gNcQSœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"filename","id":"base64 File-gNcQS","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"TextInput","id":"TextInput-AlrlI","name":"text","output_types":["Message"]}},"id":"reactflow__edge-TextInput-AlrlI{œdataTypeœ:œTextInputœ,œidœ:œTextInput-AlrlIœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-base64 File-gNcQS{œfieldNameœ:œfilenameœ,œidœ:œbase64 File-gNcQSœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"},{"source":"TextInput-eYtbq","sourceHandle":"{œdataTypeœ:œTextInputœ,œidœ:œTextInput-eYtbqœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","target":"base64 File-gNcQS","targetHandle":"{œfieldNameœ:œbase64_stringœ,œidœ:œbase64 File-gNcQSœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"base64_string","id":"base64 File-gNcQS","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"TextInput","id":"TextInput-eYtbq","name":"text","output_types":["Message"]}},"id":"reactflow__edge-TextInput-eYtbq{œdataTypeœ:œTextInputœ,œidœ:œTextInput-eYtbqœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-base64 File-gNcQS{œfieldNameœ:œbase64_stringœ,œidœ:œbase64 File-gNcQSœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"}],"viewport":{"x":165.49019032569964,"y":-163.30030808397362,"zoom":0.4352752816480626}},"description":"Creates a digest version of a research paper","name":"ResearchPaperDigest","last_tested_version":"1.0.14","endpoint_name":"research-paper-digest","is_component":false}
\ No newline at end of file
+{"id":"cc90bbe5-2ac0-46d2-b5f5-2c92a07d87ef","data":{"nodes":[{"id":"GoogleGenerativeAIModel-CS0W8","type":"genericNode","position":{"x":2340.847503609555,"y":1275.7611409417955},"data":{"type":"GoogleGenerativeAIModel","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput\n\n\nclass GoogleGenerativeAIComponent(LCModelComponent):\n display_name = \"Google Generative AI\"\n description = \"Generate text using Google Generative AI.\"\n icon = \"GoogleGenerativeAI\"\n name = \"GoogleGenerativeAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_output_tokens\",\n display_name=\"Max Output Tokens\",\n info=\"The maximum number of tokens to generate.\",\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n info=\"The name of the model to use.\",\n options=[\"gemini-1.5-pro\", \"gemini-1.5-flash\", \"gemini-1.0-pro\", \"gemini-1.0-pro-vision\"],\n value=\"gemini-1.5-pro\",\n ),\n SecretStrInput(\n name=\"google_api_key\",\n display_name=\"Google API Key\",\n info=\"The Google API Key to use for the Google Generative AI.\",\n ),\n FloatInput(\n name=\"top_p\",\n display_name=\"Top P\",\n info=\"The maximum cumulative probability of tokens to consider when sampling.\",\n advanced=True,\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"n\",\n display_name=\"N\",\n info=\"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.\",\n advanced=True,\n ),\n IntInput(\n name=\"top_k\",\n display_name=\"Top K\",\n info=\"Decode using top-k sampling: consider the set of top_k most probable tokens. 
Must be positive.\",\n advanced=True,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_google_genai import ChatGoogleGenerativeAI\n except ImportError:\n raise ImportError(\"The 'langchain_google_genai' package is required to use the Google Generative AI model.\")\n\n google_api_key = self.google_api_key\n model = self.model\n max_output_tokens = self.max_output_tokens\n temperature = self.temperature\n top_k = self.top_k\n top_p = self.top_p\n n = self.n\n\n output = ChatGoogleGenerativeAI( # type: ignore\n model=model,\n max_output_tokens=max_output_tokens or None,\n temperature=temperature,\n top_k=top_k or None,\n top_p=top_p or None,\n n=n or 1,\n google_api_key=SecretStr(google_api_key),\n )\n\n return output # type: ignore\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"google_api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"value":"","name":"google_api_key","display_name":"Google API Key","advanced":false,"input_types":[],"dynamic":false,"info":"The Google API Key to use for the Google Generative AI.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"max_output_tokens":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"8192","name":"max_output_tokens","display_name":"Max Output Tokens","advanced":false,"dynamic":false,"info":"The maximum number of tokens to generate.","title_case":false,"type":"int","_input_type":"IntInput"},"model":{"trace_as_metadata":true,"options":["gemini-1.5-pro","gemini-1.5-flash","gemini-1.0-pro","gemini-1.0-pro-vision"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"gemini-1.5-flash","name":"model","display_name":"Model","advanced":false,"dynamic":false,"info":"The name of the model to use.","title_case":false,"type":"str","_input_type":"DropdownInput"},"n":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"n","display_name":"N","advanced":true,"dynamic":false,"info":"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"stream","display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"system_message","display_name":"System Message","advanced":false,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"0.0","name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"},"top_k":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"top_k","display_name":"Top K","advanced":true,"dynamic":false,"info":"Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.","title_case":false,"type":"int","_input_type":"IntInput"},"top_p":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":".95","name":"top_p","display_name":"Top P","advanced":false,"dynamic":false,"info":"The maximum cumulative probability of tokens to consider when sampling.","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generate text using Google Generative AI.","icon":"GoogleGenerativeAI","base_classes":["LanguageModel","Message"],"display_name":"Google Generative AI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_output_tokens","model","google_api_key","top_p","temperature","n","top_k"],"beta":false,"edited":false},"id":"GoogleGenerativeAIModel-CS0W8"},"selected":false,"width":384,"height":859,"dragging":false,"positionAbsolute":{"x":2340.847503609555,"y":1275.7611409417955}},{"id":"TextOutput-d9nFE","type":"genericNode","position":{"x":2896.9291358458267,"y":1880.9626202552622},"data":{"type":"TextOutput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.text import TextComponent\nfrom langflow.io import MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextOutputComponent(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n name = \"TextOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as output.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n message = Message(\n text=self.input_value,\n )\n self.status = self.input_value\n return 
message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Text to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"}},"description":"Display a text output in the Playground.","icon":"type","base_classes":["Message"],"display_name":"Text Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value"],"beta":false,"edited":false},"id":"TextOutput-d9nFE"},"selected":false,"width":384,"height":298,"positionAbsolute":{"x":2896.9291358458267,"y":1880.9626202552622},"dragging":false},{"id":"Prompt-Y2bvA","type":"genericNode","position":{"x":1726.1040189342011,"y":1696.1116782972658},"data":{"type":"Prompt","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def 
_get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"You are the top editor at a company that creates digests of research papers, similar to what Readers' Digest does with books.\n\nAfter the title, include the author names, the publication name and year of publication, and any \nhyperlink to the original paper. If no hyperlink is provided, include unique identifying information \nsuch as DOI, arXiv, PMID, etc.\n\nFollow with a digest of the research paper, being sure to include all sections and sub-sections in the \noriginal paper, including the abstract but exclude appendixes and bibliography. \n\nThe writing should be in the same style as the original authors; imagine their editor gave them the task to \ncondense the paper down to 10% of the original size, and that is the sort of digest you are creating.\n\nExample:\n----\n# This is the Title of an Amazing Research Paper\n\nAuthors: Alice Jones, Bob Smith\n\nPublication: The Research Journal (2023)\n\nURL: https://doi.org/10.1186/s1234-566-032-3\n\n[digest of research paper]\n\n----\n\nOutput should be in English (regardless of the paper's language), and in Markdown format.\n","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput","load_from_db":false}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","base_classes":["Message"],"display_name":"Prompt","documentation":"","custom_fields":{"template":[]},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"edited":false},"id":"Prompt-Y2bvA","description":"Create a prompt template with dynamic variables.","display_name":"Prompt"},"selected":false,"width":384,"height":326},{"id":"Prompt-JBx0Z","type":"genericNode","position":{"x":1709.9593707440326,"y":1161.4212576598093},"data":{"type":"Prompt","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n 
frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"Parsed Research Paper:\n\n{paper_text}","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput","load_from_db":false},"paper_text":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"paper_text","display_name":"paper_text","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","base_classes":["Message"],"display_name":"Prompt","documentation":"","custom_fields":{"template":["paper_text"]},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"edited":false},"id":"Prompt-JBx0Z","description":"Create a prompt template with dynamic variables.","display_name":"Prompt"},"selected":false,"width":384,"height":412},{"id":"ParseData-7fnJC","type":"genericNode","position":{"x":1201.1329167993918,"y":1112.6108635262467},"data":{"type":"ParseData","node":{"template":{"_type":"Component","data":{"trace_as_input":true,"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"data","display_name":"Data","advanced":false,"input_types":["Data"],"dynamic":false,"info":"The data to convert to text.","title_case":false,"type":"other","_input_type":"DataInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified 
template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"sep":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"\n","name":"sep","display_name":"Separator","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"StrInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"template","display_name":"Template","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Convert Data into plain text following a specified template.","icon":"braces","base_classes":["Message"],"display_name":"Parse Data","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"parse_data","value":"__UNDEFINED__","cache":true}],"field_order":["data","template","sep"],"beta":false,"edited":false},"id":"ParseData-7fnJC"},"selected":false,"width":384,"height":374},{"id":"base64 File-k0XGQ","type":"genericNode","position":{"x":662.4798684352011,"y":992.1170697179748},"data":{"type":"base64 File","node":{"template":{"_type":"Component","base64_string":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"Enter base64 encoded file string here...","show":true,"value":"","name":"base64_string","display_name":"base64 Encoded File","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageTextInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import base64\r\nimport os\r\nimport tempfile\r\nfrom langflow.custom import Component\r\nfrom langflow.schema import Data\r\nfrom langflow.base.data.utils import parse_text_file_to_data\r\nfrom langflow.io import BoolInput, MessageTextInput, Output\r\n\r\nclass Base64FileLoader(Component):\r\n display_name = \"base64 File\"\r\n description = \"Loads a file from a base64 encoded string, temporarily stores it, and processes similar to FileComponent.\"\r\n icon = \"file-text\"\r\n name = \"base64 File\"\r\n\r\n inputs = [\r\n MessageTextInput(name=\"base64_string\", display_name=\"base64 Encoded File\", required=True, placeholder=\"Enter 
base64 encoded file string here...\"),\r\n MessageTextInput(name=\"filename\", display_name=\"Filename\", required=True, placeholder=\"Enter filename with extension...\"),\r\n BoolInput(\r\n name=\"silent_errors\",\r\n display_name=\"Silent Errors\",\r\n advanced=True,\r\n info=\"If true, errors will not raise an exception.\",\r\n ),\r\n ]\r\n\r\n outputs = [\r\n Output(display_name=\"Data\", name=\"data\", method=\"decode_and_load_file\"),\r\n ]\r\n\r\n def decode_and_load_file(self) -> Data:\r\n\r\n if not self.base64_string:\r\n raise ValueError(\"Base64 string is empty. Please provide a valid base64 encoded file.\")\r\n if not self.filename:\r\n raise ValueError(\"Filename is required.\")\r\n\r\n base64_val = self.base64_string\r\n filename_val = self.filename \r\n \r\n # Decode the base64 string to bytes\r\n try:\r\n file_bytes = base64.b64decode(base64_val)\r\n except Exception as e:\r\n raise ValueError(f\"Failed to decode base64 string: {str(e)}\")\r\n\r\n # Create a temporary directory to store the file\r\n with tempfile.TemporaryDirectory() as tmpdirname:\r\n temp_file_path = os.path.join(tmpdirname, filename_val)\r\n\r\n # Write bytes to a temporary file\r\n with open(temp_file_path, 'wb') as temp_file:\r\n temp_file.write(file_bytes)\r\n\r\n # Use the existing utility to parse the text file\r\n try:\r\n data = parse_text_file_to_data(temp_file_path, silent_errors=self.silent_errors)\r\n except Exception as e:\r\n raise ValueError(f\"Failed to parse the file: {str(e)}\")\r\n\r\n # Ensure file is deleted after processing if required\r\n os.remove(temp_file_path)\r\n\r\n self.status = \"File processed successfully.\"\r\n return data\r\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"filename":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"Enter filename with extension...","show":true,"value":"","name":"filename","display_name":"Filename","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageTextInput"},"silent_errors":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"silent_errors","display_name":"Silent Errors","advanced":true,"dynamic":false,"info":"If true, errors will not raise an exception.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Loads a file from a base64 encoded string, temporarily stores it, and processes similar to FileComponent.","icon":"file-text","base_classes":["Data"],"display_name":"base64 File","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"data","display_name":"Data","method":"decode_and_load_file","value":"__UNDEFINED__","cache":true}],"field_order":["base64_string","filename","silent_errors"],"beta":false,"edited":true,"official":false},"id":"base64 
File-k0XGQ"},"selected":false,"width":384,"height":440,"positionAbsolute":{"x":662.4798684352011,"y":992.1170697179748},"dragging":false},{"id":"ParameterInput-AtPx5","type":"genericNode","position":{"x":68.92986503033467,"y":901.1480049376594},"data":{"type":"ParameterInput","node":{"template":{"_type":"Component","base64_file":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"base64_file","value":"","display_name":"base64 File","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Contents of file with base64 encoding","title_case":false,"type":"str","_input_type":"MessageTextInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.io import MessageTextInput, Output\n\n# This should work from v1.0.16 only which fixes issue #3380\n\n# It is admittedly fairly hack-ey but demonstrates the concept. \n# Developer will need to edit the component to name parameters sensibly,\n# and to add more parameters as necessary!\n\nclass ParameterComponent(Component):\n # The tweak can use the display_name, e.g.\n # \"tweaks\": {\"Parameter Input\": { \"parameter_1\": varParameter1, \"parameter_2\": varParameter2 }}\n # As this will not change on export/import\n display_name = \"Parameter Input\"\n description = \"Parameters combined into single Component.\"\n icon = \"type\"\n name = \"ParameterInput\"\n\n # Each input will be a MessageTextInput with a distinct name=\n # There will be a corresponding output with a name= using _out postfix\n # There will be a corresponding function f_ for each parameter to define the output\n inputs = [\n MessageTextInput(\n name=\"base64_file\",\n display_name=\"base64 File\",\n info=\"Contents of file with base64 encoding\",\n ),\n MessageTextInput(\n name=\"filename\",\n display_name=\"Filename\",\n info=\"Name of file with extension\",\n ),\n ]\n\n outputs = [\n # name= and method= parameters need to not conflict with the input name=: \n # Name of this is the parameter name with _out\n Output(display_name=\"base64 File\", name=\"base64_file_out\", method=\"f_base64_file\"),\n Output(display_name=\"Filename\", name=\"filename_out\", method=\"f_filename\"),\n ]\n\n def f_base64_file(self) -> Message:\n return self.base64_file\n\n def f_filename(self) -> Message:\n return self.filename\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"filename":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"filename","value":"","display_name":"Filename","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Name of file with extension","title_case":false,"type":"str","_input_type":"MessageTextInput"}},"description":"Parameters combined into single Component.","icon":"type","base_classes":["Message"],"display_name":"Parameter Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"base64_file_out","display_name":"base64 
File","method":"f_base64_file","value":"__UNDEFINED__","cache":true},{"types":["Message"],"selected":"Message","name":"filename_out","display_name":"Filename","method":"f_filename","value":"__UNDEFINED__","cache":true}],"field_order":["base64_file","filename"],"beta":false,"edited":true},"id":"ParameterInput-AtPx5"},"selected":false,"width":384,"height":429,"positionAbsolute":{"x":68.92986503033467,"y":901.1480049376594},"dragging":false}],"edges":[{"source":"GoogleGenerativeAIModel-CS0W8","target":"TextOutput-d9nFE","sourceHandle":"{œdataTypeœ:œGoogleGenerativeAIModelœ,œidœ:œGoogleGenerativeAIModel-CS0W8œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-d9nFEœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-GoogleGenerativeAIModel-CS0W8{œdataTypeœ:œGoogleGenerativeAIModelœ,œidœ:œGoogleGenerativeAIModel-CS0W8œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-TextOutput-d9nFE{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-d9nFEœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"TextOutput-d9nFE","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"GoogleGenerativeAIModel","id":"GoogleGenerativeAIModel-CS0W8","name":"text_output","output_types":["Message"]}},"selected":false,"className":""},{"source":"Prompt-Y2bvA","target":"GoogleGenerativeAIModel-CS0W8","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-Y2bvAœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","targetHandle":"{œfieldNameœ:œsystem_messageœ,œidœ:œGoogleGenerativeAIModel-CS0W8œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-Prompt-Y2bvA{œdataTypeœ:œPromptœ,œidœ:œPrompt-Y2bvAœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-GoogleGenerativeAIModel-CS0W8{œfieldNameœ:œsystem_messageœ,œidœ:œGoogleGenerativeAIModel-CS0W8œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"system_message","id":"GoogleGenerativeAIModel-CS0W8","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-Y2bvA","name":"prompt","output_types":["Message"]}},"selected":false,"className":""},{"source":"Prompt-JBx0Z","target":"GoogleGenerativeAIModel-CS0W8","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-JBx0Zœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œGoogleGenerativeAIModel-CS0W8œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-Prompt-JBx0Z{œdataTypeœ:œPromptœ,œidœ:œPrompt-JBx0Zœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-GoogleGenerativeAIModel-CS0W8{œfieldNameœ:œinput_valueœ,œidœ:œGoogleGenerativeAIModel-CS0W8œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"GoogleGenerativeAIModel-CS0W8","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-JBx0Z","name":"prompt","output_types":["Message"]}},"selected":false,"className":""},{"source":"ParseData-7fnJC","target":"Prompt-JBx0Z","sourceHandle":"{œdataTypeœ:œParseDataœ,œidœ:œParseData-7fnJCœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","targetHandle":"{œfieldNameœ:œpaper_textœ,œidœ:œPrompt-JBx0Zœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","id":"reactflow__edge-ParseData-7fnJC{œdataTypeœ:œParseDataœ,œidœ:œParseData-7fnJCœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-JBx0Z{œfieldNameœ:œpaper_textœ,œidœ:œPrompt-JBx0Zœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"paper_text","id":"Prompt-JBx0Z","inputTypes":["Message","Text"],"type":"
str"},"sourceHandle":{"dataType":"ParseData","id":"ParseData-7fnJC","name":"text","output_types":["Message"]}},"selected":false,"className":""},{"source":"base64 File-k0XGQ","sourceHandle":"{œdataTypeœ:œbase64 Fileœ,œidœ:œbase64 File-k0XGQœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}","target":"ParseData-7fnJC","targetHandle":"{œfieldNameœ:œdataœ,œidœ:œParseData-7fnJCœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"data","id":"ParseData-7fnJC","inputTypes":["Data"],"type":"other"},"sourceHandle":{"dataType":"base64 File","id":"base64 File-k0XGQ","name":"data","output_types":["Data"]}},"id":"reactflow__edge-base64 File-k0XGQ{œdataTypeœ:œbase64 Fileœ,œidœ:œbase64 File-k0XGQœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-7fnJC{œfieldNameœ:œdataœ,œidœ:œParseData-7fnJCœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","className":""},{"source":"ParameterInput-AtPx5","sourceHandle":"{œdataTypeœ:œParameterInputœ,œidœ:œParameterInput-AtPx5œ,œnameœ:œbase64_file_outœ,œoutput_typesœ:[œMessageœ]}","target":"base64 File-k0XGQ","targetHandle":"{œfieldNameœ:œbase64_stringœ,œidœ:œbase64 File-k0XGQœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"base64_string","id":"base64 File-k0XGQ","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"ParameterInput","id":"ParameterInput-AtPx5","name":"base64_file_out","output_types":["Message"]}},"id":"reactflow__edge-ParameterInput-AtPx5{œdataTypeœ:œParameterInputœ,œidœ:œParameterInput-AtPx5œ,œnameœ:œbase64_file_outœ,œoutput_typesœ:[œMessageœ]}-base64 File-k0XGQ{œfieldNameœ:œbase64_stringœ,œidœ:œbase64 File-k0XGQœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"ParameterInput-AtPx5","sourceHandle":"{œdataTypeœ:œParameterInputœ,œidœ:œParameterInput-AtPx5œ,œnameœ:œfilename_outœ,œoutput_typesœ:[œMessageœ]}","target":"base64 File-k0XGQ","targetHandle":"{œfieldNameœ:œfilenameœ,œidœ:œbase64 File-k0XGQœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"filename","id":"base64 File-k0XGQ","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"ParameterInput","id":"ParameterInput-AtPx5","name":"filename_out","output_types":["Message"]}},"id":"reactflow__edge-ParameterInput-AtPx5{œdataTypeœ:œParameterInputœ,œidœ:œParameterInput-AtPx5œ,œnameœ:œfilename_outœ,œoutput_typesœ:[œMessageœ]}-base64 File-k0XGQ{œfieldNameœ:œfilenameœ,œidœ:œbase64 File-k0XGQœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""}],"viewport":{"x":-26.47675109801719,"y":-372.27033564622707,"zoom":0.6597539553864481}},"description":"Creates a digest version of a research paper","name":"ResearchPaperDigest","last_tested_version":"1.0.17","endpoint_name":"research-paper-digest","is_component":false}
\ No newline at end of file
diff --git a/render.yaml b/render.yaml
index b8ae25d..f1ab59f 100644
--- a/render.yaml
+++ b/render.yaml
@@ -36,7 +36,7 @@ services:
- key: LANGFLOW_HOST
value: 0.0.0.0
- key: LANGFLOW_PORT
- value: 10000
+ value: 10000 # This is the default port used by Render
- key: LANGFLOW_LOG_LEVEL
value: INFO
- fromGroup: langflow-demo-flow-secrets