diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Input node b/src/backend/base/langflow/initial_setup/starter_projects/Input node new file mode 100644 index 000000000000..1ed1a45a57c6 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Input node @@ -0,0 +1,1498 @@ +{ + "data": { + "edges": [ + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "Memory", + "id": "Memory-gWJrq", + "name": "messages_text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "memory", + "id": "Prompt-yhdMP", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-Memory-gWJrq{œdataTypeœ:œMemoryœ,œidœ:œMemory-gWJrqœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-yhdMP{œfieldNameœ:œmemoryœ,œidœ:œPrompt-yhdMPœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "selected": false, + "source": "Memory-gWJrq", + "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-gWJrqœ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-yhdMP", + "targetHandle": "{œfieldNameœ: œmemoryœ, œidœ: œPrompt-yhdMPœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-PEO9d", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-63o3Q", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-PEO9d{œdataTypeœ:œChatInputœ,œidœ:œChatInput-PEO9dœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-63o3Q{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-63o3Qœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "ChatInput-PEO9d", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-PEO9dœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-63o3Q", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-63o3Qœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-yhdMP", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-63o3Q", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-yhdMP{œdataTypeœ:œPromptœ,œidœ:œPrompt-yhdMPœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-63o3Q{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-63o3Qœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "Prompt-yhdMP", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-yhdMPœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-63o3Q", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-63o3Qœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-63o3Q", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-BIXzI", + "inputTypes": ["Data", "DataFrame", "Message"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-63o3Q{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-63o3Qœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-BIXzI{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-BIXzIœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "OpenAIModel-63o3Q", + "sourceHandle": 
"{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-63o3Qœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-BIXzI", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-BIXzIœ, œinputTypesœ: [œDataœ, œDataFrameœ, œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "id": "ChatInput-PEO9d", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "tool_mode": true, + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n 
info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "temp_file": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": [], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "what is my name" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": 
"sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-PEO9d", + "measured": { + "height": 234, + "width": 320 + }, + "position": { + "x": 2321.5543981677606, + "y": 374.0457826421628 + }, + "positionAbsolute": { + "x": 2321.5543981677606, + "y": 374.0457826421628 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-BIXzI", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "tool_mode": true, + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": 
"Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "clean_data": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Basic Clean Data", + "dynamic": false, + "info": "Whether to clean the data", + "list": false, + "list_add_label": "Add More", + "name": "clean_data", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from collections.abc import Generator\nfrom typing import Any\n\nimport orjson\nfrom fastapi.encoders import jsonable_encoder\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.inputs.inputs import HandleInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.data import Data\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_AI,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n minimized = True\n\n inputs = [\n HandleInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n input_types=[\"Data\", \"DataFrame\", \"Message\"],\n required=True,\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n BoolInput(\n name=\"clean_data\",\n display_name=\"Basic Clean Data\",\n value=True,\n info=\"Whether to clean the data\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, id_: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if id_:\n source_dict[\"id\"] = id_\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n # Handle case where source is a ChatOpenAI object\n if hasattr(source, \"model_name\"):\n source_dict[\"source\"] = source.model_name\n elif hasattr(source, \"model\"):\n source_dict[\"source\"] = str(source.model)\n else:\n source_dict[\"source\"] = str(source)\n return Source(**source_dict)\n\n async def message_response(self) -> Message:\n # First convert the input to string if needed\n text = self.convert_to_string()\n\n # Get source properties\n source, icon, display_name, source_id = self.get_properties_from_source_component()\n background_color = self.background_color\n text_color = self.text_color\n if self.chat_icon:\n icon = self.chat_icon\n\n # Create or use existing Message object\n if isinstance(self.input_value, Message):\n message = self.input_value\n # Update message properties\n message.text = text\n else:\n message = Message(text=text)\n\n # Set message properties\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(source_id, display_name, source)\n message.properties.icon = icon\n message.properties.background_color = background_color\n message.properties.text_color = text_color\n\n # Store message if needed\n if self.session_id and self.should_store_message:\n stored_message = await self.send_message(message)\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n\n def _validate_input(self) -> None:\n \"\"\"Validate the input data and raise ValueError if invalid.\"\"\"\n if self.input_value is None:\n msg = \"Input data cannot be None\"\n raise ValueError(msg)\n if isinstance(self.input_value, list) and not all(\n isinstance(item, Message | Data | DataFrame | str) for item in self.input_value\n ):\n invalid_types = [\n type(item).__name__\n for item in self.input_value\n if not isinstance(item, Message | Data | DataFrame | str)\n ]\n msg = f\"Expected Data or DataFrame or Message or str, got {invalid_types}\"\n raise TypeError(msg)\n if not isinstance(\n self.input_value,\n Message | Data | DataFrame | str | list | Generator | type(None),\n ):\n type_name = type(self.input_value).__name__\n msg = f\"Expected Data or DataFrame or Message or str, Generator or None, got {type_name}\"\n raise TypeError(msg)\n\n def _serialize_data(self, data: Data) -> str:\n \"\"\"Serialize Data object to JSON string.\"\"\"\n # Convert data.data to JSON-serializable format\n 
serializable_data = jsonable_encoder(data.data)\n # Serialize with orjson, enabling pretty printing with indentation\n json_bytes = orjson.dumps(serializable_data, option=orjson.OPT_INDENT_2)\n # Convert bytes to string and wrap in Markdown code blocks\n return \"```json\\n\" + json_bytes.decode(\"utf-8\") + \"\\n```\"\n\n def _safe_convert(self, data: Any) -> str:\n \"\"\"Safely convert input data to string.\"\"\"\n try:\n if isinstance(data, str):\n return data\n if isinstance(data, Message):\n return data.get_text()\n if isinstance(data, Data):\n return self._serialize_data(data)\n if isinstance(data, DataFrame):\n if self.clean_data:\n # Remove empty rows\n data = data.dropna(how=\"all\")\n # Remove empty lines in each cell\n data = data.replace(r\"^\\s*$\", \"\", regex=True)\n # Replace multiple newlines with a single newline\n data = data.replace(r\"\\n+\", \"\\n\", regex=True)\n\n # Replace pipe characters to avoid markdown table issues\n processed_data = data.replace(r\"\\|\", r\"\\\\|\", regex=True)\n\n processed_data = processed_data.map(\n lambda x: str(x).replace(\"\\n\", \"
<br/>
\") if isinstance(x, str) else x\n )\n\n return processed_data.to_markdown(index=False)\n return str(data)\n except (ValueError, TypeError, AttributeError) as e:\n msg = f\"Error converting data: {e!s}\"\n raise ValueError(msg) from e\n\n def convert_to_string(self) -> str | Generator[Any, None, None]:\n \"\"\"Convert input data to string with proper error handling.\"\"\"\n self._validate_input()\n if isinstance(self.input_value, list):\n return \"\\n\".join([self._safe_convert(item) for item in self.input_value])\n if isinstance(self.input_value, Generator):\n return self.input_value\n return self._safe_convert(self.input_value)\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Data", "DataFrame", "Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": true, + "height": 234, + "id": "ChatOutput-BIXzI", + "measured": { + "height": 234, + "width": 320 + }, + "position": { + "x": 3113.9880204333917, + "y": 769.7618411016429 + }, + "positionAbsolute": { + "x": 3101.965731391458, + "y": 776.4408905693839 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-jlIQj", + "node": { + "description": "# Memory Chatbot\n\nA flexible chatbot implementation featuring advanced conversation memory capabilities. This serves as a foundational tool for building chat experiences with persistent context.\n\n## Core Components\n\n1. **Chat Input**\n - Accepts user messages\n - Configures conversation storage\n - Tracks session identity\n\n2. **Chat Memory**\n - Stores and retrieves up to 100 previous messages\n - Maintains conversation context\n - Tracks separate chat sessions\n - Preserves sender information and message order\n\n3. **Prompt**\n - Creates dynamic prompt templates\n - Integrates memory into conversation flow\n\n4. **OpenAI**\n - Processes user input with context\n - Accesses conversation history\n - Includes options for model configuration and API key setup\n\n5. **Chat Output**\n - Displays formatted responses\n - Maintains conversation flow\n - Syncs with memory storage\n\n## Memory Features\n\n- Stores message history\n- Plans conversation trajectory\n- Differentiates between chat sessions\n- Preserves sender and message metadata\n\n## Quick Start\n\n1. **Initialize** with a clear session ID\n2. **Enter** message in Chat Input\n3. **AI Processes** with context from memory\n4. **Response** appears in Chat Output\n5. Context remains available for follow-ups\n\nThis robust system demonstrates thorough memory integration with minimal complexity. 
🧠💬\n", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 736, + "id": "note-jlIQj", + "measured": { + "height": 733, + "width": 325 + }, + "position": { + "x": 1512.8976594415833, + "y": 312.9558305744385 + }, + "positionAbsolute": { + "x": 1512.8976594415833, + "y": 312.9558305744385 + }, + "resizing": false, + "selected": false, + "style": { + "height": 736, + "width": 324 + }, + "type": "noteNode", + "width": 324 + }, + { + "data": { + "id": "note-oU1IW", + "node": { + "description": "## Get Your OpenAI API Key\n\n**Steps**:\n\n1. **Visit** [OpenAI's API Key Page](https://platform.openai.com/api-keys).\n\n2. **Log In/Sign Up**:\n - Log in or create a new OpenAI account.\n\n3. **Generate API Key**:\n - Click \"Create New Secret Key\" to obtain your key.\n\n4. **Store Your Key Securely**:\n - Note it down as it will only display once.\n\n5. **Enter API Key**:\n - Input your key in the OpenAI API Key field within the component setup.\n\nKeep your key safe and manage it responsibly!", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "rose" + } + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-oU1IW", + "measured": { + "height": 324, + "width": 325 + }, + "position": { + "x": 2727.7060397092964, + "y": 115.42518754847691 + }, + "positionAbsolute": { + "x": 2727.7060397092964, + "y": 115.42518754847691 + }, + "selected": false, + "type": "noteNode", + "width": 325 + }, + { + "data": { + "id": "Memory-gWJrq", + "node": { + "base_classes": ["Data", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Retrieves stored chat messages from Langflow tables or an external memory.", + "display_name": "Chat Memory", + "documentation": "", + "edited": false, + "field_order": [ + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template" + ], + "frozen": false, + "icon": "message-square-more", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Data", + "method": "retrieve_messages", + "name": "messages", + "selected": "Data", + "tool_mode": true, + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "allows_loop": false, + "cache": true, + "display_name": "Message", + "method": "retrieve_messages_as_text", + "name": "messages_text", + "selected": "Message", + "tool_mode": true, + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "allows_loop": false, + "cache": true, + "display_name": "DataFrame", + "method": "as_dataframe", + "name": "dataframe", + "selected": "DataFrame", + "tool_mode": true, + "types": ["DataFrame"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from typing import cast\n\nfrom langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import aget_messages\nfrom langflow.schema import Data\nfrom 
langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Message History\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"Memory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n tool_mode=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. \"\n \"It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Message\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"DataFrame\", name=\"dataframe\", method=\"as_dataframe\"),\n ]\n\n async def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory and not hasattr(self.memory, \"aget_messages\"):\n memory_name = type(self.memory).__name__\n err_msg = f\"External Memory object ({memory_name}) must have 'aget_messages' method.\"\n raise AttributeError(err_msg)\n # Check if n_messages is None or 0\n if n_messages == 0:\n stored = []\n elif self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = await self.memory.aget_messages()\n # langchain memories are supposed to return messages in ascending order\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = await aget_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return cast(Data, stored)\n\n async def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, await self.retrieve_messages())\n self.status = stored_text\n return 
Message(text=stored_text)\n\n async def as_dataframe(self) -> DataFrame:\n \"\"\"Convert the retrieved messages into a DataFrame.\n\n Returns:\n DataFrame: A DataFrame containing the message data.\n \"\"\"\n messages = await self.retrieve_messages()\n return DataFrame(messages)\n" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["Memory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + } + }, + "tool_mode": false + }, + "type": "Memory" + }, + "dragging": false, + "height": 264, + "id": "Memory-gWJrq", + "measured": { + "height": 264, + "width": 320 + }, + "position": { + "x": 1947.7805399474369, + "y": 766.1115984799474 + }, + "positionAbsolute": { + "x": 1947.7805399474369, + "y": 766.1115984799474 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "Prompt-yhdMP", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": ["memory"] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "error": null, + "field_order": ["template"], + "frozen": false, + "full_path": null, + "icon": "prompts", + "is_composition": null, + "is_input": null, + "is_output": null, + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "name": "", + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "tool_mode": true, + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import MessageTextInput, Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n tool_mode=True,\n advanced=True,\n info=\"A placeholder input for tool mode.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n async def update_frontend_node(self, new_frontend_node: dict, 
current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = await super().update_frontend_node(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" }, + "memory": { + "advanced": false, + "display_name": "memory", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "You are a helpful assistant that answers questions.\n\nUse markdown to format your answer, properly embedding images and urls.\n\nHistory: \n\n{memory}\n" + }, + "tool_placeholder": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Tool Placeholder", + "dynamic": false, + "info": "A placeholder input for tool mode.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "tool_placeholder", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 347, + "id": "Prompt-yhdMP", + "measured": { + "height": 347, + "width": 320 + }, + "position": { + "x": 2327.422938009026, + "y": 675.992123914672 + }, + "positionAbsolute": { + "x": 2327.422938009026, + "y": 675.992123914672 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "OpenAIModel-63o3Q", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "category": "models", + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed" + ], + "frozen": false, + "icon": "OpenAI", + "key": "OpenAIModel", + "legacy": false, + "metadata": { + "keywords": ["model", "llm", "language model", "large language model"] + }, + "minimized": false, + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Message", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": 
"Message", + "tool_mode": true, + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "allows_loop": false, + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": ["api_key"], + "selected": "LanguageModel", + "tool_mode": true, + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "score": 0.14285714285714285, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": [], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from typing import Any\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import (\n OPENAI_MODEL_NAMES,\n OPENAI_REASONING_MODEL_NAMES,\n)\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput\nfrom langflow.logging import logger\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES + OPENAI_REASONING_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[1],\n combobox=True,\n real_time_refresh=True,\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n required=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n show=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n IntInput(\n name=\"max_retries\",\n display_name=\"Max Retries\",\n info=\"The maximum number of retries to make when generating.\",\n advanced=True,\n value=5,\n ),\n IntInput(\n name=\"timeout\",\n display_name=\"Timeout\",\n info=\"The timeout for requests to OpenAI completion API.\",\n advanced=True,\n value=700,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n parameters = {\n \"api_key\": SecretStr(self.api_key).get_secret_value() if self.api_key else None,\n \"model_name\": self.model_name,\n \"max_tokens\": self.max_tokens or None,\n \"model_kwargs\": self.model_kwargs or {},\n \"base_url\": self.openai_api_base or \"https://api.openai.com/v1\",\n \"seed\": self.seed,\n \"max_retries\": self.max_retries,\n \"timeout\": self.timeout,\n \"temperature\": self.temperature if self.temperature is not None else 0.1,\n }\n\n logger.info(f\"Model name: {self.model_name}\")\n if self.model_name in OPENAI_REASONING_MODEL_NAMES:\n logger.info(\"Getting reasoning model parameters\")\n parameters.pop(\"temperature\")\n parameters.pop(\"seed\")\n output = ChatOpenAI(**parameters)\n if self.json_mode:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None) -> dict:\n if field_name in {\"base_url\", \"model_name\", \"api_key\"} and field_value in OPENAI_REASONING_MODEL_NAMES:\n build_config[\"temperature\"][\"show\"] = False\n build_config[\"seed\"][\"show\"] = False\n if field_name in {\"base_url\", \"model_name\", \"api_key\"} and field_value in OPENAI_MODEL_NAMES:\n build_config[\"temperature\"][\"show\"] = True\n build_config[\"seed\"][\"show\"] = True\n return build_config\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "list_add_label": "Add More", + "name": "json_mode", + "placeholder": "", + "required": false, + "show": 
true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_retries": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Retries", + "dynamic": false, + "info": "The maximum number of retries to make when generating.", + "list": false, + "list_add_label": "Add More", + "name": "max_retries", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "int", + "value": 5 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "list_add_label": "Add More", + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "list_add_label": "Add More", + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": true, + "dialog_inputs": {}, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4.1", + "gpt-4.1-mini", + "gpt-4.1-nano", + "gpt-4.5-preview", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "o1" + ], + "options_metadata": [], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4.1-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "list_add_label": "Add More", + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "list": false, + "list_add_label": "Add More", + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "multiline": true, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "SliderInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "max_label": "", + "max_label_icon": "", + "min_label": "", + "min_label_icon": "", + "name": "temperature", + "placeholder": "", + "range_spec": { + "max": 1, + "min": 0, + "step": 0.01, + "step_type": "float" + }, + "required": false, + "show": true, + "slider_buttons": false, + "slider_buttons_options": [], + "slider_input": false, + "title_case": false, + "tool_mode": false, + "type": "slider", + "value": 0.1 + }, + "timeout": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Timeout", + "dynamic": false, + "info": "The timeout for requests to OpenAI completion API.", + "list": false, + "list_add_label": "Add More", + "name": "timeout", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "int", + "value": 700 + } + }, + "tool_mode": false + }, + "showNode": true, + "type": "OpenAIModel" + }, + "dragging": false, + "id": "OpenAIModel-63o3Q", + "measured": { + "height": 653, + "width": 320 + }, + "position": { + "x": 2726.7429097065215, + "y": 460.96432586131795 + }, + "selected": false, + "type": "genericNode" + } + ], + "viewport": { + "x": -1173.6150415418488, + "y": -168.93756272342318, + "zoom": 0.7486095175892006 + } + }, + "description": "Create a chatbot that saves and references previous messages, enabling the model to maintain context throughout the conversation.", + "endpoint_name": null, + "gradient": "4", + "icon": "MessagesSquare", + "id": "7d334df6-6cf5-4d09-b6bf-169247b20446", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Memory Chatbot", + "tags": ["chatbots", "openai", "assistants"] +}
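
For reference, a flow export like the one added above can also be driven outside the Playground. The snippet below is a minimal sketch, assuming the JSON is saved locally as memory_chatbot.json (an illustrative path, not part of this diff), the langflow package is installed, and OPENAI_API_KEY is set in the environment; run_flow_from_json is the loader Langflow documents for flow JSON exports, though its exact signature may vary by version.

# Minimal sketch: run the exported Memory Chatbot flow programmatically.
# The file path and session ID below are illustrative assumptions.
from langflow.load import run_flow_from_json

result = run_flow_from_json(
    flow="memory_chatbot.json",
    input_value="what is my name",
    session_id="demo-session",   # reuse the same ID so Message History sees prior turns
    fallback_to_env_vars=True,   # resolve the api_key field from OPENAI_API_KEY
)
print(result)

Because messages are stored per session ID, a second call with the same session_id lets the Prompt's {memory} variable pick up the first exchange.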