| { | |
| "id": "b979378b-cae3-4091-8163-d88f898511f3", | |
| "data": { | |
| "nodes": [ | |
| { | |
| "id": "ChatOutput-QK9Qv", | |
| "type": "genericNode", | |
| "position": { | |
| "x": 1810.0826252105828, | |
| "y": 258.16032656133603 | |
| }, | |
| "data": { | |
| "node": { | |
| "template": { | |
| "_type": "Component", | |
| "background_color": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "background_color", | |
| "value": "", | |
| "display_name": "Background Color", | |
| "advanced": true, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "The background color of the icon.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "chat_icon": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chat_icon", | |
| "value": "", | |
| "display_name": "Icon", | |
| "advanced": true, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "The icon of the message.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "code": { | |
| "type": "code", | |
| "required": true, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "password": false, | |
| "name": "code", | |
| "advanced": true, | |
| "dynamic": true, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false | |
| }, | |
| "data_template": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "data_template", | |
| "value": "{text}", | |
| "display_name": "Data Template", | |
| "advanced": true, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "input_value": { | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "input_value", | |
| "value": "", | |
| "display_name": "Text", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "Message to be passed as output.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageInput" | |
| }, | |
| "sender": { | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "options": [ | |
| "Machine", | |
| "User" | |
| ], | |
| "combobox": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "sender", | |
| "value": "Machine", | |
| "display_name": "Sender Type", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Type of sender.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "DropdownInput" | |
| }, | |
| "sender_name": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "sender_name", | |
| "value": "", | |
| "display_name": "Sender Name", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "Name of the sender.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "session_id": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "session_id", | |
| "value": "", | |
| "display_name": "Session ID", | |
| "advanced": true, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "should_store_message": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "should_store_message", | |
| "value": true, | |
| "display_name": "Store Messages", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Store the message in the history.", | |
| "title_case": false, | |
| "type": "bool", | |
| "_input_type": "BoolInput" | |
| }, | |
| "text_color": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "text_color", | |
| "value": "", | |
| "display_name": "Text Color", | |
| "advanced": true, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "The text color of the name", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| } | |
| }, | |
| "description": "Display a chat message in the Playground.", | |
| "icon": "MessagesSquare", | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "display_name": "Chat Output", | |
| "documentation": "", | |
| "custom_fields": {}, | |
| "output_types": [], | |
| "pinned": false, | |
| "conditional_paths": [], | |
| "frozen": false, | |
| "outputs": [ | |
| { | |
| "types": [ | |
| "Message" | |
| ], | |
| "selected": "Message", | |
| "name": "message", | |
| "display_name": "Message", | |
| "method": "message_response", | |
| "value": "__UNDEFINED__", | |
| "cache": true | |
| } | |
| ], | |
| "field_order": [ | |
| "input_value", | |
| "should_store_message", | |
| "sender", | |
| "sender_name", | |
| "session_id", | |
| "data_template", | |
| "background_color", | |
| "chat_icon", | |
| "text_color" | |
| ], | |
| "beta": false, | |
| "legacy": false, | |
| "edited": false, | |
| "metadata": {}, | |
| "tool_mode": false, | |
| "lf_version": "1.1.1" | |
| }, | |
| "type": "ChatOutput", | |
| "id": "ChatOutput-QK9Qv" | |
| }, | |
| "selected": false, | |
| "width": 320, | |
| "height": 319, | |
| "positionAbsolute": { | |
| "x": 1810.0826252105828, | |
| "y": 258.16032656133603 | |
| }, | |
| "dragging": false | |
| }, | |
| { | |
| "id": "ChatInput-VybAX", | |
| "type": "genericNode", | |
| "position": { | |
| "x": -456.75172621990527, | |
| "y": 308.05960206433684 | |
| }, | |
| "data": { | |
| "node": { | |
| "template": { | |
| "_type": "Component", | |
| "files": { | |
| "trace_as_metadata": true, | |
| "file_path": "", | |
| "fileTypes": [ | |
| "txt", | |
| "md", | |
| "mdx", | |
| "csv", | |
| "json", | |
| "yaml", | |
| "yml", | |
| "xml", | |
| "html", | |
| "htm", | |
| "pdf", | |
| "docx", | |
| "py", | |
| "sh", | |
| "sql", | |
| "js", | |
| "ts", | |
| "tsx", | |
| "jpg", | |
| "jpeg", | |
| "png", | |
| "bmp", | |
| "image" | |
| ], | |
| "list": true, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "files", | |
| "value": "", | |
| "display_name": "Files", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Files to be sent with the message.", | |
| "title_case": false, | |
| "type": "file", | |
| "_input_type": "FileInput" | |
| }, | |
| "background_color": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "background_color", | |
| "value": "", | |
| "display_name": "Background Color", | |
| "advanced": true, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "The background color of the icon.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "chat_icon": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chat_icon", | |
| "value": "", | |
| "display_name": "Icon", | |
| "advanced": true, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "The icon of the message.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "code": { | |
| "type": "code", | |
| "required": true, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "password": false, | |
| "name": "code", | |
| "advanced": true, | |
| "dynamic": true, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false | |
| }, | |
| "input_value": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "multiline": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "input_value", | |
| "value": "what is Digital Service Units ?", | |
| "display_name": "Text", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "Message to be passed as input.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MultilineInput" | |
| }, | |
| "sender": { | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "options": [ | |
| "Machine", | |
| "User" | |
| ], | |
| "combobox": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "sender", | |
| "value": "User", | |
| "display_name": "Sender Type", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Type of sender.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "DropdownInput" | |
| }, | |
| "sender_name": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "sender_name", | |
| "value": "User", | |
| "display_name": "Sender Name", | |
| "advanced": true, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "Name of the sender.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "session_id": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "session_id", | |
| "value": "", | |
| "display_name": "Session ID", | |
| "advanced": true, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "should_store_message": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "should_store_message", | |
| "value": true, | |
| "display_name": "Store Messages", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Store the message in the history.", | |
| "title_case": false, | |
| "type": "bool", | |
| "_input_type": "BoolInput" | |
| }, | |
| "text_color": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "text_color", | |
| "value": "", | |
| "display_name": "Text Color", | |
| "advanced": true, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "The text color of the name", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| } | |
| }, | |
| "description": "Get chat inputs from the Playground.", | |
| "icon": "MessagesSquare", | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "display_name": "Chat Input", | |
| "documentation": "", | |
| "custom_fields": {}, | |
| "output_types": [], | |
| "pinned": false, | |
| "conditional_paths": [], | |
| "frozen": false, | |
| "outputs": [ | |
| { | |
| "types": [ | |
| "Message" | |
| ], | |
| "selected": "Message", | |
| "name": "message", | |
| "display_name": "Message", | |
| "method": "message_response", | |
| "value": "__UNDEFINED__", | |
| "cache": true | |
| } | |
| ], | |
| "field_order": [ | |
| "input_value", | |
| "should_store_message", | |
| "sender", | |
| "sender_name", | |
| "session_id", | |
| "files", | |
| "background_color", | |
| "chat_icon", | |
| "text_color" | |
| ], | |
| "beta": false, | |
| "legacy": false, | |
| "edited": false, | |
| "metadata": {}, | |
| "tool_mode": false, | |
| "lf_version": "1.1.1" | |
| }, | |
| "type": "ChatInput", | |
| "id": "ChatInput-VybAX" | |
| }, | |
| "selected": false, | |
| "width": 320, | |
| "height": 233, | |
| "positionAbsolute": { | |
| "x": -456.75172621990527, | |
| "y": 308.05960206433684 | |
| }, | |
| "dragging": false | |
| }, | |
| { | |
| "id": "OllamaModel-s3Gtg", | |
| "type": "genericNode", | |
| "position": { | |
| "x": 835.2958364585486, | |
| "y": 454.5846273774677 | |
| }, | |
| "data": { | |
| "node": { | |
| "template": { | |
| "_type": "Component", | |
| "output_parser": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "output_parser", | |
| "value": "", | |
| "display_name": "Output Parser", | |
| "advanced": true, | |
| "input_types": [ | |
| "OutputParser" | |
| ], | |
| "dynamic": false, | |
| "info": "The parser to use to parse the output of the model", | |
| "title_case": false, | |
| "type": "other", | |
| "_input_type": "HandleInput" | |
| }, | |
| "base_url": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "base_url", | |
| "value": "http://localhost:11434", | |
| "display_name": "Base URL", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "Endpoint of the Ollama API. Defaults to 'http://localhost:11434' if not specified.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "code": { | |
| "type": "code", | |
| "required": true, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "from typing import Any\nfrom urllib.parse import urljoin\n\nimport httpx\nfrom langchain_ollama import ChatOllama\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs.inputs import HandleInput\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, StrInput\n\n\nclass ChatOllamaComponent(LCModelComponent):\n display_name = \"Ollama\"\n description = \"Generate text using Ollama Local LLMs.\"\n icon = \"Ollama\"\n name = \"OllamaModel\"\n\n def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None):\n if field_name == \"mirostat\":\n if field_value == \"Disabled\":\n build_config[\"mirostat_eta\"][\"advanced\"] = True\n build_config[\"mirostat_tau\"][\"advanced\"] = True\n build_config[\"mirostat_eta\"][\"value\"] = None\n build_config[\"mirostat_tau\"][\"value\"] = None\n\n else:\n build_config[\"mirostat_eta\"][\"advanced\"] = False\n build_config[\"mirostat_tau\"][\"advanced\"] = False\n\n if field_value == \"Mirostat 2.0\":\n build_config[\"mirostat_eta\"][\"value\"] = 0.2\n build_config[\"mirostat_tau\"][\"value\"] = 10\n else:\n build_config[\"mirostat_eta\"][\"value\"] = 0.1\n build_config[\"mirostat_tau\"][\"value\"] = 5\n\n if field_name == \"model_name\":\n base_url_dict = build_config.get(\"base_url\", {})\n base_url_load_from_db = base_url_dict.get(\"load_from_db\", False)\n base_url_value = base_url_dict.get(\"value\")\n if base_url_load_from_db:\n base_url_value = self.variables(base_url_value, field_name)\n elif not base_url_value:\n base_url_value = \"http://localhost:11434\"\n build_config[\"model_name\"][\"options\"] = self.get_model(base_url_value)\n if field_name == \"keep_alive_flag\":\n if field_value == \"Keep\":\n build_config[\"keep_alive\"][\"value\"] = \"-1\"\n build_config[\"keep_alive\"][\"advanced\"] = True\n elif field_value == \"Immediately\":\n 
build_config[\"keep_alive\"][\"value\"] = \"0\"\n build_config[\"keep_alive\"][\"advanced\"] = True\n else:\n build_config[\"keep_alive\"][\"advanced\"] = False\n\n return build_config\n\n def get_model(self, base_url_value: str) -> list[str]:\n try:\n url = urljoin(base_url_value, \"/api/tags\")\n with httpx.Client() as client:\n response = client.get(url)\n response.raise_for_status()\n data = response.json()\n\n return [model[\"name\"] for model in data.get(\"models\", [])]\n except Exception as e:\n msg = \"Could not retrieve models. Please, make sure Ollama is running.\"\n raise ValueError(msg) from e\n\n inputs = [\n StrInput(\n name=\"base_url\",\n display_name=\"Base URL\",\n info=\"Endpoint of the Ollama API. Defaults to 'http://localhost:11434' if not specified.\",\n value=\"http://localhost:11434\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n value=\"llama3.1\",\n info=\"Refer to https://ollama.com/library for more models.\",\n refresh_button=True,\n ),\n FloatInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.2,\n info=\"Controls the creativity of model responses.\",\n ),\n StrInput(\n name=\"format\", display_name=\"Format\", info=\"Specify the format of the output (e.g., json).\", advanced=True\n ),\n DictInput(name=\"metadata\", display_name=\"Metadata\", info=\"Metadata to add to the run trace.\", advanced=True),\n DropdownInput(\n name=\"mirostat\",\n display_name=\"Mirostat\",\n options=[\"Disabled\", \"Mirostat\", \"Mirostat 2.0\"],\n info=\"Enable/disable Mirostat sampling for controlling perplexity.\",\n value=\"Disabled\",\n advanced=True,\n real_time_refresh=True,\n ),\n FloatInput(\n name=\"mirostat_eta\",\n display_name=\"Mirostat Eta\",\n info=\"Learning rate for Mirostat algorithm. (Default: 0.1)\",\n advanced=True,\n ),\n FloatInput(\n name=\"mirostat_tau\",\n display_name=\"Mirostat Tau\",\n info=\"Controls the balance between coherence and diversity of the output. 
(Default: 5.0)\",\n advanced=True,\n ),\n IntInput(\n name=\"num_ctx\",\n display_name=\"Context Window Size\",\n info=\"Size of the context window for generating tokens. (Default: 2048)\",\n advanced=True,\n ),\n IntInput(\n name=\"num_gpu\",\n display_name=\"Number of GPUs\",\n info=\"Number of GPUs to use for computation. (Default: 1 on macOS, 0 to disable)\",\n advanced=True,\n ),\n IntInput(\n name=\"num_thread\",\n display_name=\"Number of Threads\",\n info=\"Number of threads to use during computation. (Default: detected for optimal performance)\",\n advanced=True,\n ),\n IntInput(\n name=\"repeat_last_n\",\n display_name=\"Repeat Last N\",\n info=\"How far back the model looks to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx)\",\n advanced=True,\n ),\n FloatInput(\n name=\"repeat_penalty\",\n display_name=\"Repeat Penalty\",\n info=\"Penalty for repetitions in generated text. (Default: 1.1)\",\n advanced=True,\n ),\n FloatInput(name=\"tfs_z\", display_name=\"TFS Z\", info=\"Tail free sampling value. (Default: 1)\", advanced=True),\n IntInput(name=\"timeout\", display_name=\"Timeout\", info=\"Timeout for the request stream.\", advanced=True),\n IntInput(\n name=\"top_k\", display_name=\"Top K\", info=\"Limits token selection to top K. (Default: 40)\", advanced=True\n ),\n FloatInput(name=\"top_p\", display_name=\"Top P\", info=\"Works together with top-k. 
(Default: 0.9)\", advanced=True),\n BoolInput(name=\"verbose\", display_name=\"Verbose\", info=\"Whether to print out response text.\", advanced=True),\n StrInput(\n name=\"tags\",\n display_name=\"Tags\",\n info=\"Comma-separated list of tags to add to the run trace.\",\n advanced=True,\n ),\n StrInput(\n name=\"stop_tokens\",\n display_name=\"Stop Tokens\",\n info=\"Comma-separated list of tokens to signal the model to stop generating text.\",\n advanced=True,\n ),\n StrInput(name=\"system\", display_name=\"System\", info=\"System to use for generating text.\", advanced=True),\n StrInput(name=\"template\", display_name=\"Template\", info=\"Template to use for generating text.\", advanced=True),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n *LCModelComponent._base_inputs,\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # Mapping mirostat settings to their corresponding values\n mirostat_options = {\"Mirostat\": 1, \"Mirostat 2.0\": 2}\n\n # Default to 0 for 'Disabled'\n mirostat_value = mirostat_options.get(self.mirostat, 0)\n\n # Set mirostat_eta and mirostat_tau to None if mirostat is disabled\n if mirostat_value == 0:\n mirostat_eta = None\n mirostat_tau = None\n else:\n mirostat_eta = self.mirostat_eta\n mirostat_tau = self.mirostat_tau\n\n # Mapping system settings to their corresponding values\n llm_params = {\n \"base_url\": self.base_url,\n \"model\": self.model_name,\n \"mirostat\": mirostat_value,\n \"format\": self.format,\n \"metadata\": self.metadata,\n \"tags\": self.tags.split(\",\") if self.tags else None,\n \"mirostat_eta\": mirostat_eta,\n \"mirostat_tau\": mirostat_tau,\n \"num_ctx\": self.num_ctx or None,\n \"num_gpu\": self.num_gpu or None,\n \"num_thread\": self.num_thread or None,\n \"repeat_last_n\": self.repeat_last_n or None,\n \"repeat_penalty\": 
self.repeat_penalty or None,\n \"temperature\": self.temperature or None,\n \"stop\": self.stop_tokens.split(\",\") if self.stop_tokens else None,\n \"system\": self.system,\n \"template\": self.template,\n \"tfs_z\": self.tfs_z or None,\n \"timeout\": self.timeout or None,\n \"top_k\": self.top_k or None,\n \"top_p\": self.top_p or None,\n \"verbose\": self.verbose,\n }\n\n # Remove parameters with None values\n llm_params = {k: v for k, v in llm_params.items() if v is not None}\n\n try:\n output = ChatOllama(**llm_params)\n except Exception as e:\n msg = \"Could not initialize Ollama LLM.\"\n raise ValueError(msg) from e\n\n return output\n", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "password": false, | |
| "name": "code", | |
| "advanced": true, | |
| "dynamic": true, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false | |
| }, | |
| "format": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "format", | |
| "value": "", | |
| "display_name": "Format", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Specify the format of the output (e.g., json).", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "input_value": { | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "input_value", | |
| "value": "", | |
| "display_name": "Input", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageInput" | |
| }, | |
| "metadata": { | |
| "trace_as_input": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "metadata", | |
| "value": {}, | |
| "display_name": "Metadata", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Metadata to add to the run trace.", | |
| "title_case": false, | |
| "type": "dict", | |
| "_input_type": "DictInput" | |
| }, | |
| "mirostat": { | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "options": [ | |
| "Disabled", | |
| "Mirostat", | |
| "Mirostat 2.0" | |
| ], | |
| "combobox": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "mirostat", | |
| "value": "Disabled", | |
| "display_name": "Mirostat", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Enable/disable Mirostat sampling for controlling perplexity.", | |
| "real_time_refresh": true, | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "DropdownInput" | |
| }, | |
| "mirostat_eta": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "mirostat_eta", | |
| "value": "", | |
| "display_name": "Mirostat Eta", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Learning rate for Mirostat algorithm. (Default: 0.1)", | |
| "title_case": false, | |
| "type": "float", | |
| "_input_type": "FloatInput" | |
| }, | |
| "mirostat_tau": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "mirostat_tau", | |
| "value": "", | |
| "display_name": "Mirostat Tau", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Controls the balance between coherence and diversity of the output. (Default: 5.0)", | |
| "title_case": false, | |
| "type": "float", | |
| "_input_type": "FloatInput" | |
| }, | |
| "model_name": { | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "options": [ | |
| "qwen2.5:14b", | |
| "llama3.2-vision:latest", | |
| "vanilj/Phi-4:Q4_K_M", | |
| "llama3.1:8b", | |
| "nomic-embed-text:latest", | |
| "exaone3.5:7.8b-instruct-q8_0", | |
| "llama3.2:1b", | |
| "research-phi3-q3kl:latest", | |
| "phi3:14b-medium-128k-instruct-q3_K_L", | |
| "research-phi3-q4km:latest", | |
| "marco-o1:latest", | |
| "tulu3:latest", | |
| "llama3.1:8b-instruct-q8_0", | |
| "qwen2.5-coder:14b" | |
| ], | |
| "combobox": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "model_name", | |
| "value": "llama3.1:8b", | |
| "display_name": "Model Name", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "Refer to https://ollama.com/library for more models.", | |
| "refresh_button": true, | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "DropdownInput" | |
| }, | |
| "num_ctx": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "num_ctx", | |
| "value": "", | |
| "display_name": "Context Window Size", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Size of the context window for generating tokens. (Default: 2048)", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "num_gpu": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "num_gpu", | |
| "value": "", | |
| "display_name": "Number of GPUs", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Number of GPUs to use for computation. (Default: 1 on macOS, 0 to disable)", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "num_thread": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "num_thread", | |
| "value": "", | |
| "display_name": "Number of Threads", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Number of threads to use during computation. (Default: detected for optimal performance)", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "repeat_last_n": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "repeat_last_n", | |
| "value": "", | |
| "display_name": "Repeat Last N", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "How far back the model looks to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx)", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "repeat_penalty": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "repeat_penalty", | |
| "value": "", | |
| "display_name": "Repeat Penalty", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Penalty for repetitions in generated text. (Default: 1.1)", | |
| "title_case": false, | |
| "type": "float", | |
| "_input_type": "FloatInput" | |
| }, | |
| "stop_tokens": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "stop_tokens", | |
| "value": "", | |
| "display_name": "Stop Tokens", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Comma-separated list of tokens to signal the model to stop generating text.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "stream": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "stream", | |
| "value": false, | |
| "display_name": "Stream", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "Stream the response from the model. Streaming works only in Chat.", | |
| "title_case": false, | |
| "type": "bool", | |
| "_input_type": "BoolInput" | |
| }, | |
| "system": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "system", | |
| "value": "", | |
| "display_name": "System", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "System to use for generating text.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "system_message": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "system_message", | |
| "value": "", | |
| "display_name": "System Message", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "System message to pass to the model.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "tags": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "tags", | |
| "value": "", | |
| "display_name": "Tags", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Comma-separated list of tags to add to the run trace.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "temperature": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "temperature", | |
| "value": 0.2, | |
| "display_name": "Temperature", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "Controls the creativity of model responses.", | |
| "title_case": false, | |
| "type": "float", | |
| "_input_type": "FloatInput" | |
| }, | |
| "template": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "template", | |
| "value": "", | |
| "display_name": "Template", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Template to use for generating text.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "tfs_z": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "tfs_z", | |
| "value": "", | |
| "display_name": "TFS Z", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Tail free sampling value. (Default: 1)", | |
| "title_case": false, | |
| "type": "float", | |
| "_input_type": "FloatInput" | |
| }, | |
| "timeout": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "timeout", | |
| "value": "", | |
| "display_name": "Timeout", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Timeout for the request stream.", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "top_k": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "top_k", | |
| "value": "", | |
| "display_name": "Top K", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Limits token selection to top K. (Default: 40)", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "top_p": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "top_p", | |
| "value": "", | |
| "display_name": "Top P", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Works together with top-k. (Default: 0.9)", | |
| "title_case": false, | |
| "type": "float", | |
| "_input_type": "FloatInput" | |
| }, | |
| "verbose": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "verbose", | |
| "value": false, | |
| "display_name": "Verbose", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Whether to print out response text.", | |
| "title_case": false, | |
| "type": "bool", | |
| "_input_type": "BoolInput" | |
| } | |
| }, | |
| "description": "Generate text using Ollama Local LLMs.", | |
| "icon": "Ollama", | |
| "base_classes": [ | |
| "LanguageModel", | |
| "Message" | |
| ], | |
| "display_name": "Ollama", | |
| "documentation": "", | |
| "custom_fields": {}, | |
| "output_types": [], | |
| "pinned": false, | |
| "conditional_paths": [], | |
| "frozen": false, | |
| "outputs": [ | |
| { | |
| "types": [ | |
| "Message" | |
| ], | |
| "selected": "Message", | |
| "name": "text_output", | |
| "hidden": null, | |
| "display_name": "Text", | |
| "method": "text_response", | |
| "value": "__UNDEFINED__", | |
| "cache": true, | |
| "required_inputs": [] | |
| }, | |
| { | |
| "types": [ | |
| "LanguageModel" | |
| ], | |
| "selected": "LanguageModel", | |
| "name": "model_output", | |
| "hidden": null, | |
| "display_name": "Language Model", | |
| "method": "build_model", | |
| "value": "__UNDEFINED__", | |
| "cache": true, | |
| "required_inputs": [] | |
| } | |
| ], | |
| "field_order": [ | |
| "base_url", | |
| "model_name", | |
| "temperature", | |
| "format", | |
| "metadata", | |
| "mirostat", | |
| "mirostat_eta", | |
| "mirostat_tau", | |
| "num_ctx", | |
| "num_gpu", | |
| "num_thread", | |
| "repeat_last_n", | |
| "repeat_penalty", | |
| "tfs_z", | |
| "timeout", | |
| "top_k", | |
| "top_p", | |
| "verbose", | |
| "tags", | |
| "stop_tokens", | |
| "system", | |
| "template", | |
| "output_parser", | |
| "input_value", | |
| "system_message", | |
| "stream" | |
| ], | |
| "beta": false, | |
| "legacy": false, | |
| "edited": false, | |
| "metadata": {}, | |
| "tool_mode": false, | |
| "lf_version": "1.1.1" | |
| }, | |
| "type": "OllamaModel", | |
| "id": "OllamaModel-s3Gtg" | |
| }, | |
| "selected": true, | |
| "width": 320, | |
| "height": 669, | |
| "positionAbsolute": { | |
| "x": 835.2958364585486, | |
| "y": 454.5846273774677 | |
| }, | |
| "dragging": false | |
| }, | |
| { | |
| "id": "Prompt-K9oDg", | |
| "type": "genericNode", | |
| "position": { | |
| "x": 273.88486263922505, | |
| "y": 838.480314948909 | |
| }, | |
| "data": { | |
| "node": { | |
| "template": { | |
| "_type": "Component", | |
| "code": { | |
| "type": "code", | |
| "required": true, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n 
update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "password": false, | |
| "name": "code", | |
| "advanced": true, | |
| "dynamic": true, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false | |
| }, | |
| "template": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "template", | |
| "value": "Context:\n{context}\n\n\nYou are a teacher. You use PDFs or other documents to answer questions.\nUse the provided context to answer the question. If the answer does not exist in the context, respond with \"I don't know\".\n\nQuestion:\n{question}", | |
| "display_name": "Template", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "prompt", | |
| "_input_type": "PromptInput" | |
| }, | |
| "question": { | |
| "field_type": "str", | |
| "required": false, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "name": "question", | |
| "display_name": "question", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message", | |
| "Text" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false, | |
| "type": "str" | |
| }, | |
| "context": { | |
| "field_type": "str", | |
| "required": false, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "name": "context", | |
| "display_name": "context", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message", | |
| "Text" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false, | |
| "type": "str" | |
| } | |
| }, | |
| "description": "Create a prompt template with dynamic variables.", | |
| "icon": "prompts", | |
| "is_input": null, | |
| "is_output": null, | |
| "is_composition": null, | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "name": "", | |
| "display_name": "Prompt", | |
| "documentation": "", | |
| "custom_fields": { | |
| "template": [ | |
| "context", | |
| "question" | |
| ] | |
| }, | |
| "output_types": [], | |
| "full_path": null, | |
| "pinned": false, | |
| "conditional_paths": [], | |
| "frozen": false, | |
| "outputs": [ | |
| { | |
| "types": [ | |
| "Message" | |
| ], | |
| "selected": "Message", | |
| "name": "prompt", | |
| "hidden": null, | |
| "display_name": "Prompt Message", | |
| "method": "build_prompt", | |
| "value": "__UNDEFINED__", | |
| "cache": true, | |
| "required_inputs": null | |
| } | |
| ], | |
| "field_order": [ | |
| "template" | |
| ], | |
| "beta": false, | |
| "legacy": false, | |
| "error": null, | |
| "edited": false, | |
| "metadata": {}, | |
| "tool_mode": false, | |
| "lf_version": "1.1.1" | |
| }, | |
| "type": "Prompt", | |
| "id": "Prompt-K9oDg" | |
| }, | |
| "selected": false, | |
| "width": 320, | |
| "height": 431, | |
| "positionAbsolute": { | |
| "x": 273.88486263922505, | |
| "y": 838.480314948909 | |
| }, | |
| "dragging": false | |
| }, | |
| { | |
| "id": "Chroma-u9YZ4", | |
| "type": "genericNode", | |
| "position": { | |
| "x": -2114.2485581100273, | |
| "y": 1177.323575143643 | |
| }, | |
| "data": { | |
| "node": { | |
| "template": { | |
| "_type": "Component", | |
| "embedding": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "embedding", | |
| "value": "", | |
| "display_name": "Embedding", | |
| "advanced": false, | |
| "input_types": [ | |
| "Embeddings" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "other", | |
| "_input_type": "HandleInput" | |
| }, | |
| "ingest_data": { | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "list": true, | |
| "trace_as_input": true, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "ingest_data", | |
| "value": "", | |
| "display_name": "Ingest Data", | |
| "advanced": false, | |
| "input_types": [ | |
| "Data" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "other", | |
| "_input_type": "DataInput" | |
| }, | |
| "allow_duplicates": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "allow_duplicates", | |
| "value": false, | |
| "display_name": "Allow Duplicates", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "If false, will not add documents that are already in the Vector Store.", | |
| "title_case": false, | |
| "type": "bool", | |
| "_input_type": "BoolInput" | |
| }, | |
| "chroma_server_cors_allow_origins": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chroma_server_cors_allow_origins", | |
| "value": "", | |
| "display_name": "Server CORS Allow Origins", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "chroma_server_grpc_port": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chroma_server_grpc_port", | |
| "value": "", | |
| "display_name": "Server gRPC Port", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "chroma_server_host": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chroma_server_host", | |
| "value": "", | |
| "display_name": "Server Host", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "chroma_server_http_port": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chroma_server_http_port", | |
| "value": "", | |
| "display_name": "Server HTTP Port", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "chroma_server_ssl_enabled": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chroma_server_ssl_enabled", | |
| "value": false, | |
| "display_name": "Server SSL Enabled", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "bool", | |
| "_input_type": "BoolInput" | |
| }, | |
| "code": { | |
| "type": "code", | |
| "required": true, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "from copy import deepcopy\n\nfrom chromadb.config import Settings\nfrom langchain_chroma import Chroma\nfrom loguru import logger\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.base.vectorstores.utils import chroma_collection_to_data\nfrom langflow.io import BoolInput, DataInput, DropdownInput, HandleInput, IntInput, MultilineInput, StrInput\nfrom langflow.schema import Data\n\n\nclass ChromaVectorStoreComponent(LCVectorStoreComponent):\n \"\"\"Chroma Vector Store with search capabilities.\"\"\"\n\n display_name: str = \"Chroma DB\"\n description: str = \"Chroma Vector Store with search capabilities\"\n documentation = \"https://python.langchain.com/docs/integrations/vectorstores/chroma\"\n name = \"Chroma\"\n icon = \"Chroma\"\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n value=\"langflow\",\n ),\n StrInput(\n name=\"persist_directory\",\n display_name=\"Persist Directory\",\n ),\n MultilineInput(\n name=\"search_query\",\n display_name=\"Search Query\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n HandleInput(name=\"embedding\", display_name=\"Embedding\", input_types=[\"Embeddings\"]),\n StrInput(\n name=\"chroma_server_cors_allow_origins\",\n display_name=\"Server CORS Allow Origins\",\n advanced=True,\n ),\n StrInput(\n name=\"chroma_server_host\",\n display_name=\"Server Host\",\n advanced=True,\n ),\n IntInput(\n name=\"chroma_server_http_port\",\n display_name=\"Server HTTP Port\",\n advanced=True,\n ),\n IntInput(\n name=\"chroma_server_grpc_port\",\n display_name=\"Server gRPC Port\",\n advanced=True,\n ),\n BoolInput(\n name=\"chroma_server_ssl_enabled\",\n display_name=\"Server SSL Enabled\",\n advanced=True,\n ),\n BoolInput(\n name=\"allow_duplicates\",\n display_name=\"Allow Duplicates\",\n advanced=True,\n info=\"If false, will not add documents that are already in the Vector 
Store.\",\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n options=[\"Similarity\", \"MMR\"],\n value=\"Similarity\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=10,\n ),\n IntInput(\n name=\"limit\",\n display_name=\"Limit\",\n advanced=True,\n info=\"Limit the number of records to compare when Allow Duplicates is False.\",\n ),\n ]\n\n @check_cached_vector_store\n def build_vector_store(self) -> Chroma:\n \"\"\"Builds the Chroma object.\"\"\"\n try:\n from chromadb import Client\n from langchain_chroma import Chroma\n except ImportError as e:\n msg = \"Could not import Chroma integration package. Please install it with `pip install langchain-chroma`.\"\n raise ImportError(msg) from e\n # Chroma settings\n chroma_settings = None\n client = None\n if self.chroma_server_host:\n chroma_settings = Settings(\n chroma_server_cors_allow_origins=self.chroma_server_cors_allow_origins or [],\n chroma_server_host=self.chroma_server_host,\n chroma_server_http_port=self.chroma_server_http_port or None,\n chroma_server_grpc_port=self.chroma_server_grpc_port or None,\n chroma_server_ssl_enabled=self.chroma_server_ssl_enabled,\n )\n client = Client(settings=chroma_settings)\n\n # Check persist_directory and expand it if it is a relative path\n persist_directory = self.resolve_path(self.persist_directory) if self.persist_directory is not None else None\n\n chroma = Chroma(\n persist_directory=persist_directory,\n client=client,\n embedding_function=self.embedding,\n collection_name=self.collection_name,\n )\n\n self._add_documents_to_vector_store(chroma)\n self.status = chroma_collection_to_data(chroma.get(limit=self.limit))\n return chroma\n\n def _add_documents_to_vector_store(self, vector_store: \"Chroma\") -> None:\n \"\"\"Adds documents to the Vector Store.\"\"\"\n if not self.ingest_data:\n self.status = \"\"\n return\n\n 
_stored_documents_without_id = []\n if self.allow_duplicates:\n stored_data = []\n else:\n stored_data = chroma_collection_to_data(vector_store.get(limit=self.limit))\n for value in deepcopy(stored_data):\n del value.id\n _stored_documents_without_id.append(value)\n\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n if _input not in _stored_documents_without_id:\n documents.append(_input.to_lc_document())\n else:\n msg = \"Vector Store Inputs must be Data objects.\"\n raise TypeError(msg)\n\n if documents and self.embedding is not None:\n logger.debug(f\"Adding {len(documents)} documents to the Vector Store.\")\n vector_store.add_documents(documents)\n else:\n logger.debug(\"No documents to add to the Vector Store.\")\n", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "password": false, | |
| "name": "code", | |
| "advanced": true, | |
| "dynamic": true, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false | |
| }, | |
| "collection_name": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "collection_name", | |
| "value": "langflow", | |
| "display_name": "Collection Name", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "limit": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "limit", | |
| "value": "", | |
| "display_name": "Limit", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Limit the number of records to compare when Allow Duplicates is False.", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "number_of_results": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "number_of_results", | |
| "value": 10, | |
| "display_name": "Number of Results", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Number of results to return.", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "persist_directory": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "persist_directory", | |
| "value": "F:\\rag\\ragflow ai apps\\chroma for rag", | |
| "display_name": "Persist Directory", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "search_query": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "multiline": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "search_query", | |
| "value": "", | |
| "display_name": "Search Query", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MultilineInput" | |
| }, | |
| "search_type": { | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "options": [ | |
| "Similarity", | |
| "MMR" | |
| ], | |
| "combobox": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "search_type", | |
| "value": "Similarity", | |
| "display_name": "Search Type", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "DropdownInput" | |
| } | |
| }, | |
| "description": "Chroma Vector Store with search capabilities", | |
| "icon": "Chroma", | |
| "base_classes": [ | |
| "Data", | |
| "Retriever" | |
| ], | |
| "display_name": "Chroma DB", | |
| "documentation": "https://python.langchain.com/docs/integrations/vectorstores/chroma", | |
| "custom_fields": {}, | |
| "output_types": [], | |
| "pinned": false, | |
| "conditional_paths": [], | |
| "frozen": false, | |
| "outputs": [ | |
| { | |
| "types": [ | |
| "Retriever" | |
| ], | |
| "selected": "Retriever", | |
| "name": "base_retriever", | |
| "display_name": "Retriever", | |
| "method": "build_base_retriever", | |
| "value": "__UNDEFINED__", | |
| "cache": true, | |
| "required_inputs": [] | |
| }, | |
| { | |
| "types": [ | |
| "Data" | |
| ], | |
| "selected": "Data", | |
| "name": "search_results", | |
| "display_name": "Search Results", | |
| "method": "search_documents", | |
| "value": "__UNDEFINED__", | |
| "cache": true, | |
| "required_inputs": [] | |
| } | |
| ], | |
| "field_order": [ | |
| "collection_name", | |
| "persist_directory", | |
| "search_query", | |
| "ingest_data", | |
| "embedding", | |
| "chroma_server_cors_allow_origins", | |
| "chroma_server_host", | |
| "chroma_server_http_port", | |
| "chroma_server_grpc_port", | |
| "chroma_server_ssl_enabled", | |
| "allow_duplicates", | |
| "search_type", | |
| "number_of_results", | |
| "limit" | |
| ], | |
| "beta": false, | |
| "legacy": false, | |
| "edited": false, | |
| "metadata": {}, | |
| "tool_mode": false, | |
| "lf_version": "1.1.1" | |
| }, | |
| "type": "Chroma", | |
| "id": "Chroma-u9YZ4" | |
| }, | |
| "selected": false, | |
| "width": 320, | |
| "height": 549, | |
| "dragging": false, | |
| "positionAbsolute": { | |
| "x": -2114.2485581100273, | |
| "y": 1177.323575143643 | |
| } | |
| }, | |
| { | |
| "id": "File-8FCE5", | |
| "type": "genericNode", | |
| "position": { | |
| "x": -3240.8326971391325, | |
| "y": 1083.8177747002546 | |
| }, | |
| "data": { | |
| "node": { | |
| "template": { | |
| "_type": "Component", | |
| "path": { | |
| "trace_as_metadata": true, | |
| "file_path": "b979378b-cae3-4091-8163-d88f898511f3\\2024-12-27_16-01-41_Unit_5 DC notes.pdf", | |
| "fileTypes": [ | |
| "txt", | |
| "md", | |
| "mdx", | |
| "csv", | |
| "json", | |
| "yaml", | |
| "yml", | |
| "xml", | |
| "html", | |
| "htm", | |
| "pdf", | |
| "docx", | |
| "py", | |
| "sh", | |
| "sql", | |
| "js", | |
| "ts", | |
| "tsx", | |
| "zip" | |
| ], | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "path", | |
| "value": "", | |
| "display_name": "Path", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx, zip", | |
| "title_case": false, | |
| "type": "file", | |
| "_input_type": "FileInput" | |
| }, | |
| "code": { | |
| "type": "code", | |
| "required": true, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "from pathlib import Path\nfrom tempfile import NamedTemporaryFile\nfrom zipfile import ZipFile, is_zipfile\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parallel_load_data, parse_text_file_to_data\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, FileInput, IntInput, Output\nfrom langflow.schema import Data\n\n\nclass FileComponent(Component):\n \"\"\"Handles loading of individual or zipped text files.\n\n Processes multiple valid files within a zip archive if provided.\n\n Attributes:\n display_name: Display name of the component.\n description: Brief component description.\n icon: Icon to represent the component.\n name: Identifier for the component.\n inputs: Inputs required by the component.\n outputs: Output of the component after processing files.\n \"\"\"\n\n display_name = \"File\"\n description = \"Load a file to be used in your project.\"\n icon = \"file-text\"\n name = \"File\"\n\n inputs = [\n FileInput(\n name=\"path\",\n display_name=\"Path\",\n file_types=[*TEXT_FILE_TYPES, \"zip\"],\n info=f\"Supported file types: {', '.join([*TEXT_FILE_TYPES, 'zip'])}\",\n ),\n BoolInput(\n name=\"silent_errors\",\n display_name=\"Silent Errors\",\n advanced=True,\n info=\"If true, errors will not raise an exception.\",\n ),\n BoolInput(\n name=\"use_multithreading\",\n display_name=\"Use Multithreading\",\n advanced=True,\n info=\"If true, parallel processing will be enabled for zip files.\",\n ),\n IntInput(\n name=\"concurrency_multithreading\",\n display_name=\"Multithreading Concurrency\",\n advanced=True,\n info=\"The maximum number of workers to use, if concurrency is enabled\",\n value=4,\n ),\n ]\n\n outputs = [Output(display_name=\"Data\", name=\"data\", method=\"load_file\")]\n\n def load_file(self) -> Data:\n \"\"\"Load and parse file(s) from a zip archive.\n\n Raises:\n ValueError: If no file is uploaded or file path is invalid.\n\n Returns:\n Data: Parsed data from file(s).\n \"\"\"\n # Check if the file 
path is provided\n if not self.path:\n self.log(\"File path is missing.\")\n msg = \"Please upload a file for processing.\"\n\n raise ValueError(msg)\n\n resolved_path = Path(self.resolve_path(self.path))\n try:\n # Check if the file is a zip archive\n if is_zipfile(resolved_path):\n self.log(f\"Processing zip file: {resolved_path.name}.\")\n\n return self._process_zip_file(\n resolved_path,\n silent_errors=self.silent_errors,\n parallel=self.use_multithreading,\n )\n\n self.log(f\"Processing single file: {resolved_path.name}.\")\n\n return self._process_single_file(resolved_path, silent_errors=self.silent_errors)\n except FileNotFoundError:\n self.log(f\"File not found: {resolved_path.name}.\")\n\n raise\n\n def _process_zip_file(self, zip_path: Path, *, silent_errors: bool = False, parallel: bool = False) -> Data:\n \"\"\"Process text files within a zip archive.\n\n Args:\n zip_path: Path to the zip file.\n silent_errors: Suppresses errors if True.\n parallel: Enables parallel processing if True.\n\n Returns:\n list[Data]: Combined data from all valid files.\n\n Raises:\n ValueError: If no valid files found in the archive.\n \"\"\"\n data: list[Data] = []\n with ZipFile(zip_path, \"r\") as zip_file:\n # Filter file names based on extensions in TEXT_FILE_TYPES and ignore hidden files\n valid_files = [\n name\n for name in zip_file.namelist()\n if (\n any(name.endswith(ext) for ext in TEXT_FILE_TYPES)\n and not name.startswith(\"__MACOSX\")\n and not name.startswith(\".\")\n )\n ]\n\n # Raise an error if no valid files found\n if not valid_files:\n self.log(\"No valid files in the zip archive.\")\n\n # Return empty data if silent_errors is True\n if silent_errors:\n return data # type: ignore[return-value]\n\n # Raise an error if no valid files found\n msg = \"No valid files in the zip archive.\"\n raise ValueError(msg)\n\n # Define a function to process each file\n def process_file(file_name, silent_errors=silent_errors):\n with NamedTemporaryFile(delete=False) as 
temp_file:\n temp_path = Path(temp_file.name).with_name(file_name)\n with zip_file.open(file_name) as file_content:\n temp_path.write_bytes(file_content.read())\n try:\n return self._process_single_file(temp_path, silent_errors=silent_errors)\n finally:\n temp_path.unlink()\n\n # Process files in parallel if specified\n if parallel:\n self.log(\n f\"Initializing parallel Thread Pool Executor with max workers: \"\n f\"{self.concurrency_multithreading}.\"\n )\n\n # Process files in parallel\n initial_data = parallel_load_data(\n valid_files,\n silent_errors=silent_errors,\n load_function=process_file,\n max_concurrency=self.concurrency_multithreading,\n )\n\n # Filter out empty data\n data = list(filter(None, initial_data))\n else:\n # Sequential processing\n data = [process_file(file_name) for file_name in valid_files]\n\n self.log(f\"Successfully processed zip file: {zip_path.name}.\")\n\n return data # type: ignore[return-value]\n\n def _process_single_file(self, file_path: Path, *, silent_errors: bool = False) -> Data:\n \"\"\"Process a single file.\n\n Args:\n file_path: Path to the file.\n silent_errors: Suppresses errors if True.\n\n Returns:\n Data: Parsed data from the file.\n\n Raises:\n ValueError: For unsupported file formats.\n \"\"\"\n # Check if the file type is supported\n if not any(file_path.suffix == ext for ext in [\".\" + f for f in TEXT_FILE_TYPES]):\n self.log(f\"Unsupported file type: {file_path.suffix}\")\n\n # Return empty data if silent_errors is True\n if silent_errors:\n return Data()\n\n msg = f\"Unsupported file type: {file_path.suffix}\"\n raise ValueError(msg)\n\n try:\n # Parse the text file as appropriate\n data = parse_text_file_to_data(str(file_path), silent_errors=silent_errors) # type: ignore[assignment]\n if not data:\n data = Data()\n\n self.log(f\"Successfully processed file: {file_path.name}.\")\n except Exception as e:\n self.log(f\"Error processing file {file_path.name}: {e}\")\n\n # Return empty data if silent_errors is 
True\n if not silent_errors:\n raise\n\n data = Data()\n\n return data\n", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "password": false, | |
| "name": "code", | |
| "advanced": true, | |
| "dynamic": true, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false | |
| }, | |
| "concurrency_multithreading": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "concurrency_multithreading", | |
| "value": 4, | |
| "display_name": "Multithreading Concurrency", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "The maximum number of workers to use, if concurrency is enabled.", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "silent_errors": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "silent_errors", | |
| "value": false, | |
| "display_name": "Silent Errors", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "If true, errors will not raise an exception.", | |
| "title_case": false, | |
| "type": "bool", | |
| "_input_type": "BoolInput" | |
| }, | |
| "use_multithreading": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "use_multithreading", | |
| "value": false, | |
| "display_name": "Use Multithreading", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "If true, parallel processing will be enabled for zip files.", | |
| "title_case": false, | |
| "type": "bool", | |
| "_input_type": "BoolInput" | |
| } | |
| }, | |
| "description": "Load a file to be used in your project.", | |
| "icon": "file-text", | |
| "base_classes": [ | |
| "Data" | |
| ], | |
| "display_name": "File", | |
| "documentation": "", | |
| "custom_fields": {}, | |
| "output_types": [], | |
| "pinned": false, | |
| "conditional_paths": [], | |
| "frozen": false, | |
| "outputs": [ | |
| { | |
| "types": [ | |
| "Data" | |
| ], | |
| "selected": "Data", | |
| "name": "data", | |
| "display_name": "Data", | |
| "method": "load_file", | |
| "value": "__UNDEFINED__", | |
| "cache": true | |
| } | |
| ], | |
| "field_order": [ | |
| "path", | |
| "silent_errors", | |
| "use_multithreading", | |
| "concurrency_multithreading" | |
| ], | |
| "beta": false, | |
| "legacy": false, | |
| "edited": false, | |
| "metadata": {}, | |
| "tool_mode": false, | |
| "lf_version": "1.1.1" | |
| }, | |
| "type": "File", | |
| "id": "File-8FCE5" | |
| }, | |
| "selected": false, | |
| "width": 320, | |
| "height": 231, | |
| "positionAbsolute": { | |
| "x": -3240.8326971391325, | |
| "y": 1083.8177747002546 | |
| }, | |
| "dragging": false | |
| }, | |
| { | |
| "id": "SplitText-su1Lz", | |
| "type": "genericNode", | |
| "position": { | |
| "x": -2684.7300902561956, | |
| "y": 1237.11116422028 | |
| }, | |
| "data": { | |
| "node": { | |
| "template": { | |
| "_type": "Component", | |
| "data_inputs": { | |
| "trace_as_metadata": true, | |
| "list": true, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "data_inputs", | |
| "value": "", | |
| "display_name": "Data Inputs", | |
| "advanced": false, | |
| "input_types": [ | |
| "Data" | |
| ], | |
| "dynamic": false, | |
| "info": "The data to split.", | |
| "title_case": false, | |
| "type": "other", | |
| "_input_type": "HandleInput" | |
| }, | |
| "chunk_overlap": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chunk_overlap", | |
| "value": 200, | |
| "display_name": "Chunk Overlap", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "Number of characters to overlap between chunks.", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "chunk_size": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chunk_size", | |
| "value": 1000, | |
| "display_name": "Chunk Size", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "The maximum number of characters in each chunk.", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "code": { | |
| "type": "code", | |
| "required": true, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "from langchain_text_splitters import CharacterTextSplitter\n\nfrom langflow.custom import Component\nfrom langflow.io import HandleInput, IntInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.utils.util import unescape_string\n\n\nclass SplitTextComponent(Component):\n display_name: str = \"Split Text\"\n description: str = \"Split text into chunks based on specified criteria.\"\n icon = \"scissors-line-dashed\"\n name = \"SplitText\"\n\n inputs = [\n HandleInput(\n name=\"data_inputs\",\n display_name=\"Data Inputs\",\n info=\"The data to split.\",\n input_types=[\"Data\"],\n is_list=True,\n ),\n IntInput(\n name=\"chunk_overlap\",\n display_name=\"Chunk Overlap\",\n info=\"Number of characters to overlap between chunks.\",\n value=200,\n ),\n IntInput(\n name=\"chunk_size\",\n display_name=\"Chunk Size\",\n info=\"The maximum number of characters in each chunk.\",\n value=1000,\n ),\n MessageTextInput(\n name=\"separator\",\n display_name=\"Separator\",\n info=\"The character to split on. Defaults to newline.\",\n value=\"\\n\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Chunks\", name=\"chunks\", method=\"split_text\"),\n ]\n\n def _docs_to_data(self, docs):\n return [Data(text=doc.page_content, data=doc.metadata) for doc in docs]\n\n def split_text(self) -> list[Data]:\n separator = unescape_string(self.separator)\n\n documents = [_input.to_lc_document() for _input in self.data_inputs if isinstance(_input, Data)]\n\n splitter = CharacterTextSplitter(\n chunk_overlap=self.chunk_overlap,\n chunk_size=self.chunk_size,\n separator=separator,\n )\n docs = splitter.split_documents(documents)\n data = self._docs_to_data(docs)\n self.status = data\n return data\n", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "password": false, | |
| "name": "code", | |
| "advanced": true, | |
| "dynamic": true, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false | |
| }, | |
| "separator": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "separator", | |
| "value": "\n", | |
| "display_name": "Separator", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "The character to split on. Defaults to newline.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| } | |
| }, | |
| "description": "Split text into chunks based on specified criteria.", | |
| "icon": "scissors-line-dashed", | |
| "base_classes": [ | |
| "Data" | |
| ], | |
| "display_name": "Split Text", | |
| "documentation": "", | |
| "custom_fields": {}, | |
| "output_types": [], | |
| "pinned": false, | |
| "conditional_paths": [], | |
| "frozen": false, | |
| "outputs": [ | |
| { | |
| "types": [ | |
| "Data" | |
| ], | |
| "selected": "Data", | |
| "name": "chunks", | |
| "display_name": "Chunks", | |
| "method": "split_text", | |
| "value": "__UNDEFINED__", | |
| "cache": true | |
| } | |
| ], | |
| "field_order": [ | |
| "data_inputs", | |
| "chunk_overlap", | |
| "chunk_size", | |
| "separator" | |
| ], | |
| "beta": false, | |
| "legacy": false, | |
| "edited": false, | |
| "metadata": {}, | |
| "tool_mode": false, | |
| "lf_version": "1.1.1" | |
| }, | |
| "type": "SplitText", | |
| "id": "SplitText-su1Lz" | |
| }, | |
| "selected": false, | |
| "width": 320, | |
| "height": 473, | |
| "positionAbsolute": { | |
| "x": -2684.7300902561956, | |
| "y": 1237.11116422028 | |
| }, | |
| "dragging": false | |
| }, | |
| { | |
| "id": "OllamaEmbeddings-bI0Sn", | |
| "type": "genericNode", | |
| "position": { | |
| "x": -2920.0574290747554, | |
| "y": 1878.3802014564817 | |
| }, | |
| "data": { | |
| "node": { | |
| "template": { | |
| "_type": "Component", | |
| "base_url": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "base_url", | |
| "value": "http://localhost:11434", | |
| "display_name": "Ollama Base URL", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| }, | |
| "code": { | |
| "type": "code", | |
| "required": true, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "from langchain_ollama import OllamaEmbeddings\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import MessageTextInput, Output\n\n\nclass OllamaEmbeddingsComponent(LCModelComponent):\n display_name: str = \"Ollama Embeddings\"\n description: str = \"Generate embeddings using Ollama models.\"\n documentation = \"https://python.langchain.com/docs/integrations/text_embedding/ollama\"\n icon = \"Ollama\"\n name = \"OllamaEmbeddings\"\n\n inputs = [\n MessageTextInput(\n name=\"model\",\n display_name=\"Ollama Model\",\n value=\"nomic-embed-text\",\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Ollama Base URL\",\n value=\"http://localhost:11434\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Embeddings\", name=\"embeddings\", method=\"build_embeddings\"),\n ]\n\n def build_embeddings(self) -> Embeddings:\n try:\n output = OllamaEmbeddings(model=self.model, base_url=self.base_url)\n except Exception as e:\n msg = \"Could not connect to Ollama API.\"\n raise ValueError(msg) from e\n return output\n", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "password": false, | |
| "name": "code", | |
| "advanced": true, | |
| "dynamic": true, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false | |
| }, | |
| "model": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "model", | |
| "value": "nomic-embed-text", | |
| "display_name": "Ollama Model", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MessageTextInput" | |
| } | |
| }, | |
| "description": "Generate embeddings using Ollama models.", | |
| "icon": "Ollama", | |
| "base_classes": [ | |
| "Embeddings" | |
| ], | |
| "display_name": "Ollama Embeddings", | |
| "documentation": "https://python.langchain.com/docs/integrations/text_embedding/ollama", | |
| "custom_fields": {}, | |
| "output_types": [], | |
| "pinned": false, | |
| "conditional_paths": [], | |
| "frozen": false, | |
| "outputs": [ | |
| { | |
| "types": [ | |
| "Embeddings" | |
| ], | |
| "selected": "Embeddings", | |
| "name": "embeddings", | |
| "display_name": "Embeddings", | |
| "method": "build_embeddings", | |
| "value": "__UNDEFINED__", | |
| "cache": true | |
| } | |
| ], | |
| "field_order": [ | |
| "model", | |
| "base_url" | |
| ], | |
| "beta": false, | |
| "legacy": false, | |
| "edited": false, | |
| "metadata": {}, | |
| "tool_mode": false, | |
| "lf_version": "1.1.1" | |
| }, | |
| "type": "OllamaEmbeddings", | |
| "id": "OllamaEmbeddings-bI0Sn" | |
| }, | |
| "selected": false, | |
| "width": 320, | |
| "height": 319, | |
| "dragging": false, | |
| "positionAbsolute": { | |
| "x": -2920.0574290747554, | |
| "y": 1878.3802014564817 | |
| } | |
| }, | |
| { | |
| "id": "Chroma-DxtBN", | |
| "type": "genericNode", | |
| "position": { | |
| "x": -516.0152291152134, | |
| "y": 718.6153049589478 | |
| }, | |
| "data": { | |
| "node": { | |
| "template": { | |
| "_type": "Component", | |
| "embedding": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "embedding", | |
| "value": "", | |
| "display_name": "Embedding", | |
| "advanced": false, | |
| "input_types": [ | |
| "Embeddings" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "other", | |
| "_input_type": "HandleInput" | |
| }, | |
| "ingest_data": { | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "list": true, | |
| "trace_as_input": true, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "ingest_data", | |
| "value": "", | |
| "display_name": "Ingest Data", | |
| "advanced": false, | |
| "input_types": [ | |
| "Data" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "other", | |
| "_input_type": "DataInput" | |
| }, | |
| "allow_duplicates": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "allow_duplicates", | |
| "value": false, | |
| "display_name": "Allow Duplicates", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "If false, will not add documents that are already in the Vector Store.", | |
| "title_case": false, | |
| "type": "bool", | |
| "_input_type": "BoolInput" | |
| }, | |
| "chroma_server_cors_allow_origins": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chroma_server_cors_allow_origins", | |
| "value": "", | |
| "display_name": "Server CORS Allow Origins", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "chroma_server_grpc_port": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chroma_server_grpc_port", | |
| "value": "", | |
| "display_name": "Server gRPC Port", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "chroma_server_host": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chroma_server_host", | |
| "value": "", | |
| "display_name": "Server Host", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "chroma_server_http_port": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chroma_server_http_port", | |
| "value": "", | |
| "display_name": "Server HTTP Port", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "chroma_server_ssl_enabled": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "chroma_server_ssl_enabled", | |
| "value": false, | |
| "display_name": "Server SSL Enabled", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "bool", | |
| "_input_type": "BoolInput" | |
| }, | |
| "code": { | |
| "type": "code", | |
| "required": true, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "from copy import deepcopy\n\nfrom chromadb.config import Settings\nfrom langchain_chroma import Chroma\nfrom loguru import logger\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.base.vectorstores.utils import chroma_collection_to_data\nfrom langflow.io import BoolInput, DataInput, DropdownInput, HandleInput, IntInput, MultilineInput, StrInput\nfrom langflow.schema import Data\n\n\nclass ChromaVectorStoreComponent(LCVectorStoreComponent):\n \"\"\"Chroma Vector Store with search capabilities.\"\"\"\n\n display_name: str = \"Chroma DB\"\n description: str = \"Chroma Vector Store with search capabilities\"\n documentation = \"https://python.langchain.com/docs/integrations/vectorstores/chroma\"\n name = \"Chroma\"\n icon = \"Chroma\"\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n value=\"langflow\",\n ),\n StrInput(\n name=\"persist_directory\",\n display_name=\"Persist Directory\",\n ),\n MultilineInput(\n name=\"search_query\",\n display_name=\"Search Query\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n HandleInput(name=\"embedding\", display_name=\"Embedding\", input_types=[\"Embeddings\"]),\n StrInput(\n name=\"chroma_server_cors_allow_origins\",\n display_name=\"Server CORS Allow Origins\",\n advanced=True,\n ),\n StrInput(\n name=\"chroma_server_host\",\n display_name=\"Server Host\",\n advanced=True,\n ),\n IntInput(\n name=\"chroma_server_http_port\",\n display_name=\"Server HTTP Port\",\n advanced=True,\n ),\n IntInput(\n name=\"chroma_server_grpc_port\",\n display_name=\"Server gRPC Port\",\n advanced=True,\n ),\n BoolInput(\n name=\"chroma_server_ssl_enabled\",\n display_name=\"Server SSL Enabled\",\n advanced=True,\n ),\n BoolInput(\n name=\"allow_duplicates\",\n display_name=\"Allow Duplicates\",\n advanced=True,\n info=\"If false, will not add documents that are already in the Vector 
Store.\",\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n options=[\"Similarity\", \"MMR\"],\n value=\"Similarity\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=10,\n ),\n IntInput(\n name=\"limit\",\n display_name=\"Limit\",\n advanced=True,\n info=\"Limit the number of records to compare when Allow Duplicates is False.\",\n ),\n ]\n\n @check_cached_vector_store\n def build_vector_store(self) -> Chroma:\n \"\"\"Builds the Chroma object.\"\"\"\n try:\n from chromadb import Client\n from langchain_chroma import Chroma\n except ImportError as e:\n msg = \"Could not import Chroma integration package. Please install it with `pip install langchain-chroma`.\"\n raise ImportError(msg) from e\n # Chroma settings\n chroma_settings = None\n client = None\n if self.chroma_server_host:\n chroma_settings = Settings(\n chroma_server_cors_allow_origins=self.chroma_server_cors_allow_origins or [],\n chroma_server_host=self.chroma_server_host,\n chroma_server_http_port=self.chroma_server_http_port or None,\n chroma_server_grpc_port=self.chroma_server_grpc_port or None,\n chroma_server_ssl_enabled=self.chroma_server_ssl_enabled,\n )\n client = Client(settings=chroma_settings)\n\n # Check persist_directory and expand it if it is a relative path\n persist_directory = self.resolve_path(self.persist_directory) if self.persist_directory is not None else None\n\n chroma = Chroma(\n persist_directory=persist_directory,\n client=client,\n embedding_function=self.embedding,\n collection_name=self.collection_name,\n )\n\n self._add_documents_to_vector_store(chroma)\n self.status = chroma_collection_to_data(chroma.get(limit=self.limit))\n return chroma\n\n def _add_documents_to_vector_store(self, vector_store: \"Chroma\") -> None:\n \"\"\"Adds documents to the Vector Store.\"\"\"\n if not self.ingest_data:\n self.status = \"\"\n return\n\n 
_stored_documents_without_id = []\n if self.allow_duplicates:\n stored_data = []\n else:\n stored_data = chroma_collection_to_data(vector_store.get(limit=self.limit))\n for value in deepcopy(stored_data):\n del value.id\n _stored_documents_without_id.append(value)\n\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n if _input not in _stored_documents_without_id:\n documents.append(_input.to_lc_document())\n else:\n msg = \"Vector Store Inputs must be Data objects.\"\n raise TypeError(msg)\n\n if documents and self.embedding is not None:\n logger.debug(f\"Adding {len(documents)} documents to the Vector Store.\")\n vector_store.add_documents(documents)\n else:\n logger.debug(\"No documents to add to the Vector Store.\")\n", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "password": false, | |
| "name": "code", | |
| "advanced": true, | |
| "dynamic": true, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false | |
| }, | |
| "collection_name": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "collection_name", | |
| "value": "langflow", | |
| "display_name": "Collection Name", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "limit": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "limit", | |
| "value": "", | |
| "display_name": "Limit", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Limit the number of records to compare when Allow Duplicates is False.", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "number_of_results": { | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "number_of_results", | |
| "value": 10, | |
| "display_name": "Number of Results", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "Number of results to return.", | |
| "title_case": false, | |
| "type": "int", | |
| "_input_type": "IntInput" | |
| }, | |
| "persist_directory": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "persist_directory", | |
| "value": "F:\\rag\\ragflow ai apps\\chroma for rag", | |
| "display_name": "Persist Directory", | |
| "advanced": false, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "search_query": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "multiline": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "search_query", | |
| "value": "", | |
| "display_name": "Search Query", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MultilineInput" | |
| }, | |
| "search_type": { | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "options": [ | |
| "Similarity", | |
| "MMR" | |
| ], | |
| "combobox": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "search_type", | |
| "value": "Similarity", | |
| "display_name": "Search Type", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "DropdownInput" | |
| } | |
| }, | |
| "description": "Chroma Vector Store with search capabilities", | |
| "icon": "Chroma", | |
| "base_classes": [ | |
| "Data", | |
| "Retriever" | |
| ], | |
| "display_name": "Chroma DB", | |
| "documentation": "https://python.langchain.com/docs/integrations/vectorstores/chroma", | |
| "custom_fields": {}, | |
| "output_types": [], | |
| "pinned": false, | |
| "conditional_paths": [], | |
| "frozen": false, | |
| "outputs": [ | |
| { | |
| "types": [ | |
| "Retriever" | |
| ], | |
| "selected": "Retriever", | |
| "name": "base_retriever", | |
| "display_name": "Retriever", | |
| "method": "build_base_retriever", | |
| "value": "__UNDEFINED__", | |
| "cache": true, | |
| "required_inputs": [] | |
| }, | |
| { | |
| "types": [ | |
| "Data" | |
| ], | |
| "selected": "Data", | |
| "name": "search_results", | |
| "display_name": "Search Results", | |
| "method": "search_documents", | |
| "value": "__UNDEFINED__", | |
| "cache": true, | |
| "required_inputs": [] | |
| } | |
| ], | |
| "field_order": [ | |
| "collection_name", | |
| "persist_directory", | |
| "search_query", | |
| "ingest_data", | |
| "embedding", | |
| "chroma_server_cors_allow_origins", | |
| "chroma_server_host", | |
| "chroma_server_http_port", | |
| "chroma_server_grpc_port", | |
| "chroma_server_ssl_enabled", | |
| "allow_duplicates", | |
| "search_type", | |
| "number_of_results", | |
| "limit" | |
| ], | |
| "beta": false, | |
| "legacy": false, | |
| "edited": false, | |
| "metadata": {}, | |
| "tool_mode": false, | |
| "lf_version": "1.1.1" | |
| }, | |
| "type": "Chroma", | |
| "id": "Chroma-DxtBN" | |
| }, | |
| "selected": false, | |
| "width": 320, | |
| "height": 549, | |
| "positionAbsolute": { | |
| "x": -516.0152291152134, | |
| "y": 718.6153049589478 | |
| }, | |
| "dragging": false | |
| }, | |
| { | |
| "id": "ParseData-MTs3I", | |
| "type": "genericNode", | |
| "position": { | |
| "x": -95.25727991837161, | |
| "y": 1199.8338263410526 | |
| }, | |
| "data": { | |
| "node": { | |
| "template": { | |
| "_type": "Component", | |
| "data": { | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "list": false, | |
| "trace_as_input": true, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "data", | |
| "value": "", | |
| "display_name": "Data", | |
| "advanced": false, | |
| "input_types": [ | |
| "Data" | |
| ], | |
| "dynamic": false, | |
| "info": "The data to convert to text.", | |
| "title_case": false, | |
| "type": "other", | |
| "_input_type": "DataInput" | |
| }, | |
| "code": { | |
| "type": "code", | |
| "required": true, | |
| "placeholder": "", | |
| "list": false, | |
| "show": true, | |
| "multiline": true, | |
| "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. \"\n \"It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "password": false, | |
| "name": "code", | |
| "advanced": true, | |
| "dynamic": true, | |
| "info": "", | |
| "load_from_db": false, | |
| "title_case": false | |
| }, | |
| "sep": { | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "sep", | |
| "value": "\n", | |
| "display_name": "Separator", | |
| "advanced": true, | |
| "dynamic": false, | |
| "info": "", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "StrInput" | |
| }, | |
| "template": { | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "multiline": true, | |
| "trace_as_metadata": true, | |
| "load_from_db": false, | |
| "list": false, | |
| "required": false, | |
| "placeholder": "", | |
| "show": true, | |
| "name": "template", | |
| "value": "{text}", | |
| "display_name": "Template", | |
| "advanced": false, | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "dynamic": false, | |
| "info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", | |
| "title_case": false, | |
| "type": "str", | |
| "_input_type": "MultilineInput" | |
| } | |
| }, | |
| "description": "Convert Data into plain text following a specified template.", | |
| "icon": "braces", | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "display_name": "Parse Data", | |
| "documentation": "", | |
| "custom_fields": {}, | |
| "output_types": [], | |
| "pinned": false, | |
| "conditional_paths": [], | |
| "frozen": false, | |
| "outputs": [ | |
| { | |
| "types": [ | |
| "Message" | |
| ], | |
| "selected": "Message", | |
| "name": "text", | |
| "display_name": "Text", | |
| "method": "parse_data", | |
| "value": "__UNDEFINED__", | |
| "cache": true | |
| } | |
| ], | |
| "field_order": [ | |
| "data", | |
| "template", | |
| "sep" | |
| ], | |
| "beta": false, | |
| "legacy": false, | |
| "edited": false, | |
| "metadata": {}, | |
| "tool_mode": false, | |
| "category": "processing", | |
| "key": "ParseData", | |
| "score": 0.007568328950209746, | |
| "lf_version": "1.1.1" | |
| }, | |
| "type": "ParseData", | |
| "id": "ParseData-MTs3I" | |
| }, | |
| "selected": false, | |
| "width": 320, | |
| "height": 301 | |
| } | |
| ], | |
| "edges": [ | |
| { | |
| "source": "OllamaModel-s3Gtg", | |
| "sourceHandle": "{œdataTypeœ:œOllamaModelœ,œidœ:œOllamaModel-s3Gtgœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "ChatOutput-QK9Qv", | |
| "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-QK9Qvœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "data": { | |
| "targetHandle": { | |
| "fieldName": "input_value", | |
| "id": "ChatOutput-QK9Qv", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| }, | |
| "sourceHandle": { | |
| "dataType": "OllamaModel", | |
| "id": "OllamaModel-s3Gtg", | |
| "name": "text_output", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| } | |
| }, | |
| "id": "reactflow__edge-OllamaModel-s3Gtg{œdataTypeœ:œOllamaModelœ,œidœ:œOllamaModel-s3Gtgœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-QK9Qv{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-QK9Qvœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "animated": false, | |
| "className": "" | |
| }, | |
| { | |
| "source": "ChatInput-VybAX", | |
| "sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-VybAXœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Prompt-K9oDg", | |
| "targetHandle": "{œfieldNameœ:œquestionœ,œidœ:œPrompt-K9oDgœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", | |
| "data": { | |
| "targetHandle": { | |
| "fieldName": "question", | |
| "id": "Prompt-K9oDg", | |
| "inputTypes": [ | |
| "Message", | |
| "Text" | |
| ], | |
| "type": "str" | |
| }, | |
| "sourceHandle": { | |
| "dataType": "ChatInput", | |
| "id": "ChatInput-VybAX", | |
| "name": "message", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| } | |
| }, | |
| "id": "reactflow__edge-ChatInput-VybAX{œdataTypeœ:œChatInputœ,œidœ:œChatInput-VybAXœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-K9oDg{œfieldNameœ:œquestionœ,œidœ:œPrompt-K9oDgœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "animated": false, | |
| "className": "" | |
| }, | |
| { | |
| "source": "Prompt-K9oDg", | |
| "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-K9oDgœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "OllamaModel-s3Gtg", | |
| "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOllamaModel-s3Gtgœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "data": { | |
| "targetHandle": { | |
| "fieldName": "input_value", | |
| "id": "OllamaModel-s3Gtg", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| }, | |
| "sourceHandle": { | |
| "dataType": "Prompt", | |
| "id": "Prompt-K9oDg", | |
| "name": "prompt", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| } | |
| }, | |
| "id": "reactflow__edge-Prompt-K9oDg{œdataTypeœ:œPromptœ,œidœ:œPrompt-K9oDgœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OllamaModel-s3Gtg{œfieldNameœ:œinput_valueœ,œidœ:œOllamaModel-s3Gtgœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "animated": false, | |
| "className": "" | |
| }, | |
| { | |
| "source": "File-8FCE5", | |
| "sourceHandle": "{œdataTypeœ:œFileœ,œidœ:œFile-8FCE5œ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}", | |
| "target": "SplitText-su1Lz", | |
| "targetHandle": "{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-su1Lzœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
| "data": { | |
| "targetHandle": { | |
| "fieldName": "data_inputs", | |
| "id": "SplitText-su1Lz", | |
| "inputTypes": [ | |
| "Data" | |
| ], | |
| "type": "other" | |
| }, | |
| "sourceHandle": { | |
| "dataType": "File", | |
| "id": "File-8FCE5", | |
| "name": "data", | |
| "output_types": [ | |
| "Data" | |
| ] | |
| } | |
| }, | |
| "id": "reactflow__edge-File-8FCE5{œdataTypeœ:œFileœ,œidœ:œFile-8FCE5œ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-SplitText-su1Lz{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-su1Lzœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
| "animated": false, | |
| "className": "", | |
| "selected": false | |
| }, | |
| { | |
| "source": "SplitText-su1Lz", | |
| "sourceHandle": "{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-su1Lzœ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}", | |
| "target": "Chroma-u9YZ4", | |
| "targetHandle": "{œfieldNameœ:œingest_dataœ,œidœ:œChroma-u9YZ4œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
| "data": { | |
| "targetHandle": { | |
| "fieldName": "ingest_data", | |
| "id": "Chroma-u9YZ4", | |
| "inputTypes": [ | |
| "Data" | |
| ], | |
| "type": "other" | |
| }, | |
| "sourceHandle": { | |
| "dataType": "SplitText", | |
| "id": "SplitText-su1Lz", | |
| "name": "chunks", | |
| "output_types": [ | |
| "Data" | |
| ] | |
| } | |
| }, | |
| "id": "reactflow__edge-SplitText-su1Lz{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-su1Lzœ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}-Chroma-u9YZ4{œfieldNameœ:œingest_dataœ,œidœ:œChroma-u9YZ4œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
| "animated": false, | |
| "className": "", | |
| "selected": false | |
| }, | |
| { | |
| "source": "OllamaEmbeddings-bI0Sn", | |
| "sourceHandle": "{œdataTypeœ:œOllamaEmbeddingsœ,œidœ:œOllamaEmbeddings-bI0Snœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}", | |
| "target": "Chroma-u9YZ4", | |
| "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œChroma-u9YZ4œ,œinputTypesœ:[œEmbeddingsœ],œtypeœ:œotherœ}", | |
| "data": { | |
| "targetHandle": { | |
| "fieldName": "embedding", | |
| "id": "Chroma-u9YZ4", | |
| "inputTypes": [ | |
| "Embeddings" | |
| ], | |
| "type": "other" | |
| }, | |
| "sourceHandle": { | |
| "dataType": "OllamaEmbeddings", | |
| "id": "OllamaEmbeddings-bI0Sn", | |
| "name": "embeddings", | |
| "output_types": [ | |
| "Embeddings" | |
| ] | |
| } | |
| }, | |
| "id": "reactflow__edge-OllamaEmbeddings-bI0Sn{œdataTypeœ:œOllamaEmbeddingsœ,œidœ:œOllamaEmbeddings-bI0Snœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-Chroma-u9YZ4{œfieldNameœ:œembeddingœ,œidœ:œChroma-u9YZ4œ,œinputTypesœ:[œEmbeddingsœ],œtypeœ:œotherœ}", | |
| "animated": false, | |
| "className": "", | |
| "selected": false | |
| }, | |
| { | |
| "source": "ChatInput-VybAX", | |
| "sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-VybAXœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Chroma-DxtBN", | |
| "targetHandle": "{œfieldNameœ:œsearch_queryœ,œidœ:œChroma-DxtBNœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "data": { | |
| "targetHandle": { | |
| "fieldName": "search_query", | |
| "id": "Chroma-DxtBN", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| }, | |
| "sourceHandle": { | |
| "dataType": "ChatInput", | |
| "id": "ChatInput-VybAX", | |
| "name": "message", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| } | |
| }, | |
| "id": "reactflow__edge-ChatInput-VybAX{œdataTypeœ:œChatInputœ,œidœ:œChatInput-VybAXœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Chroma-DxtBN{œfieldNameœ:œsearch_queryœ,œidœ:œChroma-DxtBNœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "animated": false, | |
| "className": "" | |
| }, | |
| { | |
| "source": "Chroma-DxtBN", | |
| "sourceHandle": "{œdataTypeœ:œChromaœ,œidœ:œChroma-DxtBNœ,œnameœ:œsearch_resultsœ,œoutput_typesœ:[œDataœ]}", | |
| "target": "ParseData-MTs3I", | |
| "targetHandle": "{œfieldNameœ:œdataœ,œidœ:œParseData-MTs3Iœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
| "data": { | |
| "targetHandle": { | |
| "fieldName": "data", | |
| "id": "ParseData-MTs3I", | |
| "inputTypes": [ | |
| "Data" | |
| ], | |
| "type": "other" | |
| }, | |
| "sourceHandle": { | |
| "dataType": "Chroma", | |
| "id": "Chroma-DxtBN", | |
| "name": "search_results", | |
| "output_types": [ | |
| "Data" | |
| ] | |
| } | |
| }, | |
| "id": "reactflow__edge-Chroma-DxtBN{œdataTypeœ:œChromaœ,œidœ:œChroma-DxtBNœ,œnameœ:œsearch_resultsœ,œoutput_typesœ:[œDataœ]}-ParseData-MTs3I{œfieldNameœ:œdataœ,œidœ:œParseData-MTs3Iœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
| "animated": false, | |
| "className": "" | |
| }, | |
| { | |
| "source": "ParseData-MTs3I", | |
| "sourceHandle": "{œdataTypeœ:œParseDataœ,œidœ:œParseData-MTs3Iœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Prompt-K9oDg", | |
| "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-K9oDgœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", | |
| "data": { | |
| "targetHandle": { | |
| "fieldName": "context", | |
| "id": "Prompt-K9oDg", | |
| "inputTypes": [ | |
| "Message", | |
| "Text" | |
| ], | |
| "type": "str" | |
| }, | |
| "sourceHandle": { | |
| "dataType": "ParseData", | |
| "id": "ParseData-MTs3I", | |
| "name": "text", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| } | |
| }, | |
| "id": "reactflow__edge-ParseData-MTs3I{œdataTypeœ:œParseDataœ,œidœ:œParseData-MTs3Iœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-K9oDg{œfieldNameœ:œcontextœ,œidœ:œPrompt-K9oDgœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", | |
| "animated": false, | |
| "className": "" | |
| }, | |
| { | |
| "source": "OllamaEmbeddings-bI0Sn", | |
| "sourceHandle": "{œdataTypeœ:œOllamaEmbeddingsœ,œidœ:œOllamaEmbeddings-bI0Snœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}", | |
| "target": "Chroma-DxtBN", | |
| "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œChroma-DxtBNœ,œinputTypesœ:[œEmbeddingsœ],œtypeœ:œotherœ}", | |
| "data": { | |
| "targetHandle": { | |
| "fieldName": "embedding", | |
| "id": "Chroma-DxtBN", | |
| "inputTypes": [ | |
| "Embeddings" | |
| ], | |
| "type": "other" | |
| }, | |
| "sourceHandle": { | |
| "dataType": "OllamaEmbeddings", | |
| "id": "OllamaEmbeddings-bI0Sn", | |
| "name": "embeddings", | |
| "output_types": [ | |
| "Embeddings" | |
| ] | |
| } | |
| }, | |
| "id": "reactflow__edge-OllamaEmbeddings-bI0Sn{œdataTypeœ:œOllamaEmbeddingsœ,œidœ:œOllamaEmbeddings-bI0Snœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-Chroma-DxtBN{œfieldNameœ:œembeddingœ,œidœ:œChroma-DxtBNœ,œinputTypesœ:[œEmbeddingsœ],œtypeœ:œotherœ}", | |
| "animated": false, | |
| "className": "" | |
| } | |
| ], | |
| "viewport": { | |
| "x": 705.618645865864, | |
| "y": -186.03309627876752, | |
| "zoom": 0.6909565958870666 | |
| } | |
| }, | |
| "description": "RAG chatbot: ingests a file into Chroma via Ollama embeddings, retrieves matching chunks for each chat question, and answers with an Ollama model.", | |
| "name": "chatbot", | |
| "last_tested_version": "1.1.1", | |
| "endpoint_name": null, | |
| "is_component": false | |
| } |