diff --git a/src/backend/base/langflow/components/tessai/__init__.py b/src/backend/base/langflow/components/tessai/__init__.py
new file mode 100644
index 000000000000..33d1570d2506
--- /dev/null
+++ b/src/backend/base/langflow/components/tessai/__init__.py
@@ -0,0 +1,5 @@
+from .associate_file_to_agent import TessAIAssociateFileToAgentComponent
+from .execute_agent import TessAIExecuteAgentComponent
+from .upload_file import TessAIUploadFileComponent
+
+__all__ = ["TessAIAssociateFileToAgentComponent", "TessAIExecuteAgentComponent", "TessAIUploadFileComponent"]
diff --git a/src/backend/base/langflow/components/tessai/associate_file_to_agent.py b/src/backend/base/langflow/components/tessai/associate_file_to_agent.py
new file mode 100644
index 000000000000..9bb562e53529
--- /dev/null
+++ b/src/backend/base/langflow/components/tessai/associate_file_to_agent.py
@@ -0,0 +1,58 @@
+import requests
+
+from langflow.custom import Component
+from langflow.inputs import DataInput, SecretStrInput, StrInput
+from langflow.io import Output
+from langflow.schema import Data
+
+
+class TessAIAssociateFileToAgentComponent(Component):
+ display_name = "Associate File to Agent"
+ description = "Associates a file with an agent in the TessAI platform."
+ documentation = "https://docs.tess.pareto.io/"
+ icon = "TessAI"
+
+ inputs = [
+ SecretStrInput(
+ name="api_key",
+ display_name="Tess AI API Key",
+ info="The API key to use for TessAI.",
+ advanced=False,
+ input_types=[]
+ ),
+ StrInput(
+ name="agent_id",
+ display_name="User-Owned Agent ID",
+ info="The ID of an agent you created in the Tess AI platform.",
+ required=True,
+ ),
+ DataInput(
+ name="files",
+ display_name="File(s)",
+ info="The file(s) to associate with the agent.",
+ required=True,
+ is_list=True
+ ),
+ ]
+
+ outputs = [Output(display_name="Association Result", name="association_result", method="associate_file_to_agent")]
+
+ BASE_URL = "https://tess.pareto.io/api"
+
+ def associate_file_to_agent(self) -> Data:
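+        """Link previously uploaded files to the agent.
+
+        Each incoming Data object is expected to carry the numeric ``id`` returned by the
+        Tess AI Upload File component; those IDs are posted to the agent's ``/files`` endpoint.
+        """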
+ headers = self._get_headers()
+ endpoint = f"{self.BASE_URL}/agents/{self.agent_id}/files?waitExecution=True"
+
+ try:
+ payload = {"file_ids": [int(file.data["id"]) for file in self.files]}
+
+            response = requests.post(endpoint, headers=headers, json=payload, timeout=60)
+ response.raise_for_status()
+ result = response.json()
+
+ return Data(data=result)
+ except requests.RequestException as e:
+ raise RuntimeError(f"Error associating file to agent: {e!s}") from e
+
+ def _get_headers(self) -> dict:
+ return {"Authorization": f"Bearer {self.api_key}", "Content-Type": "application/json"}
\ No newline at end of file
diff --git a/src/backend/base/langflow/components/tessai/execute_agent.py b/src/backend/base/langflow/components/tessai/execute_agent.py
new file mode 100644
index 000000000000..622eb98b69dc
--- /dev/null
+++ b/src/backend/base/langflow/components/tessai/execute_agent.py
@@ -0,0 +1,189 @@
+import json
+from copy import deepcopy
+
+import requests
+
+from langflow.custom import Component
+from langflow.inputs import BoolInput, DropdownInput, IntInput, MultilineInput, MultiselectInput, SecretStrInput, StrInput
+from langflow.io import Output
+from langflow.schema.message import Message
+
+
+class TessAIExecuteAgentComponent(Component):
+ display_name = "Execute Agent"
+ description = "Executes a TessAI agent."
+ documentation = "https://docs.tess.pareto.io/"
+ icon = "TessAI"
+
+ inputs = [
+ SecretStrInput(
+ name="api_key",
+ display_name="Tess AI API Key",
+ info="The API key to use for TessAI.",
+ advanced=False,
+ input_types=[]
+ ),
+ StrInput(
+ name="agent_id",
+ display_name="Agent ID",
+ required=True,
+ info="The ID of the agent to execute.",
+ real_time_refresh=True,
+ ),
+ ]
+
+ outputs = [Output(display_name="Output", name="output", method="execute_agent")]
+
+ BASE_URL = "https://tess.pareto.io/api"
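+    # Suffixes used to tag dynamically generated fields so they can be recognized,
+    # collected, and cleaned up when the agent ID changes.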
+ FIELD_SUFFIX = "_tess_ai_dynamic_field"
+ CHAT_MESSAGE_INPUT_SUFFIX = "_tess_ai_chat_message_input"
+
+ def execute_agent(self) -> Message:
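+        """Execute the agent with the collected dynamic field values and return its output.
+
+        The call uses ``waitExecution=true``, so the first response should already be in a
+        terminal state; its ``output`` text is wrapped in a Message.
+        """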
+ headers = self._get_headers()
+ execute_endpoint = f"{self.BASE_URL}/agents/{self.agent_id.strip()}/execute?waitExecution=true"
+ attributes = self._collect_dynamic_attributes()
+
+ try:
+            response = requests.post(execute_endpoint, headers=headers, json=attributes, timeout=600)
+ response.raise_for_status()
+ execution_data = response.json()
+
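+            # Anything other than a terminal status here is unexpected, so surface the full payload.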
+ if execution_data["responses"][0]["status"] not in ["succeeded", "failed", "error"]:
+ raise ValueError(json.dumps(execution_data))
+
+ response_id = execution_data["responses"][0]["id"]
+ response = self._get_agent_response(headers, response_id)
+ return Message(text=response.get("output", ""))
+ except requests.RequestException as e:
+ error_json = e.response.json() if e.response is not None else {"error": str(e)}
+ raise RuntimeError(json.dumps(error_json)) from e
+
+    def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None) -> dict:
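+        """Rebuild the dynamic inputs whenever the agent ID changes.
+
+        The agent definition is fetched and one input is created per "question" it declares,
+        preserving any values the user already set; previously generated fields are dropped.
+        """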
+ if field_name == "agent_id" and field_value and build_config.get("api_key", {}).get("value"):
+ try:
+ agent = self._get_agent(field_value)
+ old_build_config = deepcopy(dict(build_config))
+
+ for key in list(build_config.keys()):
+ if key.endswith(self.FIELD_SUFFIX):
+ del build_config[key]
+
+ questions = agent.get("questions", [])
+ for question in questions:
+ name = question.get("name", "")
+
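+                # Chat agents expose a "messages" question; tag it so it is converted into a
+                # chat-style payload when the agent is executed.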
+ if name == "messages" and agent.get("type") == "chat":
+ name += self.CHAT_MESSAGE_INPUT_SUFFIX
+
+ key = name + self.FIELD_SUFFIX
+ old_config = old_build_config.get(key, {})
+
+ field = self._create_field(key, question, old_config.get("value"))
+ config = field.model_dump(by_alias=True, exclude_none=True)
+
+ self.inputs.append(field)
+ build_config[key] = config
+
+ except requests.RequestException:
+ for key in list(build_config.keys()):
+ if key.endswith(self.FIELD_SUFFIX):
+ del build_config[key]
+
+ self.map_inputs(self.inputs)
+ self.build_inputs()
+
+ return build_config
+
+ def _get_headers(self) -> dict:
+ return {"Authorization": f"Bearer {self.api_key}", "accept": "*/*", "Content-Type": "application/json"}
+
+ def _get_agent(self, agent_id):
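+        """Fetch the agent definition used to build the dynamic fields."""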
+ endpoint = f"{self.BASE_URL}/agents/{agent_id}"
+        response = requests.get(endpoint, headers=self._get_headers(), timeout=30)
+
+ if response.status_code not in [200, 404]:
+            raise RuntimeError(json.dumps(response.json()))
+
+ return response.json()
+
+    def _get_agent_response(self, headers: dict, response_id: str) -> dict:
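+        """Retrieve the stored execution result for a single agent response."""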
+ endpoint = f"{self.BASE_URL}/agent-responses/{response_id}"
+ try:
+            response = requests.get(endpoint, headers=headers, timeout=30)
+ response.raise_for_status()
+ return response.json()
+ except requests.RequestException as e:
+ error_json = e.response.json() if e.response is not None else {"error": str(e)}
+ raise RuntimeError(json.dumps(error_json)) from e
+
+    def _create_field(self, key: str, question: dict, value: str | None = None):
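+        """Translate a Tess agent "question" definition into the matching Langflow input."""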
+ field_type = question.get("type", "text")
+
+ args = {
+ "name": key,
+ "display_name": question["name"],
+ "required": question.get("required", False),
+ "info": question.get("description", ""),
+ "placeholder": question.get("placeholder", ""),
+ }
+
+ if value:
+ args["value"] = value
+ elif question.get("default"):
+ args["value"] = question.get("default")
+
+ if field_type == "textarea":
+ input_class = MultilineInput
+ elif field_type == "select":
+ options = question.get("options", [])
+ if all(isinstance(option, bool) for option in options):
+ input_class = BoolInput
+ if value:
+ args["value"] = value
+ elif args["required"]:
+ args["value"] = args.get("default", options[0])
+ else:
+ input_class = DropdownInput
+ args["options"] = [str(option) for option in options]
+ if value and value in args["options"]:
+ args["value"] = value
+ elif args["required"]:
+ args["value"] = args.get("default", args["options"][0])
+ elif field_type == "number":
+ input_class = IntInput
+ args["input_types"] = ["Message"]
+ elif field_type == "multiselect":
+ input_class = MultiselectInput
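+            # Multiselect options come as a comma-separated list in the question's description.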
+ args["options"] = question.get("description", "").split(",")
+ if value and isinstance(value, list):
+ args["value"] = [val for val in value if val in args["options"]]
+ else:
+ args["value"] = []
+ else:
+ input_class = StrInput
+ if field_type == "file":
+ args["display_name"] += " (direct URL)"
+ args["input_types"] = ["Message"]
+
+ return input_class(**args)
+
+ def _collect_dynamic_attributes(self) -> dict:
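+        """Collect the values of the dynamically generated fields into the execution payload.
+
+        Chat message fields are wrapped as a single user message, list values are joined with
+        commas, and empty values are skipped.
+        """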
+ attributes = {}
+ suffix = self.FIELD_SUFFIX
+ suffix_length = len(suffix)
+
+ for key in self._attributes:
+ if key.endswith(suffix):
+ value = self._attributes[key]
+ name = key[:-suffix_length]
+
+ if isinstance(value, Message):
+ value = value.text
+
+ if name.endswith(self.CHAT_MESSAGE_INPUT_SUFFIX):
+ name = name[:-len(self.CHAT_MESSAGE_INPUT_SUFFIX)]
+ attributes[name] = [{"role": "user", "content": value}]
+ elif isinstance(value, list):
+ attributes[name] = ",".join(str(val) for val in value)
+ elif value not in ["", None]:
+ attributes[name] = value
+ return attributes
\ No newline at end of file
diff --git a/src/backend/base/langflow/components/tessai/upload_file.py b/src/backend/base/langflow/components/tessai/upload_file.py
new file mode 100644
index 000000000000..454b5da3ed1a
--- /dev/null
+++ b/src/backend/base/langflow/components/tessai/upload_file.py
@@ -0,0 +1,111 @@
+import time
+from pathlib import Path
+
+import requests
+
+from langflow.custom import Component
+from langflow.inputs import BoolInput, FileInput, SecretStrInput
+from langflow.io import Output
+from langflow.schema import Data
+
+
+class TessAIUploadFileComponent(Component):
+ display_name = "Upload File"
+ description = "Uploads a file to TessAI platform."
+ documentation = "https://docs.tess.pareto.io/"
+ icon = "TessAI"
+
+ inputs = [
+ SecretStrInput(
+ name="api_key",
+ display_name="Tess AI API Key",
+ info="The API key to use for TessAI.",
+ advanced=False,
+ input_types=[]
+ ),
+ FileInput(
+ name="file",
+ display_name="File",
+ info="The file to upload.",
+ required=True,
+ file_types=[
+ "pdf",
+ "docx",
+ "txt",
+ "csv",
+ "xlsx",
+ "xls",
+ "ppt",
+ "pptx",
+ "png",
+ "jpg",
+ "jpeg",
+ "gif",
+ "bmp",
+ "tiff",
+ "ico",
+ "webp",
+ "mp3",
+ "mp4",
+ "wav",
+ "webm",
+ "m4a",
+ "m4v",
+ "mov",
+ "avi",
+ "mkv",
+ "webm"
+ ],
+ ),
+ BoolInput(
+ name="process",
+ display_name="Process File",
+ info="Whether to process the file after upload.",
+ ),
+ ]
+
+ outputs = [Output(display_name="File Data", name="file_data", method="upload_file")]
+
+ BASE_URL = "https://tess.pareto.io/api"
+
+ def upload_file(self) -> Data:
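+        """Upload the selected file as multipart form data and return its metadata.
+
+        If the API reports the file as still ``waiting`` to be processed, its status is polled
+        until processing finishes or the poll times out.
+        """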
+ headers = self._get_headers()
+ upload_endpoint = f"{self.BASE_URL}/files"
+
+ try:
+ files = {"file": open(Path(self.file), "rb")}
+ data = {"process": str(self.process).lower()}
+
+ response = requests.post(upload_endpoint, headers=headers, files=files, data=data)
+ response.raise_for_status()
+ file_data = response.json()
+
+ if file_data["status"] == "waiting":
+ return self._poll_file_status(headers, file_data["id"])
+
+ return Data(data=file_data)
+ except requests.RequestException as e:
+ raise RuntimeError(f"Error uploading file: {e!s}") from e
+
+ def _get_headers(self) -> dict:
+ return {"Authorization": f"Bearer {self.api_key}"}
+
+ def _poll_file_status(self, headers: dict, file_id: int) -> dict:
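+        """Poll the file status every 2 seconds until it leaves ``waiting`` (5-minute limit)."""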
+ endpoint = f"{self.BASE_URL}/api/files/{file_id}"
+ start_time = time.time()
+ timeout = 300
+
+ while time.time() - start_time < timeout:
+ try:
+                response = requests.get(endpoint, headers=headers, timeout=30)
+ response.raise_for_status()
+ file_data = response.json()
+
+ if file_data["status"] != "waiting":
+ return file_data
+
+ time.sleep(2)
+ except requests.RequestException as e:
+ raise RuntimeError(f"Error polling file status: {e!s}") from e
+
+ raise TimeoutError("File processing timed out after 5 minutes")
diff --git a/src/frontend/src/icons/TessAI/index.tsx b/src/frontend/src/icons/TessAI/index.tsx
new file mode 100644
index 000000000000..bce16ee2bfc7
--- /dev/null
+++ b/src/frontend/src/icons/TessAI/index.tsx
@@ -0,0 +1,9 @@
+import React, { forwardRef } from "react";
+import SvgTessAIIcon from "./tessAIIcon";
+
+export const TessAIIcon = forwardRef<
+ SVGSVGElement,
+ React.PropsWithChildren<{}>
+>((props, ref) => {
+  return <SvgTessAIIcon ref={ref} {...props} />;
+});
diff --git a/src/frontend/src/icons/TessAI/tessAIIcon.jsx b/src/frontend/src/icons/TessAI/tessAIIcon.jsx
new file mode 100644
index 000000000000..d57876a3adb0
--- /dev/null
+++ b/src/frontend/src/icons/TessAI/tessAIIcon.jsx
@@ -0,0 +1,3163 @@
+const SvgTessAIIcon = (props) => (
+  {/* inline <svg> markup for the Tess AI icon (thousands of lines of path data) omitted from this excerpt */}
+);
+
+export default SvgTessAIIcon;
diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts
index 05b950e5da19..27a322c369b1 100644
--- a/src/frontend/src/utils/styleUtils.ts
+++ b/src/frontend/src/utils/styleUtils.ts
@@ -308,6 +308,7 @@ import { Share2Icon } from "../icons/Share2";
import SvgSlackIcon from "../icons/Slack/SlackIcon";
import { SpiderIcon } from "../icons/Spider";
import { Streamlit } from "../icons/Streamlit";
+import { TessAIIcon } from "../icons/TessAI";
import { UpstashSvgIcon } from "../icons/Upstash";
import { VectaraIcon } from "../icons/VectaraIcon";
import { VertexAIIcon } from "../icons/VertexAI";
@@ -549,6 +550,7 @@ export const SIDEBAR_BUNDLES = [
{ display_name: "Git", name: "git", icon: "GitLoader" },
{ display_name: "Confluence", name: "confluence", icon: "Confluence" },
{ display_name: "Mem0", name: "mem0", icon: "Mem0" },
+ { display_name: "Tess AI", name: "tessai", icon: "TessAI" },
{ display_name: "Youtube", name: "youtube", icon: "YouTube" },
{ display_name: "ScrapeGraph AI", name: "scrapegraph", icon: "ScrapeGraph" },
];
@@ -745,6 +747,7 @@ export const nodeIconsLucide: iconsType = {
SearchAPI: SearchAPIIcon,
Wikipedia: WikipediaIcon,
Arize: ArizeIcon,
+ TessAI: TessAIIcon,
Apify: ApifyIcon,
//Node Icons