{"id":"072c51b6-6619-47bc-a67b-79aa4eeff519","data":{"nodes":[{"id":"PromptComponent-tz9Ce","type":"genericNode","position":{"x":121.98240848413263,"y":-119.01673116501726},"data":{"type":"PromptComponent","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_build_config: dict, current_build_config: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_build_config, current_build_config)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_build_config\n # and update the frontend_node with those values\n update_template_values(frontend_template=frontend_node, raw_template=current_build_config[\"template\"])\n return frontend_node\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":"Translate the following text into {language}. Do not engage in conversations. Only translate the text.\nGive an initial response that is only the direct translation.\n\nImportant Notes:\n\nDon't be overly formal. Feel free to use casual or informal speech, similar to how a native speaker might speak informally with friends or family. Use slang or informal expressions when appropriate.\n\nTake into account any cultural idioms and translate them into idioms that make sense in {language}.\nIf the user's language matches that of {language}, don't provide a translation, just repeat the statement exactly.\nDo not provide any explanation, meaning, or have a conversation. 
Only translate the text, including questions, without answering them.\n\nInput Text:\n{user_input}\n\nAnswer:","name":"template","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt"},"language":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"language","display_name":"language","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"user_input":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"user_input","display_name":"user_input","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Translate","documentation":"","custom_fields":{"template":["language","user_input"]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":false,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"error":null,"edited":true,"lf_version":"1.0.15"},"id":"PromptComponent-tz9Ce","description":"Create a prompt template with dynamic variables.","display_name":"Prompt","edited":false},"selected":false,"width":384,"height":494,"dragging":false,"positionAbsolute":{"x":121.98240848413263,"y":-119.01673116501726}},{"id":"ChatInput-gbkut","type":"genericNode","position":{"x":-343.1453900268143,"y":84.41902680957963},"data":{"type":"ChatInput","node":{"template":{"_type":"Component","files":{"trace_as_metadata":true,"file_path":"","fileTypes":["txt","md","mdx","csv","json","yaml","yml","xml","html","htm","pdf","docx","py","sh","sql","js","ts","tsx","jpg","jpeg","png","bmp","image"],"list":true,"required":false,"placeholder":"","show":true,"value":"","name":"files","display_name":"Files","advanced":true,"dynamic":false,"info":"Files to be sent with the message.","title_case":false,"type":"file"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n 
display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"Take me to your leader!","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as input.","title_case":false,"type":"str"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"User","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Session ID for the message.","title_case":false,"type":"str"}},"description":"Get chat inputs from the Playground.","icon":"ChatInput","base_classes":["Message"],"display_name":"Chat Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true,"hidden":false}],"field_order":["input_value","sender","sender_name","session_id","files"],"beta":false,"edited":true,"lf_version":"1.0.15"},"id":"ChatInput-gbkut","description":"Get chat inputs from the Playground.","display_name":"Chat Input","edited":false},"selected":false,"width":384,"height":294,"positionAbsolute":{"x":-343.1453900268143,"y":84.41902680957963},"dragging":false},{"id":"PromptComponent-tc7jK","type":"genericNode","position":{"x":153.9117775385124,"y":651.9634949717491},"data":{"type":"PromptComponent","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic 
variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_build_config: dict, current_build_config: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_build_config, current_build_config)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_build_config\n # and update the frontend_node with those values\n update_template_values(frontend_template=frontend_node, raw_template=current_build_config[\"template\"])\n return frontend_node\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":"Detect the language used in the following text Do not provide explanations or engage in conversations. Only detect the language and provide an answer. \nProvide both the language name and the full BCP-47 code, including country for the language being spoken separated be a comma.\n\nInput Text:\n{user_input}\n\nAnswer:","name":"template","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt"},"user_input":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"user_input","display_name":"user_input","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Language detect","documentation":"","custom_fields":{"template":["user_input"]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":false,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"error":null,"edited":true,"lf_version":"1.0.15"},"id":"PromptComponent-tc7jK","description":"Create a prompt template with dynamic variables.","display_name":"Language detect","edited":false},"selected":false,"width":384,"height":408,"positionAbsolute":{"x":153.9117775385124,"y":651.9634949717491},"dragging":false},{"id":"ChatOutput-Ny5Dc","type":"genericNode","position":{"x":1102.0726819191532,"y":773.9993079632533},"data":{"type":"ChatOutput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message 
import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"data_template","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"Machine","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"Detected language","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Session ID for the message.","title_case":false,"type":"str"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Detected Language","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","sender","sender_name","session_id","data_template"],"beta":false,"edited":true,"lf_version":"1.0.15"},"id":"ChatOutput-Ny5Dc","description":"Display a chat message in the Playground.","display_name":"Detected Language","edited":false},"selected":false,"width":384,"height":294,"dragging":false,"positionAbsolute":{"x":1102.0726819191532,"y":773.9993079632533}},{"id":"PromptComponent-UIp0x","type":"genericNode","position":{"x":159.93496326476838,"y":1341.365518127703},"data":{"type":"PromptComponent","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_build_config: dict, current_build_config: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_build_config, current_build_config)\n template = 
frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_build_config\n # and update the frontend_node with those values\n update_template_values(frontend_template=frontend_node, raw_template=current_build_config[\"template\"])\n return frontend_node\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":"Your task is to analyze the provided user input and identify the primary tone and sentiment expressed by the user. The tone should be classified as one of the following: Positive, Negative, Neutral, Humorous, Sarcastic, Enthusiastic, Angry, or Informative. The sentiment should be classified as Positive, Negative, or Neutral. Provide a single emoticon that you think sums up their feelings.\n\nUse the following styles to construct your response:\nNeutral Positive 😊\nAngry Negative 😠\n\nInput Text:\n{user_input}\n\nAnswer:","name":"template","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt"},"user_input":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"user_input","display_name":"user_input","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Sentiment detect","documentation":"","custom_fields":{"template":["user_input"]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":false,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"error":null,"edited":true,"lf_version":"1.0.15"},"id":"PromptComponent-UIp0x","description":"Create a prompt template with dynamic variables.","display_name":"Sentiment detect","edited":false},"selected":false,"width":384,"height":408,"positionAbsolute":{"x":159.93496326476838,"y":1341.365518127703},"dragging":false},{"id":"ChatOutput-CptyP","type":"genericNode","position":{"x":1100.773970357587,"y":1465.9250393537704},"data":{"type":"ChatOutput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of 
sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"data_template","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"Machine","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"AI","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Session ID for the message.","title_case":false,"type":"str"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Sentiment","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","sender","sender_name","session_id","data_template"],"beta":false,"edited":true,"lf_version":"1.0.15"},"id":"ChatOutput-CptyP","description":"Display a chat message in the Playground.","display_name":"Detected 
Sentiment","edited":false},"selected":false,"width":384,"height":294,"positionAbsolute":{"x":1100.773970357587,"y":1465.9250393537704},"dragging":false},{"id":"TextInput-UFUC6","type":"genericNode","position":{"x":-344.99790592835063,"y":-259.8232564291708},"data":{"type":"TextInput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.text import TextComponent\nfrom langflow.io import MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n message = Message(\n text=self.input_value,\n )\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"French","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Text to be passed as input.","title_case":false,"type":"str"}},"description":"Get text inputs from the Playground.","icon":"type","base_classes":["Message"],"display_name":"Language","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value"],"beta":false,"edited":false,"lf_version":"1.0.15"},"id":"TextInput-UFUC6","description":"Get text inputs from the Playground.","display_name":"Language"},"selected":false,"width":384,"height":294,"dragging":false,"positionAbsolute":{"x":-344.99790592835063,"y":-259.8232564291708}},{"id":"ChatOutput-dyUSv","type":"genericNode","position":{"x":1090.0257686539758,"y":75.66838199833694},"data":{"type":"ChatOutput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n 
MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"data_template","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"Machine","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"Translation","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Translation","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false,"lf_version":"1.0.15"},"id":"ChatOutput-dyUSv","description":"Display a chat message in the Playground.","display_name":"Chat Output"},"selected":false,"width":384,"height":294,"dragging":false,"positionAbsolute":{"x":1090.0257686539758,"y":75.66838199833694}},{"id":"OpenAIModel-TadRC","type":"genericNode","position":{"x":621.876814910567,"y":1137.5582026404147},"data":{"type":"OpenAIModel","node":{"template":{"_type":"Component","api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"value":"","name":"api_key","display_name":"OpenAI API Key","advanced":false,"input_types":[],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"json_mode","display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"max_tokens","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"value":{},"name":"model_kwargs","display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"gpt-4o-mini","name":"model_name","display_name":"Model Name","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput"},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"openai_api_base","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"value":{},"name":"output_schema","display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":1,"name":"seed","display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"stream","display_name":"Stream","advanced":false,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"system_message","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":0.1,"name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true,"hidden":true}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed"],"beta":false,"edited":false,"lf_version":"1.0.15"},"id":"OpenAIModel-TadRC"},"selected":false,"width":384,"height":624,"positionAbsolute":{"x":621.876814910567,"y":1137.5582026404147},"dragging":false},{"id":"GroqModel-te9cD","type":"genericNode","position":{"x":620.3661612811397,"y":-271.8263401728457},"data":{"type":"GroqModel","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom typing import List\nfrom langchain_groq import ChatGroq\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass GroqModel(LCModelComponent):\n display_name: str = \"Groq\"\n description: str = \"Generate text using Groq.\"\n icon = \"Groq\"\n name = \"GroqModel\"\n\n inputs = LCModelComponent._base_inputs + [\n SecretStrInput(\n name=\"groq_api_key\",\n display_name=\"Groq API Key\",\n info=\"API key for the Groq API.\",\n ),\n MessageTextInput(\n name=\"groq_api_base\",\n display_name=\"Groq API Base\",\n info=\"Base URL path for API requests, leave blank if not using a proxy or service emulator.\",\n advanced=True,\n value=\"https://api.groq.com\",\n ),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Output Tokens\",\n info=\"The maximum number of tokens to generate.\",\n advanced=True,\n ),\n FloatInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n info=\"Run inference with this temperature. Must be in the closed interval [0.0, 1.0].\",\n value=0.1,\n ),\n IntInput(\n name=\"n\",\n display_name=\"N\",\n info=\"Number of chat completions to generate for each prompt. 
Note that the API may not return the full n completions if duplicates are generated.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model\",\n info=\"The name of the model to use.\",\n options=[],\n refresh_button=True,\n ),\n ]\n\n def get_models(self) -> List[str]:\n api_key = self.groq_api_key\n base_url = self.groq_api_base or \"https://api.groq.com\"\n url = f\"{base_url}/openai/v1/models\"\n\n headers = {\"Authorization\": f\"Bearer {api_key}\", \"Content-Type\": \"application/json\"}\n\n try:\n response = requests.get(url, headers=headers)\n response.raise_for_status()\n model_list = response.json()\n return [model[\"id\"] for model in model_list.get(\"data\", [])]\n except requests.RequestException as e:\n self.status = f\"Error fetching models: {str(e)}\"\n return []\n\n def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):\n if field_name == \"groq_api_key\" or field_name == \"groq_api_base\" or field_name == \"model_name\":\n models = self.get_models()\n build_config[\"model_name\"][\"options\"] = models\n return build_config\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n groq_api_key = self.groq_api_key\n model_name = self.model_name\n max_tokens = self.max_tokens\n temperature = self.temperature\n groq_api_base = self.groq_api_base\n n = self.n\n stream = self.stream\n\n output = ChatGroq( # type: ignore\n model=model_name,\n max_tokens=max_tokens or None,\n temperature=temperature,\n base_url=groq_api_base,\n n=n or 1,\n api_key=SecretStr(groq_api_key),\n streaming=stream,\n )\n\n return output # type: ignore\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"groq_api_base":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"https://api.groq.com","name":"groq_api_base","display_name":"Groq API Base","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Base URL path for API requests, leave blank if not using a proxy or service emulator.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"groq_api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"value":"","name":"groq_api_key","display_name":"Groq API Key","advanced":false,"input_types":[],"dynamic":false,"info":"API key for the Groq API.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"max_tokens":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"max_tokens","display_name":"Max Output Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to 
generate.","title_case":false,"type":"int","_input_type":"IntInput"},"model_name":{"trace_as_metadata":true,"options":["gemma2-9b-it","gemma-7b-it","llama-3.1-70b-versatile","llama-3.1-8b-instant","llama3-70b-8192","llama3-8b-8192","llama3-groq-70b-8192-tool-use-preview","llama3-groq-8b-8192-tool-use-preview","llama-guard-3-8b","mixtral-8x7b-32768","whisper-large-v3"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"llama-3.1-8b-instant","name":"model_name","display_name":"Model","advanced":false,"dynamic":false,"info":"The name of the model to use.","refresh_button":true,"title_case":false,"type":"str","_input_type":"DropdownInput"},"n":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"n","display_name":"N","advanced":true,"dynamic":false,"info":"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"stream","display_name":"Stream","advanced":false,"dynamic":false,"info":"Stream the response from the model. Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"system_message","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":0.1,"name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"Run inference with this temperature. 
Must be in the closed interval [0.0, 1.0].","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generate text using Groq.","icon":"Groq","base_classes":["LanguageModel","Message"],"display_name":"Groq","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true,"hidden":true}],"field_order":["input_value","system_message","stream","groq_api_key","groq_api_base","max_tokens","temperature","n","model_name"],"beta":false,"edited":false,"lf_version":"1.0.15"},"id":"GroqModel-te9cD"},"selected":false,"width":384,"height":640,"positionAbsolute":{"x":620.3661612811397,"y":-271.8263401728457},"dragging":false},{"id":"Prompt-wzH5m","type":"genericNode","position":{"x":1968.5893359861345,"y":280.302409789667},"data":{"type":"Prompt","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return 
DefaultPromptField(**kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"Original text: {user_input}\n-----\n\nTranslation: {translated_text}\n-----\n\nProvide an explanation for the translation from \"{user_input}\" to \"{translated_text}\" above using {detected_langauge}.\nEnsure your response is accurate, especially as it concerns idioms. If you don't know something, just say you don't know, do not hallucinate, lie, or make up responses.\nGive a short and concise, but informational response, no longer than a couple sentences.\n\nImportant Notes:\n\nTake into account any cultural idioms.\n\nAnswer:","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput","load_from_db":false},"user_input":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"user_input","display_name":"user_input","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"translated_text":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"translated_text","display_name":"translated_text","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"detected_langauge":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"detected_langauge","display_name":"detected_langauge","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Translation explanation","documentation":"","custom_fields":{"template":["user_input","translated_text","detected_langauge"]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":null,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"error":null,"edited":false,"lf_version":"1.0.15"},"id":"Prompt-wzH5m","description":"Create a prompt template with dynamic variables.","display_name":"Translation explanation"},"selected":false,"width":384,"height":580,"positionAbsolute":{"x":1968.5893359861345,"y":280.302409789667},"dragging":false},{"id":"ChatOutput-0RiZA","type":"genericNode","position":{"x":2842.279386374322,"y":537.2012713473141},"data":{"type":"ChatOutput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import 
MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"data_template","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"Machine","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"AI","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Explanation","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false,"lf_version":"1.0.15"},"id":"ChatOutput-0RiZA"},"selected":false,"width":384,"height":294,"positionAbsolute":{"x":2842.279386374322,"y":537.2012713473141},"dragging":false},{"id":"GroqModel-MFjPY","type":"genericNode","position":{"x":2411.1424379752248,"y":278.7017714350353},"data":{"type":"GroqModel","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom typing import List\nfrom langchain_groq import ChatGroq\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass GroqModel(LCModelComponent):\n display_name: str = \"Groq\"\n description: str = \"Generate text using Groq.\"\n icon = \"Groq\"\n name = \"GroqModel\"\n\n inputs = LCModelComponent._base_inputs + [\n SecretStrInput(\n name=\"groq_api_key\",\n display_name=\"Groq API Key\",\n info=\"API key for the Groq API.\",\n ),\n MessageTextInput(\n 
name=\"groq_api_base\",\n display_name=\"Groq API Base\",\n info=\"Base URL path for API requests, leave blank if not using a proxy or service emulator.\",\n advanced=True,\n value=\"https://api.groq.com\",\n ),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Output Tokens\",\n info=\"The maximum number of tokens to generate.\",\n advanced=True,\n ),\n FloatInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n value=0.1,\n ),\n IntInput(\n name=\"n\",\n display_name=\"N\",\n info=\"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model\",\n info=\"The name of the model to use.\",\n options=[],\n refresh_button=True,\n ),\n ]\n\n def get_models(self) -> List[str]:\n api_key = self.groq_api_key\n base_url = self.groq_api_base or \"https://api.groq.com\"\n url = f\"{base_url}/openai/v1/models\"\n\n headers = {\"Authorization\": f\"Bearer {api_key}\", \"Content-Type\": \"application/json\"}\n\n try:\n response = requests.get(url, headers=headers)\n response.raise_for_status()\n model_list = response.json()\n return [model[\"id\"] for model in model_list.get(\"data\", [])]\n except requests.RequestException as e:\n self.status = f\"Error fetching models: {str(e)}\"\n return []\n\n def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):\n if field_name == \"groq_api_key\" or field_name == \"groq_api_base\" or field_name == \"model_name\":\n models = self.get_models()\n build_config[\"model_name\"][\"options\"] = models\n return build_config\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n groq_api_key = self.groq_api_key\n model_name = self.model_name\n max_tokens = self.max_tokens\n temperature = self.temperature\n groq_api_base = self.groq_api_base\n n = self.n\n stream = self.stream\n\n output = ChatGroq( # type: ignore\n model=model_name,\n max_tokens=max_tokens or None,\n temperature=temperature,\n base_url=groq_api_base,\n n=n or 1,\n api_key=SecretStr(groq_api_key),\n streaming=stream,\n )\n\n return output # type: ignore\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"groq_api_base":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"https://api.groq.com","name":"groq_api_base","display_name":"Groq API Base","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Base URL path for API requests, leave blank if not using a proxy or service emulator.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"groq_api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"value":"","name":"groq_api_key","display_name":"Groq API Key","advanced":false,"input_types":[],"dynamic":false,"info":"API key for the Groq 
API.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"max_tokens":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"max_tokens","display_name":"Max Output Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate.","title_case":false,"type":"int","_input_type":"IntInput"},"model_name":{"trace_as_metadata":true,"options":["gemma2-9b-it","gemma-7b-it","llama-3.1-70b-versatile","llama-3.1-8b-instant","llama3-70b-8192","llama3-8b-8192","llama3-groq-70b-8192-tool-use-preview","llama3-groq-8b-8192-tool-use-preview","mixtral-8x7b-32768","whisper-large-v3"],"combobox":false,"required":false,"placeholder":"","show":true,"value":"llama-3.1-8b-instant","name":"model_name","display_name":"Model","advanced":false,"dynamic":false,"info":"The name of the model to use.","refresh_button":true,"title_case":false,"type":"str","_input_type":"DropdownInput"},"n":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"n","display_name":"N","advanced":true,"dynamic":false,"info":"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":false,"name":"stream","display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"system_message","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":0.1,"name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"Run inference with this temperature. 
Must be in the closed interval [0.0, 1.0].","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generate text using Groq.","icon":"Groq","base_classes":["LanguageModel","Message"],"display_name":"Groq","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true,"hidden":true}],"field_order":["input_value","system_message","stream","groq_api_key","groq_api_base","max_tokens","temperature","n","model_name"],"beta":false,"edited":false,"lf_version":"1.0.15"},"id":"GroqModel-MFjPY"},"selected":false,"width":384,"height":568,"positionAbsolute":{"x":2411.1424379752248,"y":278.7017714350353},"dragging":false},{"id":"AnthropicModel-QwBVA","type":"genericNode","position":{"x":622.0781059610647,"y":482.7582473929134},"data":{"type":"AnthropicModel","node":{"template":{"_type":"Component","anthropic_api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"name":"anthropic_api_key","value":"","display_name":"Anthropic API Key","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Your Anthropic API key.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"anthropic_api_url":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"anthropic_api_url","value":"","display_name":"Anthropic API URL","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langchain_anthropic.chat_models import ChatAnthropic\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model Name\",\n options=[\n \"claude-3-5-sonnet-20240620\",\n \"claude-3-opus-20240229\",\n \"claude-3-sonnet-20240229\",\n \"claude-3-haiku-20240307\",\n ],\n info=\"https://python.langchain.com/docs/integrations/chat/anthropic\",\n value=\"claude-3-5-sonnet-20240620\",\n ),\n SecretStrInput(\n name=\"anthropic_api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n MessageTextInput(\n name=\"anthropic_api_url\",\n display_name=\"Anthropic API URL\",\n advanced=True,\n info=\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\n ),\n MessageTextInput(\n name=\"prefill\",\n display_name=\"Prefill\",\n info=\"Prefill text to guide the model's response.\",\n advanced=True,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n model = self.model\n anthropic_api_key = self.anthropic_api_key\n max_tokens = self.max_tokens\n temperature = self.temperature\n anthropic_api_url = self.anthropic_api_url or \"https://api.anthropic.com\"\n\n try:\n output = ChatAnthropic(\n model=model,\n anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None),\n max_tokens_to_sample=max_tokens, # type: ignore\n temperature=temperature,\n anthropic_api_url=anthropic_api_url,\n streaming=self.stream,\n )\n except Exception as e:\n raise ValueError(\"Could not connect to Anthropic API.\") from e\n\n return output # type: ignore\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"\n Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\") # type: ignore\n if message:\n return message\n return None\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"max_tokens":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"max_tokens","value":4096,"display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model":{"trace_as_metadata":true,"options":["claude-3-5-sonnet-20240620","claude-3-opus-20240229","claude-3-sonnet-20240229","claude-3-haiku-20240307"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"model","value":"claude-3-5-sonnet-20240620","display_name":"Model Name","advanced":false,"dynamic":false,"info":"https://python.langchain.com/docs/integrations/chat/anthropic","title_case":false,"type":"str","_input_type":"DropdownInput"},"prefill":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"prefill","value":"","display_name":"Prefill","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Prefill text to guide the model's response.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"stream","value":false,"display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_message","value":"","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"temperature","value":0.1,"display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generate text using Anthropic Chat&Completion LLMs with prefill support.","icon":"Anthropic","base_classes":["LanguageModel","Message"],"display_name":"Anthropic","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true,"hidden":true}],"field_order":["input_value","system_message","stream","max_tokens","model","anthropic_api_key","temperature","anthropic_api_url","prefill"],"beta":false,"edited":false,"lf_version":"1.0.15"},"id":"AnthropicModel-QwBVA"},"selected":false,"width":384,"height":580,"positionAbsolute":{"x":622.0781059610647,"y":482.7582473929134},"dragging":false},{"id":"AnthropicModel-dKS9P","type":"genericNode","position":{"x":618.7374991861375,"y":-931.9846126209916},"data":{"type":"AnthropicModel","node":{"template":{"_type":"Component","anthropic_api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"name":"anthropic_api_key","value":"","display_name":"Anthropic API Key","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Your Anthropic API key.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"anthropic_api_url":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"anthropic_api_url","value":"","display_name":"Anthropic API URL","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langchain_anthropic.chat_models import ChatAnthropic\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model Name\",\n options=[\n \"claude-3-5-sonnet-20240620\",\n \"claude-3-opus-20240229\",\n \"claude-3-sonnet-20240229\",\n \"claude-3-haiku-20240307\",\n ],\n info=\"https://python.langchain.com/docs/integrations/chat/anthropic\",\n value=\"claude-3-5-sonnet-20240620\",\n ),\n SecretStrInput(\n name=\"anthropic_api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n MessageTextInput(\n name=\"anthropic_api_url\",\n display_name=\"Anthropic API URL\",\n advanced=True,\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n ),\n MessageTextInput(\n name=\"prefill\",\n display_name=\"Prefill\",\n info=\"Prefill text to guide the model's response.\",\n advanced=True,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n model = self.model\n anthropic_api_key = self.anthropic_api_key\n max_tokens = self.max_tokens\n temperature = self.temperature\n anthropic_api_url = self.anthropic_api_url or \"https://api.anthropic.com\"\n\n try:\n output = ChatAnthropic(\n model=model,\n anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None),\n max_tokens_to_sample=max_tokens, # type: ignore\n temperature=temperature,\n anthropic_api_url=anthropic_api_url,\n streaming=self.stream,\n )\n except Exception as e:\n raise ValueError(\"Could not connect to Anthropic API.\") from e\n\n return output # type: ignore\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"\n Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\") # type: ignore\n if message:\n return message\n return None\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"max_tokens":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"max_tokens","value":4096,"display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model":{"trace_as_metadata":true,"options":["claude-3-5-sonnet-20240620","claude-3-opus-20240229","claude-3-sonnet-20240229","claude-3-haiku-20240307"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"model","value":"claude-3-5-sonnet-20240620","display_name":"Model Name","advanced":false,"dynamic":false,"info":"https://python.langchain.com/docs/integrations/chat/anthropic","title_case":false,"type":"str","_input_type":"DropdownInput"},"prefill":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"prefill","value":"","display_name":"Prefill","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Prefill text to guide the model's response.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"stream","value":false,"display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_message","value":"","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"temperature","value":0.1,"display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generate text using Anthropic Chat&Completion LLMs with prefill support.","icon":"Anthropic","base_classes":["LanguageModel","Message"],"display_name":"Anthropic","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language 
Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_tokens","model","anthropic_api_key","temperature","anthropic_api_url","prefill"],"beta":false,"edited":false,"lf_version":"1.0.15"},"id":"AnthropicModel-dKS9P"},"selected":false,"width":384,"height":621,"positionAbsolute":{"x":618.7374991861375,"y":-931.9846126209916},"dragging":false}],"edges":[{"source":"ChatInput-gbkut","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gbkutœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"PromptComponent-tz9Ce","targetHandle":"{œfieldNameœ:œuser_inputœ,œidœ:œPromptComponent-tz9Ceœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"user_input","id":"PromptComponent-tz9Ce","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-gbkut","name":"message","output_types":["Message"]}},"id":"reactflow__edge-ChatInput-gbkut{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gbkutœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-PromptComponent-tz9Ce{œfieldNameœ:œuser_inputœ,œidœ:œPromptComponent-tz9Ceœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","className":""},{"source":"TextInput-UFUC6","sourceHandle":"{œdataTypeœ:œTextInputœ,œidœ:œTextInput-UFUC6œ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","target":"PromptComponent-tz9Ce","targetHandle":"{œfieldNameœ:œlanguageœ,œidœ:œPromptComponent-tz9Ceœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"language","id":"PromptComponent-tz9Ce","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"TextInput","id":"TextInput-UFUC6","name":"text","output_types":["Message"]}},"id":"reactflow__edge-TextInput-UFUC6{œdataTypeœ:œTextInputœ,œidœ:œTextInput-UFUC6œ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-PromptComponent-tz9Ce{œfieldNameœ:œlanguageœ,œidœ:œPromptComponent-tz9Ceœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","className":""},{"source":"ChatInput-gbkut","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gbkutœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"PromptComponent-tc7jK","targetHandle":"{œfieldNameœ:œuser_inputœ,œidœ:œPromptComponent-tc7jKœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"user_input","id":"PromptComponent-tc7jK","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-gbkut","name":"message","output_types":["Message"]}},"id":"reactflow__edge-ChatInput-gbkut{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gbkutœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-PromptComponent-tc7jK{œfieldNameœ:œuser_inputœ,œidœ:œPromptComponent-tc7jKœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","className":""},{"source":"ChatInput-gbkut","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gbkutœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"PromptComponent-UIp0x","targetHandle":"{œfieldNameœ:œuser_inputœ,œidœ:œPromptComponent-UIp0xœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"user_input","id":"PromptComponent-UIp0x","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-gbkut","name":"message","output_types":["Message"]}},"id":"reactflow__edge-ChatInput-gbkut{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gbkutœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-PromptComponent-UIp0x{œfieldNameœ:œuser_inputœ,œidœ:œPromptComponent-UIp0xœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","classNa
me":""},{"source":"PromptComponent-UIp0x","sourceHandle":"{œdataTypeœ:œPromptComponentœ,œidœ:œPromptComponent-UIp0xœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"OpenAIModel-TadRC","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-TadRCœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"OpenAIModel-TadRC","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"PromptComponent","id":"PromptComponent-UIp0x","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-PromptComponent-UIp0x{œdataTypeœ:œPromptComponentœ,œidœ:œPromptComponent-UIp0xœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-TadRC{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-TadRCœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"OpenAIModel-TadRC","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-TadRCœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-CptyP","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-CptyPœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-CptyP","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"OpenAIModel","id":"OpenAIModel-TadRC","name":"text_output","output_types":["Message"]}},"id":"reactflow__edge-OpenAIModel-TadRC{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-TadRCœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-CptyP{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-CptyPœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"Prompt-wzH5m","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-wzH5mœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"GroqModel-MFjPY","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œGroqModel-MFjPYœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"GroqModel-MFjPY","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-wzH5m","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-Prompt-wzH5m{œdataTypeœ:œPromptœ,œidœ:œPrompt-wzH5mœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-GroqModel-MFjPY{œfieldNameœ:œinput_valueœ,œidœ:œGroqModel-MFjPYœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"ChatInput-gbkut","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gbkutœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-wzH5m","targetHandle":"{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-wzH5mœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"user_input","id":"Prompt-wzH5m","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-gbkut","name":"message","output_types":["Message"]}},"id":"reactflow__edge-ChatInput-gbkut{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gbkutœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-wzH5m{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-wzH5mœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","className":""},{"source":"GroqModel-MFjPY","sourceHandle":"{œdataTypeœ:œGroqModelœ,œidœ:œGroqModel-MFjPYœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-0RiZA","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-0RiZAœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-0RiZA","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"GroqModel","id":"GroqModel-MFjPY","name":"text_output","output_types":["Message"]}},"id":"reactflow__edge-
GroqModel-MFjPY{œdataTypeœ:œGroqModelœ,œidœ:œGroqModel-MFjPYœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-0RiZA{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-0RiZAœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":"","selected":false},{"source":"PromptComponent-tc7jK","sourceHandle":"{œdataTypeœ:œPromptComponentœ,œidœ:œPromptComponent-tc7jKœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"AnthropicModel-QwBVA","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œAnthropicModel-QwBVAœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"AnthropicModel-QwBVA","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"PromptComponent","id":"PromptComponent-tc7jK","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-PromptComponent-tc7jK{œdataTypeœ:œPromptComponentœ,œidœ:œPromptComponent-tc7jKœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-AnthropicModel-QwBVA{œfieldNameœ:œinput_valueœ,œidœ:œAnthropicModel-QwBVAœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"AnthropicModel-QwBVA","sourceHandle":"{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-QwBVAœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-wzH5m","targetHandle":"{œfieldNameœ:œdetected_languageœ,œidœ:œPrompt-wzH5mœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"detected_language","id":"Prompt-wzH5m","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"AnthropicModel","id":"AnthropicModel-QwBVA","name":"text_output","output_types":["Message"]}},"id":"reactflow__edge-AnthropicModel-QwBVA{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-QwBVAœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-wzH5m{œfieldNameœ:œdetected_languageœ,œidœ:œPrompt-wzH5mœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","className":""},{"source":"AnthropicModel-QwBVA","sourceHandle":"{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-QwBVAœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-Ny5Dc","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Ny5Dcœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-Ny5Dc","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"AnthropicModel","id":"AnthropicModel-QwBVA","name":"text_output","output_types":["Message"]}},"id":"reactflow__edge-AnthropicModel-QwBVA{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-QwBVAœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-Ny5Dc{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Ny5Dcœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":""},{"source":"GroqModel-te9cD","sourceHandle":"{œdataTypeœ:œGroqModelœ,œidœ:œGroqModel-te9cDœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-wzH5m","targetHandle":"{œfieldNameœ:œtranslated_textœ,œidœ:œPrompt-wzH5mœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"translated_text","id":"Prompt-wzH5m","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"GroqModel","id":"GroqModel-te9cD","name":"text_output","output_types":["Message"]}},"id":"reactflow__edge-GroqModel-te9cD{œdataTypeœ:œGroqModelœ,œidœ:œGroqModel-te9cDœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-wzH5m{œfieldNameœ:œtranslated_textœ,œidœ:œPrompt-wzH5mœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"source":"PromptComponent-tz9Ce","sourceHandle":"{œdataTypeœ:œPromptComponentœ,œidœ:œPromptComponent-tz9Ceœ,œnameœ:œpromptœ,œ
output_typesœ:[œMessageœ]}","target":"GroqModel-te9cD","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œGroqModel-te9cDœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"GroqModel-te9cD","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"PromptComponent","id":"PromptComponent-tz9Ce","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-PromptComponent-tz9Ce{œdataTypeœ:œPromptComponentœ,œidœ:œPromptComponent-tz9Ceœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-GroqModel-te9cD{œfieldNameœ:œinput_valueœ,œidœ:œGroqModel-te9cDœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"},{"source":"GroqModel-te9cD","sourceHandle":"{œdataTypeœ:œGroqModelœ,œidœ:œGroqModel-te9cDœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-dyUSv","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-dyUSvœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-dyUSv","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"GroqModel","id":"GroqModel-te9cD","name":"text_output","output_types":["Message"]}},"id":"reactflow__edge-GroqModel-te9cD{œdataTypeœ:œGroqModelœ,œidœ:œGroqModel-te9cDœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-dyUSv{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-dyUSvœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"}],"viewport":{"x":158.62817101882587,"y":118.13183433621415,"zoom":0.2654188450666186}},"description":"This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. \nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ","name":"Babbelfish.ai","last_tested_version":"1.0.15","endpoint_name":null,"is_component":false}