{
  "nodes": [
    {
      "parameters": {
        "options": {
          "systemMessage": "You are a helpful assistant. Use the Pinecone Assistant Tool to retrieve data about Pinecone releases. Include the file name and file url in citations wherever referenced in output."
        }
      },
      "type": "@n8n/n8n-nodes-langchain.agent",
      "typeVersion": 2.2,
      "position": [1216, 816],
      "id": "52bd17c4-81c6-485f-8e08-1477220f508f",
      "name": "AI Agent"
    },
    {
      "parameters": {
        "model": {
          "__rl": true,
          "mode": "list",
          "value": "gpt-4.1-mini"
        },
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.lmChatOpenAi",
      "typeVersion": 1.2,
      "position": [1152, 1040],
      "id": "327b94c2-06f3-4a7b-bd3d-3dcfd92e28ef",
      "name": "OpenAI Chat Model",
      "credentials": {
        "openAiApi": {
          "id": "wRaEYGibKa7GoCaK",
          "name": "OpenAi account"
        }
      }
    },
    {
      "parameters": {
        "content": "## 1. Upload files to Pinecone Assistant",
        "height": 640,
        "width": 1776,
        "color": 7
      },
      "type": "n8n-nodes-base.stickyNote",
      "position": [768, 0],
      "typeVersion": 1,
      "id": "ae8f7bfc-d7e7-43e3-87ff-1863274cc3e1",
      "name": "Sticky Note1"
    },
    {
      "parameters": {
        "content": "## 2. Chat with your docs",
        "height": 576,
        "width": 1344,
        "color": 7
      },
      "type": "n8n-nodes-base.stickyNote",
      "position": [768, 704],
      "typeVersion": 1,
      "id": "082e1b5a-edc0-4bb3-bbe8-163f81b82805",
      "name": "Sticky Note2"
    },
    {
      "parameters": {
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.chatTrigger",
      "typeVersion": 1.3,
      "position": [848, 816],
      "id": "02b8830a-6457-4e5e-bf9a-09a1a9d02183",
      "name": "Chat input",
      "webhookId": "4672d1f8-d2bb-4059-8761-7aa5792814c0"
    },
    {
      "parameters": {
        "content": "![Pinecone logo](https://www.pinecone.io/images/pinecone-logo-for-n8n-templates.png)\n\n\n## Try it out\n\nThis n8n workflow template lets you chat with Pinecone release notes files downloaded via HTTP (.docx, .json, .md, .txt, .pdf) using OpenAI and Pinecone Assistant. It retrieves relevant context from your files in real time so you can get accurate, context-aware answers about your proprietary data—without the need to train your own LLM.\n\n### What is Pinecone Assistant?\n\n[Pinecone Assistant](https://docs.pinecone.io/guides/assistant/overview) allows you to build production-grade chat and agent-based applications quickly. It abstracts the complexities of implementing retrieval-augmented generation (RAG) systems by managing the chunking, embedding, storage, query planning, vector search, model orchestration, and reranking for you.\n\n### Prerequisites\n\n* A [Pinecone account](https://app.pinecone.io/) and [API key](https://app.pinecone.io/organizations/-/projects/-/keys)\n* An [OpenAI account](https://auth.openai.com/create-account) and [API key](https://platform.openai.com/settings/organization/api-keys)\n\n### Setup\n\n1. Create a Pinecone Assistant in the Pinecone Console [here](https://app.pinecone.io/organizations/-/projects/-/assistant) \n\t1. Name your Assistant `n8n-assistant`\n\t2. No need to configure a Chat model or Assistant instructions\n2. Set up Pinecone API key credential in n8n\n\t1. In the Upload file to Assistant node -> PineconeApi section, select Create new credential\n\t2. Paste in your Pinecone API key in the API Key field\n3. Select your Assistant Name in each of the Pinecone Assistant nodes\n4. Set up the OpenAI credential in n8n\n\t1. In the OpenAI Chat Model node -> Credential to connect with, select Create new credential\n\t2. Set the API Key field to your OpenAI API key\n5. Activate the workflow or test it with a manual execution to ingest the documents\n6. Chat with your docs!\n\n### Ideas for customizing this workflow\n\n- Change the urls in Set file urls node to use your own files\n- Customize the System Message on the AI Agent node to your use case to indicate what kind of knowledge is stored in Pinecone Assistant\n- Change the Top K and/or Snippet Size values to help manage token consumption by adding the Top K and/or Snippet Size parameters to Get context from Assistant node\n- Filter the context snippets even further by adding metadata filters to the Get context from Assistant node\n\n### Need help?\n\nYou can find help by asking in the [Pinecone Discord community](https://discord.gg/tJ8V62S3sH) or [filing an issue](https://github.com/pinecone-io/n8n-templates/issues/new/choose) on this repo.",
        "height": 1280,
        "width": 636
      },
      "id": "b65a87ae-090f-4362-92d0-1b4a91f539a9",
      "name": "Sticky Note7",
      "type": "n8n-nodes-base.stickyNote",
      "position": [0, 0],
      "typeVersion": 1
    },
    {
      "parameters": {},
      "type": "n8n-nodes-base.manualTrigger",
      "typeVersion": 1,
      "position": [928, 256],
      "id": "796a1e00-78ca-4dbe-82cd-c523f8fc3f4a",
      "name": "When clicking ‘Execute workflow’"
    },
    {
      "parameters": {
        "assignments": {
          "assignments": [
            {
              "id": "d0e724df-685f-4661-b2ec-3cdd3c2ba0f1",
              "name": "urls",
              "value": "[\"https://docs.pinecone.io/release-notes/2026.md\", \"https://docs.pinecone.io/release-notes/2025.md\", \"https://docs.pinecone.io/release-notes/2024.md\", \"https://docs.pinecone.io/release-notes/2023.md\", \"https://docs.pinecone.io/release-notes/2022.md\"]",
              "type": "array"
            }
          ]
        },
        "options": {}
      },
      "type": "n8n-nodes-base.set",
      "typeVersion": 3.4,
      "position": [1088, 256],
      "id": "466d063b-4b6a-487d-935c-14b2a8f08b49",
      "name": "Set file urls"
    },
    {
      "parameters": {
        "fieldToSplitOut": "urls",
        "options": {}
      },
      "type": "n8n-nodes-base.splitOut",
      "typeVersion": 1,
      "position": [1248, 256],
      "id": "e4a64e93-bf9c-4f05-97b8-3f9e65958ca3",
      "name": "Split to list"
    },
    {
      "parameters": {
        "url": "={{ $json.urls }}",
        "options": {
          "response": {
            "response": {
              "responseFormat": "text"
            }
          }
        }
      },
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 4.2,
      "position": [1408, 256],
      "id": "d84c0b2d-06d3-405e-b6c8-afdbd18f3d83",
      "name": "Download file"
    },
    {
      "parameters": {
        "operation": "toText",
        "sourceProperty": "data",
        "options": {
          "fileName": "={{ $('Split to list').item.json.urls }}"
        }
      },
      "type": "n8n-nodes-base.convertToFile",
      "typeVersion": 1.1,
      "position": [1568, 256],
      "id": "30666a64-1671-4069-8af3-1cd0c2f97302",
      "name": "Convert to md file"
    },
    {
      "parameters": {
        "resource": "file",
        "operation": "uploadFile",
        "assistantData": "{\"name\":\"n8n-assistant\",\"host\":\"https://prod-1-data.ke.pinecone.io\"}",
        "externalFileId": "={{ $('Split to list').item.json.urls }}",
        "additionalFields": {
          "sourceTag": "n8n:n8n_nodes_pinecone_assistant:quickstart"
        }
      },
      "type": "@pinecone-database/n8n-nodes-pinecone-assistant.pineconeAssistant",
      "typeVersion": 1,
      "position": [1776, 256],
      "id": "591a71fe-4afa-4d06-8053-a2c1b81c026d",
      "name": "Upload file to Assistant",
      "credentials": {
        "pineconeAssistantApi": {
          "id": "GwHylpB6WoRjIGlA",
          "name": "Pinecone Assistant account"
        }
      }
    },
    {
      "parameters": {
        "assistantData": "{\"name\":\"n8n-assistant\",\"host\":\"https://prod-1-data.ke.pinecone.io\"}",
        "additionalFields": {
          "sourceTag": "n8n:n8n_nodes_pinecone_assistant:quickstart"
        }
      },
      "type": "@pinecone-database/n8n-nodes-pinecone-assistant.pineconeAssistantTool",
      "typeVersion": 1,
      "position": [1376, 1024],
      "id": "80ff0124-519b-421b-ba00-a7a11aa6af38",
      "name": "Get context from Assistant",
      "credentials": {
        "pineconeAssistantApi": {
          "id": "GwHylpB6WoRjIGlA",
          "name": "Pinecone Assistant account"
        }
      }
    }
  ],
  "connections": {
    "OpenAI Chat Model": {
      "ai_languageModel": [
        [
          {
            "node": "AI Agent",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    },
    "Chat input": {
      "main": [
        [
          {
            "node": "AI Agent",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "When clicking ‘Execute workflow’": {
      "main": [
        [
          {
            "node": "Set file urls",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Set file urls": {
      "main": [
        [
          {
            "node": "Split to list",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Split to list": {
      "main": [
        [
          {
            "node": "Download file",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Download file": {
      "main": [
        [
          {
            "node": "Convert to md file",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Convert to md file": {
      "main": [
        [
          {
            "node": "Upload file to Assistant",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Get context from Assistant": {
      "ai_tool": [
        [
          {
            "node": "AI Agent",
            "type": "ai_tool",
            "index": 0
          }
        ]
      ]
    }
  },
  "pinData": {},
  "meta": {
    "templateCredsSetupCompleted": true,
    "instanceId": "6ae6d27e57788e6797af2bf16734f2cac83ad4b8f8ebbf57445f55ef5d3a52da"
  }
}