Workflow: Extract personal data with a self-hosted LLM Mistral NeMo

Workflow Details

Download Workflow
{
    "id": "HMoUOg8J7RzEcslH",
    "meta": {
        "instanceId": "3f91626b10fcfa8a3d3ab8655534ff3e94151838fd2709ecd2dcb14afb3d061a",
        "templateCredsSetupCompleted": true
    },
    "name": "Extract personal data with a self-hosted LLM Mistral NeMo",
    "tags": [],
    "nodes": [
        {
            "id": "7e67ae65-88aa-4e48-aa63-2d3a4208cf4b",
            "name": "When chat message received",
            "type": "@n8n\/n8n-nodes-langchain.chatTrigger",
            "position": [
                -500,
                20
            ],
            "webhookId": "3a7b0ea1-47f3-4a94-8ff2-f5e1f3d9dc32",
            "parameters": {
                "options": {}
            },
            "typeVersion": 1.1
        },
        {
            "id": "e064921c-69e6-4cfe-a86e-4e3aa3a5314a",
            "name": "Ollama Chat Model",
            "type": "@n8n\/n8n-nodes-langchain.lmChatOllama",
            "position": [
                -280,
                420
            ],
            "parameters": {
                "model": "mistral-nemo:latest",
                "options": {
                    "useMLock": true,
                    "keepAlive": "2h",
                    "temperature": 0.1
                }
            },
            "credentials": {
                "ollamaApi": {
                    "id": "vgKP7LGys9TXZ0KK",
                    "name": "Ollama account"
                }
            },
            "typeVersion": 1
        },
        {
            "id": "fe1379da-a12e-4051-af91-9d67a7c9a76b",
            "name": "Auto-fixing Output Parser",
            "type": "@n8n\/n8n-nodes-langchain.outputParserAutofixing",
            "position": [
                -200,
                220
            ],
            "parameters": {
                "options": {
                    "prompt": "Instructions:\n--------------\n{instructions}\n--------------\nCompletion:\n--------------\n{completion}\n--------------\n\nAbove, the Completion did not satisfy the constraints given in the Instructions.\nError:\n--------------\n{error}\n--------------\n\nPlease try again. Please only respond with an answer that satisfies the constraints laid out in the Instructions:"
                }
            },
            "typeVersion": 1
        },
        {
            "id": "b6633b00-6ebb-43ca-8e5c-664a53548c17",
            "name": "Structured Output Parser",
            "type": "@n8n\/n8n-nodes-langchain.outputParserStructured",
            "position": [
                60,
                400
            ],
            "parameters": {
                "schemaType": "manual",
                "inputSchema": "{\n \"type\": \"object\",\n \"properties\": {\n \"name\": {\n \"type\": \"string\",\n \"description\": \"Name of the user\"\n },\n \"surname\": {\n \"type\": \"string\",\n \"description\": \"Surname of the user\"\n },\n \"commtype\": {\n \"type\": \"string\",\n \"enum\": [\"email\", \"phone\", \"other\"],\n \"description\": \"Method of communication\"\n },\n \"contacts\": {\n \"type\": \"string\",\n \"description\": \"Contact details. ONLY IF PROVIDED\"\n },\n \"timestamp\": {\n \"type\": \"string\",\n \"format\": \"date-time\",\n \"description\": \"When the communication occurred\"\n },\n \"subject\": {\n \"type\": \"string\",\n \"description\": \"Brief description of the communication topic\"\n }\n },\n \"required\": [\"name\", \"commtype\"]\n}"
            },
            "typeVersion": 1.2
        },
        {
            "id": "23681a6c-cf62-48cb-86ee-08d5ce39bc0a",
            "name": "Basic LLM Chain",
            "type": "@n8n\/n8n-nodes-langchain.chainLlm",
            "onError": "continueErrorOutput",
            "position": [
                -240,
                20
            ],
            "parameters": {
                "messages": {
                    "messageValues": [
                        {
                            "message": "=Please analyse the incoming user request. Extract information according to the JSON schema. Today is: \"{{ $now.toISO() }}\""
                        }
                    ]
                },
                "hasOutputParser": true
            },
            "typeVersion": 1.5
        },
        {
            "id": "8f4d1b4b-58c0-41ec-9636-ac555e440821",
            "name": "On Error",
            "type": "n8n-nodes-base.noOp",
            "position": [
                200,
                140
            ],
            "parameters": {},
            "typeVersion": 1
        },
        {
            "id": "f4d77736-4470-48b4-8f61-149e09b70e3e",
            "name": "Sticky Note",
            "type": "n8n-nodes-base.stickyNote",
            "position": [
                -560,
                -160
            ],
            "parameters": {
                "color": 2,
                "width": 960,
                "height": 500,
                "content": "## Update data source\nWhen you change the data source, remember to update the `Prompt Source (User Message)` setting in the **Basic LLM Chain node**."
            },
            "typeVersion": 1
        },
        {
            "id": "5fd273c8-e61d-452b-8eac-8ac4b7fff6c2",
            "name": "Sticky Note1",
            "type": "n8n-nodes-base.stickyNote",
            "position": [
                -560,
                340
            ],
            "parameters": {
                "color": 2,
                "width": 440,
                "height": 220,
                "content": "## Configure local LLM\nOllama offers additional settings \nto optimize model performance\nor memory usage."
            },
            "typeVersion": 1
        },
        {
            "id": "63cbf762-0134-48da-a6cd-0363e870decd",
            "name": "Sticky Note2",
            "type": "n8n-nodes-base.stickyNote",
            "position": [
                0,
                340
            ],
            "parameters": {
                "color": 2,
                "width": 400,
                "height": 220,
                "content": "## Define JSON Schema"
            },
            "typeVersion": 1
        },
        {
            "id": "9625294f-3cb4-4465-9dae-9976e0cf5053",
            "name": "Extract JSON Output",
            "type": "n8n-nodes-base.set",
            "position": [
                200,
                -80
            ],
            "parameters": {
                "mode": "raw",
                "options": {},
                "jsonOutput": "={{ $json.output }}\n"
            },
            "typeVersion": 3.4
        },
        {
            "id": "2c6fba3b-0ffe-4112-b904-823f52cc220b",
            "name": "Sticky Note3",
            "type": "n8n-nodes-base.stickyNote",
            "position": [
                -560,
                200
            ],
            "parameters": {
                "width": 960,
                "height": 120,
                "content": "If the LLM response does not pass \nthe **Structured Output Parser** checks,\n**Auto-Fixer** will call the model again with a different \nprompt to correct the original response."
            },
            "typeVersion": 1
        },
        {
            "id": "c73ba1ca-d727-4904-a5fd-01dd921a4738",
            "name": "Sticky Note6",
            "type": "n8n-nodes-base.stickyNote",
            "position": [
                -560,
                460
            ],
            "parameters": {
                "height": 80,
                "content": "The same LLM connects to both **Basic LLM Chain** and to the **Auto-fixing Output Parser**. \n"
            },
            "typeVersion": 1
        },
        {
            "id": "193dd153-8511-4326-aaae-47b89d0cd049",
            "name": "Sticky Note7",
            "type": "n8n-nodes-base.stickyNote",
            "position": [
                200,
                440
            ],
            "parameters": {
                "width": 200,
                "height": 100,
                "content": "When the LLM model responds, the output is checked in the **Structured Output Parser**"
            },
            "typeVersion": 1
        }
    ],
    "active": false,
    "pinData": {},
    "settings": {
        "executionOrder": "v1"
    },
    "versionId": "9f3721a8-f340-43d5-89e7-3175c29c2f3a",
    "connections": {
        "Basic LLM Chain": {
            "main": [
                [
                    {
                        "node": "Extract JSON Output",
                        "type": "main",
                        "index": 0
                    }
                ],
                [
                    {
                        "node": "On Error",
                        "type": "main",
                        "index": 0
                    }
                ]
            ]
        },
        "Ollama Chat Model": {
            "ai_languageModel": [
                [
                    {
                        "node": "Auto-fixing Output Parser",
                        "type": "ai_languageModel",
                        "index": 0
                    },
                    {
                        "node": "Basic LLM Chain",
                        "type": "ai_languageModel",
                        "index": 0
                    }
                ]
            ]
        },
        "Structured Output Parser": {
            "ai_outputParser": [
                [
                    {
                        "node": "Auto-fixing Output Parser",
                        "type": "ai_outputParser",
                        "index": 0
                    }
                ]
            ]
        },
        "Auto-fixing Output Parser": {
            "ai_outputParser": [
                [
                    {
                        "node": "Basic LLM Chain",
                        "type": "ai_outputParser",
                        "index": 0
                    }
                ]
            ]
        },
        "When chat message received": {
            "main": [
                [
                    {
                        "node": "Basic LLM Chain",
                        "type": "main",
                        "index": 0
                    }
                ]
            ]
        }
    }
}
Back to Workflows

Related Workflows

Auto WordPress Blog Generator (GPT + Postgres + WP Media)
View
Build a Phone Agent to qualify outbound leads and inbound calls with RetellAI - video
View
Standup Bot - Initialize
View
Respondtowebhook Form Automation Webhook
View
Import Odoo Product Images from Google Drive
View
Splitout Code Create Webhook
View
HubSpot Mailchimp Create Scheduled
View
Create a contact in Drift
View
Googletranslate Noop Create Webhook
View
Telegram AI-bot
View