adding the workflow collection to git
workflows/1980_workflow_1980.json (Normal file, 105 lines)
@@ -0,0 +1,105 @@
{
  "meta": {
    "instanceId": "408f9fb9940c3cb18ffdef0e0150fe342d6e655c3a9fac21f0f644e8bedabcd9",
    "templateCredsSetupCompleted": true
  },
  "nodes": [
    {
      "id": "27e5f0c0-ba88-4c28-b3be-99c973be15cb",
      "name": "Sticky Note",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        -480,
        -140
      ],
      "parameters": {
        "width": 1083,
        "height": 357,
"content": "## This is an example of basic LLM Chain connected to an open-source model\n### The Chain is connected to the Mistral-7B-Instruct-v0.1 model, but you can change this\n\nPlease note the initial prompt that guides the model:\n```\nYou are a helpful assistant.\nPlease reply politely to the users.\nUse emojis and a text.\nQ: {{ $json.input }}\nA: \n```\n\nThis way the model \"knows\" that it needs to answer the question right after the `A: `.\n\nSince Hugging Face node is this is an inference mode, it does not support LangChain Agents at the moment. Please use [Ollama Chat Model](https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatollama/) node for that"
      },
      "typeVersion": 1
    },
    {
      "id": "4756d5a8-7027-4942-b214-a5ff8310869a",
      "name": "When chat message received",
      "type": "@n8n/n8n-nodes-langchain.chatTrigger",
      "position": [
        -200,
        280
      ],
      "webhookId": "bf2e38b8-566a-4aeb-8efe-28240f4a6991",
      "parameters": {
        "options": {}
      },
      "typeVersion": 1.1
    },
    {
      "id": "20a36351-8579-4ac6-9746-526b072aeaa6",
      "name": "Basic LLM Chain",
      "type": "@n8n/n8n-nodes-langchain.chainLlm",
      "position": [
        20,
        280
      ],
      "parameters": {
        "messages": {
          "messageValues": [
            {
              "message": "=You are a helpful assistant. Please reply politely to the users. Use emojis and a text."
            }
          ]
        }
      },
      "typeVersion": 1.5
    },
    {
      "id": "9b88e307-3ad5-4167-8c5f-e5827f7444ac",
      "name": "Hugging Face Inference Model",
      "type": "@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference",
      "position": [
        120,
        440
      ],
      "parameters": {
        "model": "mistralai/Mistral-7B-Instruct-v0.1",
        "options": {
          "maxTokens": 512,
          "temperature": 0.8,
          "frequencyPenalty": 2
        }
      },
      "credentials": {
        "huggingFaceApi": {
          "id": "ARQ5mOhvBxi283Qk",
          "name": "HuggingFaceApi account"
        }
      },
      "typeVersion": 1
    }
  ],
  "pinData": {},
  "connections": {
    "When chat message received": {
      "main": [
        [
          {
            "node": "Basic LLM Chain",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Hugging Face Inference Model": {
      "ai_languageModel": [
        [
          {
            "node": "Basic LLM Chain",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    }
  }
}
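For context, here is a minimal standalone sketch (plain Python, outside n8n) of what the wiring above amounts to at runtime: the chat input is wrapped in the sticky note's "Q: ... A: " prompt and sent to the Hugging Face Inference API for mistralai/Mistral-7B-Instruct-v0.1 with roughly the same generation options (512 max tokens, temperature 0.8). The endpoint URL, the HF_API_TOKEN variable, the ask() helper, and the exact parameter names are assumptions about the public HF text-generation API, not something taken from the workflow file itself.

```python
import os
import requests

# Standalone sketch: replicates what the "Basic LLM Chain" +
# "Hugging Face Inference Model" nodes do for a single chat input.
# Assumes the public HF Inference API endpoint and an HF_API_TOKEN env var.
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.1"
HEADERS = {"Authorization": f"Bearer {os.environ['HF_API_TOKEN']}"}


def ask(user_input: str) -> str:
    # Same prompt shape as the sticky note: system text, then "Q: ... A: ",
    # so the model completes the answer right after "A: ".
    prompt = (
        "You are a helpful assistant.\n"
        "Please reply politely to the users.\n"
        "Use emojis and a text.\n"
        f"Q: {user_input}\n"
        "A: "
    )
    payload = {
        "inputs": prompt,
        # Mirrors the node's options: maxTokens 512, temperature 0.8.
        # (frequencyPenalty has no direct equivalent in this API;
        # repetition_penalty would be the closest parameter.)
        "parameters": {
            "max_new_tokens": 512,
            "temperature": 0.8,
            "return_full_text": False,
        },
    }
    response = requests.post(API_URL, headers=HEADERS, json=payload, timeout=60)
    response.raise_for_status()
    return response.json()[0]["generated_text"].strip()


if __name__ == "__main__":
    print(ask("What is n8n?"))
```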