Compare commits
17 Commits
dev_roeiba...dev_json

| Author | SHA1 | Date |
|---|---|---|
|  | 683db34625 |  |
|  | a1346dcce3 |  |
|  | 9f34f0073d |  |
|  | e0a92adcbc |  |
|  | 30ea8fcb74 |  |
|  | bdad1cd2e9 |  |
|  | 8a54e700d3 |  |
|  | f667fed3fc |  |
|  | b70d844fe3 |  |
|  | 2e4c33ef18 |  |
|  | b068ce2ab8 |  |
|  | b574577a1c |  |
|  | 71f3e50c73 |  |
|  | ac1c90b26e |  |
|  | 99f9db0639 |  |
|  | ab584af811 |  |
|  | cf308dee02 |  |
.github/workflows/validate-workflows.yml (vendored, normal file, 133 lines)
@@ -0,0 +1,133 @@
name: Validate n8n Workflows

on:
  push:
    branches: [ main, master ]
  pull_request:
    branches: [ main, master ]
  workflow_dispatch: # Allow manual triggering

jobs:
  validate-workflows:
    name: Validate n8n Workflows
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'

      - name: Install dependencies
        working-directory: ./lib
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -e .

      - name: Run workflow validation
        id: validate
        working-directory: ./lib
        run: |
          # Run the validator on all JSON files in the repository
          # This will fail if any workflow is invalid
          echo "Validating all n8n workflows..."
          if ! n8n-validate ..; then
            echo "::error::One or more workflow validations failed"
            exit 1
          fi
          echo "All workflows are valid!"

      - name: Create visualization artifacts
        if: always() # Run this step even if validation fails
        working-directory: ./lib
        run: |
          echo "Creating visualizations for all workflows..."
          mkdir -p ../workflow-visualizations

          # Find all JSON files that might be n8n workflows
          find .. -type f -name "*.json" -not -path "*/node_modules/*" -not -path "*/.git/*" -not -path "*/workflow-visualizations/*" | while read -r file; do
            # Try to validate the file first
            if n8n-validate "$file" 2>/dev/null; then
              # If validation passes, create a visualization
              echo "Creating visualization for $file"
              filename=$(basename "$file" .json)
              output_file="../workflow-visualizations/${filename}.png"
              if ! n8n-visualize "$file" -o "$output_file" --no-show 2>/dev/null; then
                echo "::warning::Failed to create visualization for $file"
              fi
            fi
          done

          # Count the number of visualizations created
          VIS_COUNT=$(find ../workflow-visualizations -type f -name "*.png" | wc -l)
          echo "Created $VIS_COUNT workflow visualizations"

          # Set an output with the visualization count
          echo "visualization_count=$VIS_COUNT" >> $GITHUB_OUTPUT

      - name: Upload workflow visualizations
        if: always() && steps.validate.outcome == 'success'
        uses: actions/upload-artifact@v4
        with:
          name: workflow-visualizations
          path: workflow-visualizations/
          if-no-files-found: ignore
          retention-days: 7

      - name: Comment on PR with validation results
        if: github.event_name == 'pull_request' && steps.validate.outcome == 'success'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const { execSync } = require('child_process');

            // Get the list of workflow files that were validated
            const workflowFiles = execSync('find .. -type f -name "*.json" -not -path "*/node_modules/*" -not -path "*/.git/*" -not -path "*/workflow-visualizations/*"')
              .toString()
              .split('\n')
              .filter(Boolean);

            // Count visualizations
            let visCount = 0;
            try {
              visCount = fs.readdirSync('../workflow-visualizations').length;
            } catch (e) {
              // Directory might not exist if no visualizations were created
            }

            // Create a comment
            const comment = `✅ All ${workflowFiles.length} n8n workflow files are valid!\n` +
              `📊 ${visCount} workflow visualizations were generated and attached as artifacts.`;

            // Add a comment to the PR
            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
            });

            const botComment = comments.find(comment =>
              comment.user.login === 'github-actions[bot]' &&
              comment.body.includes('n8n workflow')
            );

            if (botComment) {
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: botComment.id,
                body: comment,
              });
            } else {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                body: comment,
              });
            }
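The validation step above relies on an n8n-validate CLI (with an n8n-visualize companion) installed from ./lib; that package is not part of this diff. As a rough sketch only, a standalone checker covering the same kind of structural validation the job depends on might look like the following Python script. The validate_workflow helper and the specific checks are illustrative assumptions, not the real ./lib implementation.

#!/usr/bin/env python3
"""Illustrative stand-in for the structural checks n8n-validate is assumed to run."""
import json
import sys
from pathlib import Path


def validate_workflow(path: Path) -> list:
    """Return a list of problems found in one exported n8n workflow file."""
    try:
        data = json.loads(path.read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError) as exc:
        return ["%s: unreadable or invalid JSON (%s)" % (path, exc)]
    if not isinstance(data, dict) or "nodes" not in data or "connections" not in data:
        return []  # not an n8n workflow export; skip instead of failing
    errors = []
    names = [node.get("name") for node in data["nodes"]]
    if None in names or len(names) != len(set(names)):
        errors.append("%s: missing or duplicate node names" % path)
    for source, outputs in data["connections"].items():
        if source not in names:
            errors.append("%s: connection from unknown node '%s'" % (path, source))
        for branches in outputs.values():
            for branch in branches:
                for link in branch or []:
                    if link.get("node") not in names:
                        errors.append("%s: connection to unknown node '%s'" % (path, link.get("node")))
    return errors


if __name__ == "__main__":
    root = Path(sys.argv[1]) if len(sys.argv) > 1 else Path(".")
    problems = []
    for wf in root.rglob("*.json"):
        if {"node_modules", ".git", "workflow-visualizations"} & set(wf.parts):
            continue
        problems.extend(validate_workflow(wf))
    for problem in problems:
        print(problem)
    sys.exit(1 if problems else 0)

Exiting with code 1 when any problem is found mirrors how the CI step treats a non-zero n8n-validate exit.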
AI product imagines.json (normal file, 609 lines)
@@ -0,0 +1,609 @@
|
||||
{
|
||||
"name": "AI product Images",
|
||||
"nodes": [
|
||||
{
|
||||
"parameters": {},
|
||||
"type": "n8n-nodes-base.manualTrigger",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
0,
|
||||
0
|
||||
],
|
||||
"id": "1ddfbdfd-f3c0-4ef5-8b48-a3ae77a92955",
|
||||
"name": "When clicking ‘Test workflow’"
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"resource": "fileFolder",
|
||||
"filter": {
|
||||
"folderId": {
|
||||
"__rl": true,
|
||||
"value": "1NQ_9HXkMrjm_DPZENAmwRo0JJkvQm8BU",
|
||||
"mode": "id"
|
||||
}
|
||||
},
|
||||
"options": {}
|
||||
},
|
||||
"type": "n8n-nodes-base.googleDrive",
|
||||
"typeVersion": 3,
|
||||
"position": [
|
||||
220,
|
||||
0
|
||||
],
|
||||
"id": "6feb317d-1d43-4174-a5a3-b9e4a2bf46e6",
|
||||
"name": "Google Drive",
|
||||
"credentials": {
|
||||
"googleDriveOAuth2Api": {
|
||||
"id": "mVYRcVX1PvkdODpc",
|
||||
"name": "Google Drive account"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "download",
|
||||
"fileId": {
|
||||
"__rl": true,
|
||||
"value": "={{$json[\"id\"]}}",
|
||||
"mode": "id"
|
||||
},
|
||||
"options": {}
|
||||
},
|
||||
"type": "n8n-nodes-base.googleDrive",
|
||||
"typeVersion": 3,
|
||||
"position": [
|
||||
460,
|
||||
0
|
||||
],
|
||||
"id": "b5232274-25a6-43b7-a424-dcfff47057ba",
|
||||
"name": "Google Drive1",
|
||||
"credentials": {
|
||||
"googleDriveOAuth2Api": {
|
||||
"id": "mVYRcVX1PvkdODpc",
|
||||
"name": "Google Drive account"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"resource": "image",
|
||||
"operation": "analyze",
|
||||
"modelId": {
|
||||
"__rl": true,
|
||||
"value": "gpt-4o",
|
||||
"mode": "list",
|
||||
"cachedResultName": "GPT-4O"
|
||||
},
|
||||
"text": "Describe the visual style of this image, what stands out. if you had to have a holistic overview, as a professional facebook ads designer. How would you explain this image / or images to be able to reproduce the elements that make it work for other ads.\n\nThe core goal of the output here should be to create a template of the style for inspirations. As later we will take ideas from these to generate our own high converting facebook ads.\n\nensure you do not make this product specific, rather focusing on creating outlines for static ad styles. so keep ti vague in terms of what exactly is in the ad, but rather the principles of the ad",
|
||||
"inputType": "base64",
|
||||
"options": {}
|
||||
},
|
||||
"type": "@n8n/n8n-nodes-langchain.openAi",
|
||||
"typeVersion": 1.8,
|
||||
"position": [
|
||||
720,
|
||||
-20
|
||||
],
|
||||
"id": "ff9ab47d-5980-4d2d-ab5e-1e76d0df87ab",
|
||||
"name": "OpenAI",
|
||||
"credentials": {
|
||||
"openAiApi": {
|
||||
"id": "hLlMCh2BqN9e4ile",
|
||||
"name": "OpenAi account"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"resource": "fileFolder",
|
||||
"searchMethod": "query",
|
||||
"filter": {
|
||||
"folderId": {
|
||||
"__rl": true,
|
||||
"value": "11t72SNGpHJvGk-UurfuLwGpdMz37-cwW",
|
||||
"mode": "id"
|
||||
}
|
||||
},
|
||||
"options": {}
|
||||
},
|
||||
"type": "n8n-nodes-base.googleDrive",
|
||||
"typeVersion": 3,
|
||||
"position": [
|
||||
220,
|
||||
240
|
||||
],
|
||||
"id": "b5e0b994-cde8-4069-bdac-3173ecf8ccfb",
|
||||
"name": "Google Drive2",
|
||||
"credentials": {
|
||||
"googleDriveOAuth2Api": {
|
||||
"id": "mVYRcVX1PvkdODpc",
|
||||
"name": "Google Drive account"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "download",
|
||||
"fileId": {
|
||||
"__rl": true,
|
||||
"value": "={{$json[\"id\"]}}",
|
||||
"mode": "id"
|
||||
},
|
||||
"options": {}
|
||||
},
|
||||
"type": "n8n-nodes-base.googleDrive",
|
||||
"typeVersion": 3,
|
||||
"position": [
|
||||
460,
|
||||
240
|
||||
],
|
||||
"id": "41b44442-46d1-40b7-9a26-57f410d0426b",
|
||||
"name": "Google Drive3",
|
||||
"credentials": {
|
||||
"googleDriveOAuth2Api": {
|
||||
"id": "mVYRcVX1PvkdODpc",
|
||||
"name": "Google Drive account"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"resource": "image",
|
||||
"operation": "analyze",
|
||||
"modelId": {
|
||||
"__rl": true,
|
||||
"value": "gpt-4o",
|
||||
"mode": "list",
|
||||
"cachedResultName": "GPT-4O"
|
||||
},
|
||||
"text": "Analyse our product image. Identify the core emotions behind it and the main product. we will use this later to connect the product image with some ad styles and generate our own ads",
|
||||
"inputType": "base64",
|
||||
"options": {}
|
||||
},
|
||||
"type": "@n8n/n8n-nodes-langchain.openAi",
|
||||
"typeVersion": 1.8,
|
||||
"position": [
|
||||
700,
|
||||
240
|
||||
],
|
||||
"id": "1eea9b08-38d1-48dc-b578-88d0818de342",
|
||||
"name": "OpenAI1",
|
||||
"credentials": {
|
||||
"openAiApi": {
|
||||
"id": "hLlMCh2BqN9e4ile",
|
||||
"name": "OpenAi account"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"model": {
|
||||
"__rl": true,
|
||||
"value": "gpt-4",
|
||||
"mode": "list",
|
||||
"cachedResultName": "gpt-4"
|
||||
},
|
||||
"options": {}
|
||||
},
|
||||
"type": "@n8n/n8n-nodes-langchain.lmChatOpenAi",
|
||||
"typeVersion": 1.2,
|
||||
"position": [
|
||||
1040,
|
||||
420
|
||||
],
|
||||
"id": "6a97f2fe-fc62-4591-bb03-37d4ae24343c",
|
||||
"name": "OpenAI Chat Model",
|
||||
"credentials": {
|
||||
"openAiApi": {
|
||||
"id": "hLlMCh2BqN9e4ile",
|
||||
"name": "OpenAi account"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"promptType": "define",
|
||||
"text": "=You’ve been given an outline that includes: \n\n(use all the data from here when creating the prompts {{ $json.choices[0].message.content }} - it is also critical our product image is displayed in here: {{ $('OpenAI1').item.json.content }} )\n\nWhat the product is and who it’s for\n\nWhat’s visible in the product image (e.g. background, angle, lighting)\n\nPatterns and emotional triggers from top-performing ad examples\n\nYour task:\nUsing this outline, generate 10 image ad prompts that follow this format exactly:\n\nprompt: [Detailed visual description of the ad concept]\n\nEach prompt should include:\n\nA bold, testimonial-style headline (in quotation marks at the top)\n\nA clear description of the emotional transformation the image should convey\n\nWhat is shown in the image (body parts, facial expressions, environment, etc.)\n\nWhere and how the product is placed\n\nThe color palette and visual tone (e.g., icy blue for calm, red for pain)\n\nWhere the headline should appear on the image\n\nRendering style (e.g., cinematic, hyperrealistic, dramatic shadows)\n\nFinal format for Facebook or Instagram (1:1 format always)\n\nRepeat this 10 times. Each one must be unique, emotionally powerful, and visually clear.",
|
||||
"hasOutputParser": true,
|
||||
"options": {}
|
||||
},
|
||||
"type": "@n8n/n8n-nodes-langchain.agent",
|
||||
"typeVersion": 1.9,
|
||||
"position": [
|
||||
1120,
|
||||
240
|
||||
],
|
||||
"id": "16138c72-2b3d-4c4b-9d11-ad91d15c2e4c",
|
||||
"name": "AI prompt agent"
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"jsonSchemaExample": "[\n {\n \"Prompt\": \"Sun-drenched poolside shot of the product on a marble ledge at golden hour, with soft shadows and warm tones. Aspect ratio 1:1.\"\n },\n {\n \"Prompt\": \"Cool lavender-tinted sunset beach backdrop behind the product, highlighting reflective metallic accents. Aspect ratio 4:5.\"\n },\n {\n \"Prompt\": \"...\"\n }\n]\n"
|
||||
},
|
||||
"type": "@n8n/n8n-nodes-langchain.outputParserStructured",
|
||||
"typeVersion": 1.2,
|
||||
"position": [
|
||||
1380,
|
||||
420
|
||||
],
|
||||
"id": "8fbfad99-d724-45a9-9b3e-e45e486dcfc5",
|
||||
"name": "Structured Output Parser"
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"fieldToSplitOut": "output",
|
||||
"options": {}
|
||||
},
|
||||
"type": "n8n-nodes-base.splitOut",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1560,
|
||||
160
|
||||
],
|
||||
"id": "508946cd-0aed-4858-bf5a-c25e64371ea4",
|
||||
"name": "Split Out"
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/images/generations",
|
||||
"sendHeaders": true,
|
||||
"headerParameters": {
|
||||
"parameters": [
|
||||
{
|
||||
"name": "Authorization",
|
||||
"value": "Bearer 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"
|
||||
},
|
||||
{
|
||||
"name": "Content-type",
|
||||
"value": "application/json"
|
||||
}
|
||||
]
|
||||
},
|
||||
"sendBody": true,
|
||||
"bodyParameters": {
|
||||
"parameters": [
|
||||
{
|
||||
"name": "model",
|
||||
"value": "gpt-image-1"
|
||||
},
|
||||
{
|
||||
"name": "prompt",
|
||||
"value": "={{ $json.Prompt }}"
|
||||
},
|
||||
{
|
||||
"name": "size",
|
||||
"value": "1024x1024"
|
||||
}
|
||||
]
|
||||
},
|
||||
"options": {}
|
||||
},
|
||||
"type": "n8n-nodes-base.httpRequest",
|
||||
"typeVersion": 4.2,
|
||||
"position": [
|
||||
2320,
|
||||
160
|
||||
],
|
||||
"id": "ab08c200-9ff9-4dd9-86be-09b8f1e219a9",
|
||||
"name": "HTTP Request1"
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"options": {}
|
||||
},
|
||||
"type": "n8n-nodes-base.splitInBatches",
|
||||
"typeVersion": 3,
|
||||
"position": [
|
||||
1840,
|
||||
160
|
||||
],
|
||||
"id": "dc69258e-7c57-4158-92af-7257ba85102e",
|
||||
"name": "Loop Over Items1"
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"amount": 15
|
||||
},
|
||||
"type": "n8n-nodes-base.wait",
|
||||
"typeVersion": 1.1,
|
||||
"position": [
|
||||
2100,
|
||||
160
|
||||
],
|
||||
"id": "1af8885e-00a3-49f1-b159-1a02eba84a84",
|
||||
"name": "Wait",
|
||||
"webhookId": "9f2950cd-2ab2-405f-83d7-4f44e15e16f2"
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "toBinary",
|
||||
"sourceProperty": "data[0].b64_json",
|
||||
"options": {
|
||||
"fileName": "image.png",
|
||||
"mimeType": "image/png"
|
||||
}
|
||||
},
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1.1,
|
||||
"position": [
|
||||
2540,
|
||||
160
|
||||
],
|
||||
"id": "535a5a20-11ab-476a-be3b-07e23073d5f5",
|
||||
"name": "Convert to File"
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"modelId": {
|
||||
"__rl": true,
|
||||
"value": "gpt-4",
|
||||
"mode": "list",
|
||||
"cachedResultName": "GPT-4"
|
||||
},
|
||||
"messages": {
|
||||
"values": [
|
||||
{
|
||||
"content": "=Analyse this prompt which is a template of a high converting facebook ad we have built {{ $json.content }}\n\nNow we will take this template, and add our product in as the hero for our ads{{ $json.content}}\n\nensure the template is applied to make our product the hereo, with all copy, colours and vibe being focused on making the product stand out, in a facebook ad, deisgn to convert.\n\nMake a relevant prompt & outline as our next step in the flow is to break down this prompt (combining the facebook ad visual style, with our product image) and spit it into 10 prompts, all to create individual statics.\n\nDo what you think is best to pass this information forward.",
|
||||
"role": "system"
|
||||
},
|
||||
{
|
||||
"content": "Put both pieces of data together and only generate 1 output prompt\n"
|
||||
}
|
||||
]
|
||||
},
|
||||
"simplify": false,
|
||||
"options": {}
|
||||
},
|
||||
"type": "@n8n/n8n-nodes-langchain.openAi",
|
||||
"typeVersion": 1.8,
|
||||
"position": [
|
||||
1020,
|
||||
-20
|
||||
],
|
||||
"id": "a01ff233-7e6f-456e-b691-f54a5c73aee0",
|
||||
"name": "OpenAI2",
|
||||
"executeOnce": false,
|
||||
"alwaysOutputData": false,
|
||||
"credentials": {
|
||||
"openAiApi": {
|
||||
"id": "CiqduZPbaJF5yveA",
|
||||
"name": "OpenAi account 2"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"driveId": {
|
||||
"__rl": true,
|
||||
"mode": "list",
|
||||
"value": "My Drive"
|
||||
},
|
||||
"folderId": {
|
||||
"__rl": true,
|
||||
"value": "1V_USzVT-v-6LIjk3HPd0nlr2vnv4nJAr",
|
||||
"mode": "list",
|
||||
"cachedResultName": "n8n testing",
|
||||
"cachedResultUrl": "https://drive.google.com/drive/folders/1V_USzVT-v-6LIjk3HPd0nlr2vnv4nJAr"
|
||||
},
|
||||
"options": {}
|
||||
},
|
||||
"type": "n8n-nodes-base.googleDrive",
|
||||
"typeVersion": 3,
|
||||
"position": [
|
||||
2780,
|
||||
160
|
||||
],
|
||||
"id": "3deb2f20-abc3-439e-b181-24c1956a4657",
|
||||
"name": "Google Drive4",
|
||||
"credentials": {
|
||||
"googleDriveOAuth2Api": {
|
||||
"id": "mVYRcVX1PvkdODpc",
|
||||
"name": "Google Drive account"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"pinData": {},
|
||||
"connections": {
|
||||
"When clicking ‘Test workflow’": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Google Drive",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Google Drive": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Google Drive1",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Google Drive1": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "OpenAI",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Google Drive2": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Google Drive3",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Google Drive3": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "OpenAI1",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"OpenAI": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Google Drive2",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"OpenAI1": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "OpenAI2",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"OpenAI Chat Model": {
|
||||
"ai_languageModel": [
|
||||
[
|
||||
{
|
||||
"node": "AI prompt agent",
|
||||
"type": "ai_languageModel",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"AI prompt agent": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Split Out",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Structured Output Parser": {
|
||||
"ai_outputParser": [
|
||||
[
|
||||
{
|
||||
"node": "AI prompt agent",
|
||||
"type": "ai_outputParser",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Split Out": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Loop Over Items1",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"HTTP Request1": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Convert to File",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Loop Over Items1": {
|
||||
"main": [
|
||||
[],
|
||||
[
|
||||
{
|
||||
"node": "Wait",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Wait": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "HTTP Request1",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Google Drive4",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"OpenAI2": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "AI prompt agent",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Google Drive4": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Loop Over Items1",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"active": false,
|
||||
"settings": {
|
||||
"executionOrder": "v1"
|
||||
},
|
||||
"versionId": "85462564-6f39-41df-b09c-9507c177c96a",
|
||||
"meta": {
|
||||
"templateCredsSetupCompleted": true,
|
||||
"instanceId": "aa305389a9f146cc99db373653903c47ecf7fe4bb66df261da9bedf94add0f72"
|
||||
},
|
||||
"id": "XZrlhnSYaHKcAdp4",
|
||||
"tags": []
|
||||
}
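For readers tracing the generation branch of the workflow above, the HTTP Request1 node POSTs each prompt to OpenAI's images endpoint and Convert to File decodes data[0].b64_json into image.png. A minimal Python sketch of that pair of steps follows; reading the key from an OPENAI_API_KEY environment variable and the generate_image helper name are assumptions for the sketch (the exported node hard-codes a bearer token in its headers, which also ships the credential with the JSON — an n8n credential or environment variable avoids that).

"""Sketch of what the HTTP Request1 + Convert to File nodes do together."""
import base64
import json
import os
import urllib.request


def generate_image(prompt: str, out_path: str = "image.png") -> None:
    payload = json.dumps({
        "model": "gpt-image-1",   # same model the node sends
        "prompt": prompt,         # the node passes ={{ $json.Prompt }}
        "size": "1024x1024",
    }).encode("utf-8")
    request = urllib.request.Request(
        "https://api.openai.com/v1/images/generations",
        data=payload,
        headers={
            "Authorization": "Bearer " + os.environ["OPENAI_API_KEY"],
            "Content-Type": "application/json",
        },
        method="POST",
    )
    with urllib.request.urlopen(request) as response:
        body = json.load(response)
    # Convert to File reads data[0].b64_json and writes image.png
    with open(out_path, "wb") as fh:
        fh.write(base64.b64decode(body["data"][0]["b64_json"]))


if __name__ == "__main__":
    generate_image("Sun-drenched poolside shot of the product on a marble ledge at golden hour")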
@@ -1,544 +0,0 @@
|
||||
{
|
||||
"id": "itzURpN5wbUNOXOw",
|
||||
"meta": {
|
||||
"instanceId": "205b3bc06c96f2dc835b4f00e1cbf9a937a74eeb3b47c99d0c30b0586dbf85aa"
|
||||
},
|
||||
"name": "[2/2] KNN classifier (lands dataset)",
|
||||
"tags": [
|
||||
{
|
||||
"id": "QN7etptCmdcGIpkS",
|
||||
"name": "classifier",
|
||||
"createdAt": "2024-12-08T22:08:15.968Z",
|
||||
"updatedAt": "2024-12-09T19:25:04.113Z"
|
||||
}
|
||||
],
|
||||
"nodes": [
|
||||
{
|
||||
"id": "33373ccb-164e-431c-8a9a-d68668fc70be",
|
||||
"name": "Embed image",
|
||||
"type": "n8n-nodes-base.httpRequest",
|
||||
"position": [
|
||||
-140,
|
||||
-240
|
||||
],
|
||||
"parameters": {
|
||||
"url": "https://api.voyageai.com/v1/multimodalembeddings",
|
||||
"method": "POST",
|
||||
"options": {},
|
||||
"jsonBody": "={{\n{\n \"inputs\": [\n {\n \"content\": [\n {\n \"type\": \"image_url\",\n \"image_url\": $json.imageURL\n }\n ]\n }\n ],\n \"model\": \"voyage-multimodal-3\",\n \"input_type\": \"document\"\n}\n}}",
|
||||
"sendBody": true,
|
||||
"specifyBody": "json",
|
||||
"authentication": "genericCredentialType",
|
||||
"genericAuthType": "httpHeaderAuth"
|
||||
},
|
||||
"credentials": {
|
||||
"httpHeaderAuth": {
|
||||
"id": "Vb0RNVDnIHmgnZOP",
|
||||
"name": "Voyage API"
|
||||
}
|
||||
},
|
||||
"typeVersion": 4.2
|
||||
},
|
||||
{
|
||||
"id": "58adecfa-45c7-4928-b850-053ea6f3b1c5",
|
||||
"name": "Query Qdrant",
|
||||
"type": "n8n-nodes-base.httpRequest",
|
||||
"position": [
|
||||
440,
|
||||
-240
|
||||
],
|
||||
"parameters": {
|
||||
"url": "={{ $json.qdrantCloudURL }}/collections/{{ $json.collectionName }}/points/query",
|
||||
"method": "POST",
|
||||
"options": {},
|
||||
"jsonBody": "={{\n{\n \"query\": $json.ImageEmbedding,\n \"using\": \"voyage\",\n \"limit\": $json.limitKNN,\n \"with_payload\": true\n}\n}}",
|
||||
"sendBody": true,
|
||||
"specifyBody": "json",
|
||||
"authentication": "predefinedCredentialType",
|
||||
"nodeCredentialType": "qdrantApi"
|
||||
},
|
||||
"credentials": {
|
||||
"qdrantApi": {
|
||||
"id": "it3j3hP9FICqhgX6",
|
||||
"name": "QdrantApi account"
|
||||
}
|
||||
},
|
||||
"typeVersion": 4.2
|
||||
},
|
||||
{
|
||||
"id": "258026b7-2dda-4165-bfe1-c4163b9caf78",
|
||||
"name": "Majority Vote",
|
||||
"type": "n8n-nodes-base.code",
|
||||
"position": [
|
||||
840,
|
||||
-240
|
||||
],
|
||||
"parameters": {
|
||||
"language": "python",
|
||||
"pythonCode": "from collections import Counter\n\ninput_json = _input.all()[0]\npoints = input_json['json']['result']['points']\nmajority_vote_two_most_common = Counter([point[\"payload\"][\"landscape_name\"] for point in points]).most_common(2)\n\nreturn [{\n \"json\": {\n \"result\": majority_vote_two_most_common \n }\n}]\n"
|
||||
},
|
||||
"typeVersion": 2
|
||||
},
|
||||
{
|
||||
"id": "e83e7a0c-cb36-46d0-8908-86ee1bddf638",
|
||||
"name": "Increase limitKNN",
|
||||
"type": "n8n-nodes-base.set",
|
||||
"position": [
|
||||
1240,
|
||||
-240
|
||||
],
|
||||
"parameters": {
|
||||
"options": {},
|
||||
"assignments": {
|
||||
"assignments": [
|
||||
{
|
||||
"id": "0b5d257b-1b27-48bc-bec2-78649bc844cc",
|
||||
"name": "limitKNN",
|
||||
"type": "number",
|
||||
"value": "={{ $('Propagate loop variables').item.json.limitKNN + 5}}"
|
||||
},
|
||||
{
|
||||
"id": "afee4bb3-f78b-4355-945d-3776e33337a4",
|
||||
"name": "ImageEmbedding",
|
||||
"type": "array",
|
||||
"value": "={{ $('Qdrant variables + embedding + KNN neigbours').first().json.ImageEmbedding }}"
|
||||
},
|
||||
{
|
||||
"id": "701ed7ba-d112-4699-a611-c0c134757a6c",
|
||||
"name": "qdrantCloudURL",
|
||||
"type": "string",
|
||||
"value": "={{ $('Qdrant variables + embedding + KNN neigbours').first().json.qdrantCloudURL }}"
|
||||
},
|
||||
{
|
||||
"id": "f5612f78-e7d8-4124-9c3a-27bd5870c9bf",
|
||||
"name": "collectionName",
|
||||
"type": "string",
|
||||
"value": "={{ $('Qdrant variables + embedding + KNN neigbours').first().json.collectionName }}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"typeVersion": 3.4
|
||||
},
|
||||
{
|
||||
"id": "8edbff53-cba6-4491-9d5e-bac7ad6db418",
|
||||
"name": "Propagate loop variables",
|
||||
"type": "n8n-nodes-base.set",
|
||||
"position": [
|
||||
640,
|
||||
-240
|
||||
],
|
||||
"parameters": {
|
||||
"options": {},
|
||||
"assignments": {
|
||||
"assignments": [
|
||||
{
|
||||
"id": "880838bf-2be2-4f5f-9417-974b3cbee163",
|
||||
"name": "=limitKNN",
|
||||
"type": "number",
|
||||
"value": "={{ $json.result.points.length}}"
|
||||
},
|
||||
{
|
||||
"id": "5fff2bea-f644-4fd9-ad04-afbecd19a5bc",
|
||||
"name": "result",
|
||||
"type": "object",
|
||||
"value": "={{ $json.result }}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"typeVersion": 3.4
|
||||
},
|
||||
{
|
||||
"id": "6fad4cc0-f02c-429d-aa4e-0d69ebab9d65",
|
||||
"name": "Image Test URL",
|
||||
"type": "n8n-nodes-base.set",
|
||||
"position": [
|
||||
-320,
|
||||
-240
|
||||
],
|
||||
"parameters": {
|
||||
"options": {},
|
||||
"assignments": {
|
||||
"assignments": [
|
||||
{
|
||||
"id": "46ceba40-fb25-450c-8550-d43d8b8aa94c",
|
||||
"name": "imageURL",
|
||||
"type": "string",
|
||||
"value": "={{ $json.query.imageURL }}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"typeVersion": 3.4
|
||||
},
|
||||
{
|
||||
"id": "f02e79e2-32c8-4af0-8bf9-281119b23cc0",
|
||||
"name": "Return class",
|
||||
"type": "n8n-nodes-base.set",
|
||||
"position": [
|
||||
1240,
|
||||
0
|
||||
],
|
||||
"parameters": {
|
||||
"options": {},
|
||||
"assignments": {
|
||||
"assignments": [
|
||||
{
|
||||
"id": "bd8ca541-8758-4551-b667-1de373231364",
|
||||
"name": "class",
|
||||
"type": "string",
|
||||
"value": "={{ $json.result[0][0] }}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"typeVersion": 3.4
|
||||
},
|
||||
{
|
||||
"id": "83ca90fb-d5d5-45f4-8957-4363a4baf8ed",
|
||||
"name": "Check tie",
|
||||
"type": "n8n-nodes-base.if",
|
||||
"position": [
|
||||
1040,
|
||||
-240
|
||||
],
|
||||
"parameters": {
|
||||
"options": {},
|
||||
"conditions": {
|
||||
"options": {
|
||||
"version": 2,
|
||||
"leftValue": "",
|
||||
"caseSensitive": true,
|
||||
"typeValidation": "strict"
|
||||
},
|
||||
"combinator": "and",
|
||||
"conditions": [
|
||||
{
|
||||
"id": "980663f6-9d7d-4e88-87b9-02030882472c",
|
||||
"operator": {
|
||||
"type": "number",
|
||||
"operation": "gt"
|
||||
},
|
||||
"leftValue": "={{ $json.result.length }}",
|
||||
"rightValue": 1
|
||||
},
|
||||
{
|
||||
"id": "9f46fdeb-0f89-4010-99af-624c1c429d6a",
|
||||
"operator": {
|
||||
"type": "number",
|
||||
"operation": "equals"
|
||||
},
|
||||
"leftValue": "={{ $json.result[0][1] }}",
|
||||
"rightValue": "={{ $json.result[1][1] }}"
|
||||
},
|
||||
{
|
||||
"id": "c59bc4fe-6821-4639-8595-fdaf4194c1e1",
|
||||
"operator": {
|
||||
"type": "number",
|
||||
"operation": "lte"
|
||||
},
|
||||
"leftValue": "={{ $('Propagate loop variables').item.json.limitKNN }}",
|
||||
"rightValue": 100
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"typeVersion": 2.2
|
||||
},
|
||||
{
|
||||
"id": "847ced21-4cfd-45d8-98fa-b578adc054d6",
|
||||
"name": "Qdrant variables + embedding + KNN neigbours",
|
||||
"type": "n8n-nodes-base.set",
|
||||
"position": [
|
||||
120,
|
||||
-240
|
||||
],
|
||||
"parameters": {
|
||||
"options": {},
|
||||
"assignments": {
|
||||
"assignments": [
|
||||
{
|
||||
"id": "de66070d-5e74-414e-8af7-d094cbc26f62",
|
||||
"name": "ImageEmbedding",
|
||||
"type": "array",
|
||||
"value": "={{ $json.data[0].embedding }}"
|
||||
},
|
||||
{
|
||||
"id": "58b7384d-fd0c-44aa-9f8e-0306a99be431",
|
||||
"name": "qdrantCloudURL",
|
||||
"type": "string",
|
||||
"value": "=https://152bc6e2-832a-415c-a1aa-fb529f8baf8d.eu-central-1-0.aws.cloud.qdrant.io"
|
||||
},
|
||||
{
|
||||
"id": "e34c4d88-b102-43cc-a09e-e0553f2da23a",
|
||||
"name": "collectionName",
|
||||
"type": "string",
|
||||
"value": "=land-use"
|
||||
},
|
||||
{
|
||||
"id": "db37e18d-340b-4624-84f6-df993af866d6",
|
||||
"name": "limitKNN",
|
||||
"type": "number",
|
||||
"value": "=10"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"typeVersion": 3.4
|
||||
},
|
||||
{
|
||||
"id": "d1bc4edc-37d2-43ac-8d8b-560453e68d1f",
|
||||
"name": "Sticky Note",
|
||||
"type": "n8n-nodes-base.stickyNote",
|
||||
"position": [
|
||||
-940,
|
||||
-120
|
||||
],
|
||||
"parameters": {
|
||||
"color": 6,
|
||||
"width": 320,
|
||||
"height": 540,
|
||||
"content": "Here we're classifying existing types of satellite imagery of land types:\n- 'agricultural',\n- 'airplane',\n- 'baseballdiamond',\n- 'beach',\n- 'buildings',\n- 'chaparral',\n- 'denseresidential',\n- 'forest',\n- 'freeway',\n- 'golfcourse',\n- 'harbor',\n- 'intersection',\n- 'mediumresidential',\n- 'mobilehomepark',\n- 'overpass',\n- 'parkinglot',\n- 'river',\n- 'runway',\n- 'sparseresidential',\n- 'storagetanks',\n- 'tenniscourt'\n"
|
||||
},
|
||||
"typeVersion": 1
|
||||
},
|
||||
{
|
||||
"id": "13560a31-3c72-43b8-9635-3f9ca11f23c9",
|
||||
"name": "Sticky Note1",
|
||||
"type": "n8n-nodes-base.stickyNote",
|
||||
"position": [
|
||||
-520,
|
||||
-460
|
||||
],
|
||||
"parameters": {
|
||||
"color": 6,
|
||||
"content": "I tested this KNN classifier on a whole `test` set of a dataset (it's not a part of the collection, only `validation` + `train` parts). Accuracy of classification on `test` is **93.24%**, no fine-tuning, no metric learning."
|
||||
},
|
||||
"typeVersion": 1
|
||||
},
|
||||
{
|
||||
"id": "8c9dcbcb-a1ad-430f-b7dd-e19b5645b0f6",
|
||||
"name": "Execute Workflow Trigger",
|
||||
"type": "n8n-nodes-base.executeWorkflowTrigger",
|
||||
"position": [
|
||||
-520,
|
||||
-240
|
||||
],
|
||||
"parameters": {},
|
||||
"typeVersion": 1
|
||||
},
|
||||
{
|
||||
"id": "b36fb270-2101-45e9-bb5c-06c4e07b769c",
|
||||
"name": "Sticky Note2",
|
||||
"type": "n8n-nodes-base.stickyNote",
|
||||
"position": [
|
||||
-1080,
|
||||
-520
|
||||
],
|
||||
"parameters": {
|
||||
"width": 460,
|
||||
"height": 380,
|
||||
"content": "## KNN classification workflow-tool\n### This n8n template takes an image URL (as anomaly detection tool does), and as output, it returns a class of the object on the image (out of land types list)\n\n* An image URL is received via the Execute Workflow Trigger, which is then sent to the Voyage.ai Multimodal Embeddings API to fetch its embedding.\n* The image's embedding vector is then used to query Qdrant, returning a set of X similar images with pre-labeled classes.\n* Majority voting is done for classes of neighbouring images.\n* A loop is used to resolve scenarios where there is a tie in Majority Voting (for example, we have 5 \"forest\" and 5 \"beach\"), and we increase the number of neighbours to retrieve.\n* When the loop finally resolves, the identified class is returned to the calling workflow."
|
||||
},
|
||||
"typeVersion": 1
|
||||
},
|
||||
{
|
||||
"id": "51ece7fc-fd85-4d20-ae26-4df2d3893251",
|
||||
"name": "Sticky Note3",
|
||||
"type": "n8n-nodes-base.stickyNote",
|
||||
"position": [
|
||||
120,
|
||||
-40
|
||||
],
|
||||
"parameters": {
|
||||
"height": 200,
|
||||
"content": "Variables define another Qdrant's collection with landscapes (uploaded similarly as the crops collection, don't forget to switch it with your data) + amount of neighbours **limitKNN** in the database we'll use for an input image classification."
|
||||
},
|
||||
"typeVersion": 1
|
||||
},
|
||||
{
|
||||
"id": "7aad5904-eb0b-4389-9d47-cc91780737ba",
|
||||
"name": "Sticky Note4",
|
||||
"type": "n8n-nodes-base.stickyNote",
|
||||
"position": [
|
||||
-180,
|
||||
-60
|
||||
],
|
||||
"parameters": {
|
||||
"height": 80,
|
||||
"content": "Similarly to anomaly detection tool, we're embedding input image with the Voyage model"
|
||||
},
|
||||
"typeVersion": 1
|
||||
},
|
||||
{
|
||||
"id": "d3702707-ee4a-481f-82ca-d9386f5b7c8a",
|
||||
"name": "Sticky Note5",
|
||||
"type": "n8n-nodes-base.stickyNote",
|
||||
"position": [
|
||||
440,
|
||||
-500
|
||||
],
|
||||
"parameters": {
|
||||
"width": 740,
|
||||
"height": 200,
|
||||
"content": "## Tie loop\nHere we're [querying](https://api.qdrant.tech/api-reference/search/query-points) Qdrant, getting **limitKNN** nearest neighbours to our image <*Query Qdrant node*>, parsing their classes from payloads (images were pre-labeled & uploaded with their labels to Qdrant) & calculating the most frequent class name <*Majority Vote node*>. If there is a tie <*check tie node*> in 2 most common classes, for example, we have 5 \"forest\" and 5 \"harbor\", we repeat the procedure with the number of neighbours increased by 5 <*propagate loop variables node* and *increase limitKNN node*>.\nIf there is no tie, or we have already checked 100 neighbours, we exit the loop <*check tie node*> and return the class-answer."
|
||||
},
|
||||
"typeVersion": 1
|
||||
},
|
||||
{
|
||||
"id": "d26911bb-0442-4adc-8511-7cec2d232393",
|
||||
"name": "Sticky Note6",
|
||||
"type": "n8n-nodes-base.stickyNote",
|
||||
"position": [
|
||||
1240,
|
||||
160
|
||||
],
|
||||
"parameters": {
|
||||
"height": 80,
|
||||
"content": "Here, we extract the name of the input image class decided by the Majority Vote\n"
|
||||
},
|
||||
"typeVersion": 1
|
||||
},
|
||||
{
|
||||
"id": "84ffc859-1d5c-4063-9051-3587f30a0017",
|
||||
"name": "Sticky Note10",
|
||||
"type": "n8n-nodes-base.stickyNote",
|
||||
"position": [
|
||||
-520,
|
||||
80
|
||||
],
|
||||
"parameters": {
|
||||
"color": 4,
|
||||
"width": 540,
|
||||
"height": 260,
|
||||
"content": "### KNN (k nearest neighbours) classification\n1. The first pipeline is uploading (lands) dataset to Qdrant's collection.\n2. **This is the KNN classifier tool, which takes any image as input and classifies it based on queries to the Qdrant (lands) collection.**\n\n### To recreate it\nYou'll have to upload [lands](https://www.kaggle.com/datasets/apollo2506/landuse-scene-classification) dataset from Kaggle to your own Google Storage bucket, and re-create APIs/connections to [Qdrant Cloud](https://qdrant.tech/documentation/quickstart-cloud/) (you can use **Free Tier** cluster), Voyage AI API & Google Cloud Storage\n\n**In general, pipelines are adaptable to any dataset of images**\n"
|
||||
},
|
||||
"typeVersion": 1
|
||||
}
|
||||
],
|
||||
"active": false,
|
||||
"pinData": {
|
||||
"Execute Workflow Trigger": [
|
||||
{
|
||||
"json": {
|
||||
"query": {
|
||||
"imageURL": "https://storage.googleapis.com/n8n-qdrant-demo/land-use/images_train_test_val/test/buildings/buildings_000323.png"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"settings": {
|
||||
"executionOrder": "v1"
|
||||
},
|
||||
"versionId": "c8cfe732-fd78-4985-9540-ed8cb2de7ef3",
|
||||
"connections": {
|
||||
"Check tie": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Increase limitKNN",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
"node": "Return class",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Embed image": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Qdrant variables + embedding + KNN neigbours",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Query Qdrant": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Propagate loop variables",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Majority Vote": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Check tie",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Image Test URL": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Embed image",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Increase limitKNN": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Query Qdrant",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Execute Workflow Trigger": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Image Test URL",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Propagate loop variables": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Majority Vote",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Qdrant variables + embedding + KNN neigbours": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Query Qdrant",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
}
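The deleted classifier above breaks majority-vote ties by re-querying Qdrant with five more neighbours until the tie resolves or 100 neighbours have been checked (the Query Qdrant, Majority Vote, Check tie and Increase limitKNN nodes). A compact Python sketch of that loop follows; query_neighbors is an assumed stand-in for the Qdrant points/query request (with_payload=true) and must return the landscape_name payload labels of the limit nearest neighbours.

"""Sketch of the tie-breaking KNN loop implemented by the workflow's Tie loop."""
from collections import Counter
from typing import Callable, List


def knn_classify(
    query_neighbors: Callable[[int], List[str]],
    limit_knn: int = 10,   # initial limitKNN set in the variables node
    step: int = 5,         # Increase limitKNN adds 5 on every tie
    max_limit: int = 100,  # Check tie gives up on ties past 100 neighbours
) -> str:
    while True:
        labels = query_neighbors(limit_knn)
        top_two = Counter(labels).most_common(2)  # Majority Vote code node
        tie = len(top_two) > 1 and top_two[0][1] == top_two[1][1]
        if not tie or limit_knn > max_limit:
            return top_two[0][0]                  # Return class node: result[0][0]
        limit_knn += step                         # Increase limitKNN node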
File diff suppressed because it is too large
@@ -1,177 +0,0 @@
{
  "id": "QnVdtKiTf3nbrNkh",
  "meta": {
    "instanceId": "558d88703fb65b2d0e44613bc35916258b0f0bf983c5d4730c00c424b77ca36a",
    "templateCredsSetupCompleted": true
  },
  "name": "Summarize emails with A.I. then send to messenger",
  "tags": [],
  "nodes": [
    {
      "id": "50e12e63-df28-45ac-9208-48cbf5116d09",
      "name": "Read emails (IMAP)",
      "type": "n8n-nodes-base.emailReadImap",
      "position": [
        340,
        260
      ],
      "parameters": {
        "options": {},
        "postProcessAction": "nothing"
      },
      "credentials": {
        "imap": {
          "id": "gXtdakU9M02LBQc3",
          "name": "IMAP account"
        }
      },
      "typeVersion": 2
    },
    {
      "id": "6565350b-2269-44e3-8f36-8797f32d3e09",
      "name": "Send email to A.I. to summarize",
      "type": "n8n-nodes-base.httpRequest",
      "position": [
        700,
        260
      ],
      "parameters": {
        "url": "https://openrouter.ai/api/v1/chat/completions",
        "method": "POST",
        "options": {},
        "jsonBody": "={\n \"model\": \"meta-llama/llama-3.1-70b-instruct:free\",\n \"messages\": [\n {\n \"role\": \"user\",\n \"content\": \"I want you to read and summarize all the emails. If it's not rimportant, just give me a short summary with less than 10 words.\\n\\nHighlight as important if it is, add an emoji to indicate it is urgent:\\nFor the relevant content, find any action items and deadlines. Sometimes I need to sign up before a certain date or pay before a certain date, please highlight that in the summary for me.\\n\\nPut the deadline in BOLD at the top. If the email is not important, keep the summary short to 1 sentence only.\\n\\nHere's the email content for you to read:\\nSender email address: {{ encodeURIComponent($json.from) }}\\nSubject: {{ encodeURIComponent($json.subject) }}\\n{{ encodeURIComponent($json.textHtml) }}\"\n }\n ]\n}",
        "sendBody": true,
        "specifyBody": "json",
        "authentication": "genericCredentialType",
        "genericAuthType": "httpHeaderAuth"
      },
      "credentials": {
        "httpHeaderAuth": {
          "id": "WY7UkF14ksPKq3S8",
          "name": "Header Auth account 2"
        }
      },
      "typeVersion": 4.2,
      "alwaysOutputData": false
    },
    {
      "id": "d04c422a-c000-4e48-82d0-0bf44bcd9fff",
      "name": "Send summarized content to messenger",
      "type": "n8n-nodes-base.httpRequest",
      "position": [
        1100,
        260
      ],
      "parameters": {
        "url": "https://api.line.me/v2/bot/message/push",
        "method": "POST",
        "options": {},
        "jsonBody": "={\n \"to\": \"U3ec262c49811f30cdc2d2f2b0a0df99a\",\n \"messages\": [\n {\n \"type\": \"text\",\n \"text\": \"{{ $json.choices[0].message.content.replace(/\\n/g, \"\\\\n\") }}\"\n }\n ]\n}\n\n\n ",
        "sendBody": true,
        "specifyBody": "json",
        "authentication": "genericCredentialType",
        "genericAuthType": "httpHeaderAuth"
      },
      "credentials": {
        "httpHeaderAuth": {
          "id": "SzcKjO9Nn9vZPL2H",
          "name": "Header Auth account 5"
        }
      },
      "typeVersion": 4.2
    },
    {
      "id": "57a1219c-4f40-407c-855b-86c4c7c468bb",
      "name": "Sticky Note",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        180,
        0
      ],
      "parameters": {
        "width": 361,
        "height": 90,
        "content": "## Summarize emails with A.I.\nYou can find out more about the [use case](https://rumjahn.com/how-a-i-saved-my-kids-school-life-and-my-marriage/)"
      },
      "typeVersion": 1
    },
    {
      "id": "17686264-56ac-419e-a32b-dc5c75f15f1f",
      "name": "Sticky Note1",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        283,
        141
      ],
      "parameters": {
        "color": 5,
        "width": 229,
        "height": 280,
        "content": "Find your email server's IMAP Settings. \n- Link for [gmail](https://www.getmailspring.com/setup/access-gmail-via-imap-smtp)"
      },
      "typeVersion": 1
    },
    {
      "id": "1862abd6-7dca-4c66-90d6-110d4fcf4d99",
      "name": "Sticky Note2",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        580,
        0
      ],
      "parameters": {
        "color": 6,
        "width": 365,
        "height": 442,
        "content": "For the A.I. you can use Openrouter.ai. \n- Set up a free account\n- The A.I. model selected is FREE to use.\n## Credentials\n- Use header auth\n- Username: Authorization\n- Password: Bearer {insert your API key}.\n- The password is \"Bearer\" space plus your API key."
      },
      "typeVersion": 1
    },
    {
      "id": "c4a3a76f-539d-4bbf-8f95-d7aaebf39a55",
      "name": "Sticky Note3",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        1000,
        0
      ],
      "parameters": {
        "color": 4,
        "width": 307,
        "height": 439,
        "content": "Don't use the official Line node. It's outdated.\n## Credentials\n- Use header auth\n- Username: Authorization\n- Password: Bearer {channel access token}\n\nYou can find your channel access token at the [Line API console](https://developers.line.biz/console/). Go to Messaging API and scroll to the bottom."
      },
      "typeVersion": 1
    }
  ],
  "active": false,
  "pinData": {},
  "settings": {
    "executionOrder": "v1"
  },
  "versionId": "81216e6a-2bd8-4215-8a96-376ee520469d",
  "connections": {
    "Read emails (IMAP)": {
      "main": [
        [
          {
            "node": "Send email to A.I. to summarize",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Send email to A.I. to summarize": {
      "main": [
        [
          {
            "node": "Send summarized content to messenger",
            "type": "main",
            "index": 0
          }
        ]
      ]
    }
  }
}
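The removed email summarizer boils down to two HTTP calls: an OpenRouter chat completion that summarizes the email, then a LINE Messaging API push with the summary. A minimal Python sketch mirroring those two nodes follows; the environment variable names, the helper functions, and the condensed prompt are assumptions, while the endpoints, model and payload shapes come from the node settings above.

"""Sketch of the two HTTP Request nodes: OpenRouter summary, then LINE push."""
import json
import os
import urllib.request


def post_json(url: str, token: str, payload: dict) -> dict:
    request = urllib.request.Request(
        url,
        data=json.dumps(payload).encode("utf-8"),
        headers={"Authorization": "Bearer " + token, "Content-Type": "application/json"},
        method="POST",
    )
    with urllib.request.urlopen(request) as response:
        return json.load(response)


def summarize_and_push(sender: str, subject: str, body_html: str, line_user_id: str) -> None:
    # "Send email to A.I. to summarize": OpenRouter chat completion (condensed prompt)
    completion = post_json(
        "https://openrouter.ai/api/v1/chat/completions",
        os.environ["OPENROUTER_API_KEY"],
        {
            "model": "meta-llama/llama-3.1-70b-instruct:free",
            "messages": [{
                "role": "user",
                "content": (
                    "Read and summarize this email. Keep unimportant emails to one short "
                    "sentence and put any deadline in bold.\n"
                    "Sender: " + sender + "\nSubject: " + subject + "\n" + body_html
                ),
            }],
        },
    )
    summary = completion["choices"][0]["message"]["content"]
    # "Send summarized content to messenger": LINE Messaging API push
    post_json(
        "https://api.line.me/v2/bot/message/push",
        os.environ["LINE_CHANNEL_ACCESS_TOKEN"],
        {"to": line_user_id, "messages": [{"type": "text", "text": summary}]},
    )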
Some files were not shown because too many files have changed in this diff.