feat: add folder support for workflows (fixes #70)
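Workflows can now live in subdirectories of workflows/: the indexer scans the tree recursively and records the first path segment below workflows/ as each workflow's folder (top-level files get an empty folder). A hypothetical layout (file names invented for illustration):

    workflows/
      email/
        0001_welcome_webhook.json   -> folder: "email"
      0002_manual_sync.json         -> folder: ""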
package.json (23 changed lines)

diff --git a/package.json b/package.json
--- a/package.json
+++ b/package.json
@@ -10,20 +10,25 @@
     "index": "node src/index-workflows.js"
   },
   "dependencies": {
-    "express": "^4.18.2",
-    "cors": "^2.8.5",
-    "sqlite3": "^5.1.6",
-    "compression": "^1.7.4",
-    "express-rate-limit": "^7.1.5",
-    "helmet": "^7.1.0",
-    "fs-extra": "^11.2.0",
     "chokidar": "^3.5.3",
-    "commander": "^11.1.0"
+    "commander": "^11.1.0",
+    "compression": "^1.8.1",
+    "cors": "^2.8.5",
+    "express": "^4.21.2",
+    "express-rate-limit": "^7.5.1",
+    "fs-extra": "^11.3.0",
+    "helmet": "^7.2.0",
+    "sqlite3": "^5.1.7"
   },
   "devDependencies": {
     "nodemon": "^3.0.2"
   },
-  "keywords": ["n8n", "workflows", "documentation", "automation"],
+  "keywords": [
+    "n8n",
+    "workflows",
+    "documentation",
+    "automation"
+  ],
   "author": "",
   "license": "MIT"
 }
src/database.js (379 changed lines)

diff --git a/src/database.js b/src/database.js
--- a/src/database.js
+++ b/src/database.js
@@ -1,12 +1,25 @@
-const sqlite3 = require('sqlite3').verbose();
-const path = require('path');
-const fs = require('fs-extra');
-const crypto = require('crypto');
+const sqlite3 = require("sqlite3").verbose();
+const path = require("path");
+const fs = require("fs-extra");
+const crypto = require("crypto");
 
+async function getAllJsonFiles(dir) {
+  let results = [];
+  const items = await fs.readdir(dir, { withFileTypes: true });
+  for (const item of items) {
+    const full = path.join(dir, item.name);
+    if (item.isDirectory()) {
+      results = results.concat(await getAllJsonFiles(full));
+    } else if (item.isFile() && full.endsWith(".json")) {
+      results.push(full);
+    }
+  }
+  return results;
+}
+
 class WorkflowDatabase {
-  constructor(dbPath = 'database/workflows.db') {
+  constructor(dbPath = "database/workflows.db") {
     this.dbPath = dbPath;
-    this.workflowsDir = 'workflows';
+    this.workflowsDir = "workflows";
     this.db = null;
     this.initialized = false;
   }
@@ -30,10 +43,10 @@ class WorkflowDatabase {
       }
 
       // Enable WAL mode for better performance
-      this.db.run('PRAGMA journal_mode=WAL');
-      this.db.run('PRAGMA synchronous=NORMAL');
-      this.db.run('PRAGMA cache_size=10000');
-      this.db.run('PRAGMA temp_store=MEMORY');
+      this.db.run("PRAGMA journal_mode=WAL");
+      this.db.run("PRAGMA synchronous=NORMAL");
+      this.db.run("PRAGMA cache_size=10000");
+      this.db.run("PRAGMA temp_store=MEMORY");
 
       this.createTables().then(resolve).catch(reject);
     });
@@ -49,6 +62,7 @@ class WorkflowDatabase {
         id INTEGER PRIMARY KEY AUTOINCREMENT,
         filename TEXT UNIQUE NOT NULL,
         name TEXT NOT NULL,
+        folder TEXT DEFAULT '',
         workflow_id TEXT,
         active BOOLEAN DEFAULT 0,
         description TEXT,
@@ -74,11 +88,11 @@ class WorkflowDatabase {
       )`,
 
       // Indexes for performance
-      'CREATE INDEX IF NOT EXISTS idx_trigger_type ON workflows(trigger_type)',
-      'CREATE INDEX IF NOT EXISTS idx_complexity ON workflows(complexity)',
-      'CREATE INDEX IF NOT EXISTS idx_active ON workflows(active)',
-      'CREATE INDEX IF NOT EXISTS idx_node_count ON workflows(node_count)',
-      'CREATE INDEX IF NOT EXISTS idx_filename ON workflows(filename)',
+      "CREATE INDEX IF NOT EXISTS idx_trigger_type ON workflows(trigger_type)",
+      "CREATE INDEX IF NOT EXISTS idx_complexity ON workflows(complexity)",
+      "CREATE INDEX IF NOT EXISTS idx_active ON workflows(active)",
+      "CREATE INDEX IF NOT EXISTS idx_node_count ON workflows(node_count)",
+      "CREATE INDEX IF NOT EXISTS idx_filename ON workflows(filename)",
 
       // Triggers to sync FTS table (simplified)
       `CREATE TRIGGER IF NOT EXISTS workflows_ai AFTER INSERT ON workflows BEGIN
@@ -94,7 +108,7 @@ class WorkflowDatabase {
         DELETE FROM workflows_fts WHERE filename = old.filename;
         INSERT INTO workflows_fts(filename, name, description, integrations, tags)
         VALUES (new.filename, new.name, new.description, new.integrations, new.tags);
-      END`
+      END`,
     ];
 
     // Run queries sequentially to avoid race conditions
@@ -121,33 +135,37 @@ class WorkflowDatabase {
 
   getFileHash(filePath) {
     const buffer = fs.readFileSync(filePath);
-    return crypto.createHash('md5').update(buffer).digest('hex');
+    return crypto.createHash("md5").update(buffer).digest("hex");
   }
 
   formatWorkflowName(filename) {
     // Remove .json extension and split by underscores
-    const name = filename.replace('.json', '');
-    const parts = name.split('_');
+    const name = filename.replace(".json", "");
+    const parts = name.split("_");
 
     // Skip first part if it's just a number
     const startIndex = parts[0] && /^\d+$/.test(parts[0]) ? 1 : 0;
     const cleanParts = parts.slice(startIndex);
 
-    return cleanParts.map(part => {
-      const lower = part.toLowerCase();
-      const specialTerms = {
-        'http': 'HTTP',
-        'api': 'API',
-        'webhook': 'Webhook',
-        'automation': 'Automation',
-        'automate': 'Automate',
-        'scheduled': 'Scheduled',
-        'triggered': 'Triggered',
-        'manual': 'Manual'
-      };
+    return cleanParts
+      .map((part) => {
+        const lower = part.toLowerCase();
+        const specialTerms = {
+          http: "HTTP",
+          api: "API",
+          webhook: "Webhook",
+          automation: "Automation",
+          automate: "Automate",
+          scheduled: "Scheduled",
+          triggered: "Triggered",
+          manual: "Manual",
+        };
 
-      return specialTerms[lower] || part.charAt(0).toUpperCase() + part.slice(1);
-    }).join(' ');
+        return (
+          specialTerms[lower] || part.charAt(0).toUpperCase() + part.slice(1)
+        );
+      })
+      .join(" ");
   }
 
   analyzeWorkflow(filePath) {
@@ -157,23 +175,32 @@ class WorkflowDatabase {
       const fileSize = fs.statSync(filePath).size;
       const fileHash = this.getFileHash(filePath);
 
+      const rel = path.relative(this.workflowsDir, filePath);
+      const parts = rel.split(path.sep);
+      const folder = parts.length > 1 ? parts[0] : "";
+
       const workflow = {
         filename,
         name: this.formatWorkflowName(filename),
-        workflow_id: data.id || '',
+        folder,
+        workflow_id: data.id || "",
         active: data.active || false,
         nodes: data.nodes || [],
         connections: data.connections || {},
         tags: data.tags || [],
-        created_at: data.createdAt || '',
-        updated_at: data.updatedAt || '',
+        created_at: data.createdAt || "",
+        updated_at: data.updatedAt || "",
         file_hash: fileHash,
-        file_size: fileSize
+        file_size: fileSize,
       };
 
       // Use meaningful JSON name if available
       const jsonName = data.name?.trim();
-      if (jsonName && jsonName !== filename.replace('.json', '') && !jsonName.startsWith('My workflow')) {
+      if (
+        jsonName &&
+        jsonName !== filename.replace(".json", "") &&
+        !jsonName.startsWith("My workflow")
+      ) {
         workflow.name = jsonName;
       }
 
@@ -183,11 +210,11 @@ class WorkflowDatabase {
 
       // Determine complexity
       if (nodeCount <= 5) {
-        workflow.complexity = 'low';
+        workflow.complexity = "low";
       } else if (nodeCount <= 15) {
-        workflow.complexity = 'medium';
+        workflow.complexity = "medium";
       } else {
-        workflow.complexity = 'high';
+        workflow.complexity = "high";
       }
 
       // Analyze trigger type and integrations
@@ -196,40 +223,48 @@ class WorkflowDatabase {
       workflow.integrations = Array.from(integrations);
 
       // Generate description
-      workflow.description = this.generateDescription(workflow, triggerType, integrations);
+      workflow.description = this.generateDescription(
+        workflow,
+        triggerType,
+        integrations
+      );
 
       return workflow;
     } catch (error) {
-      console.error(`Error analyzing workflow ${filePath}:`, error.message);
+      console.error(
+        `Error analyzing workflow file "${filePath}": ${error.message}`
+      );
       return null;
     }
   }
 
   analyzeNodes(nodes) {
     const integrations = new Set();
-    let triggerType = 'Manual';
+    let triggerType = "Manual";
 
-    nodes.forEach(node => {
-      const nodeType = node.type || '';
+    nodes.forEach((node) => {
+      const nodeType = node.type || "";
 
       // Extract integration name from node type
-      if (nodeType.includes('.')) {
-        const parts = nodeType.split('.');
+      if (nodeType.includes(".")) {
+        const parts = nodeType.split(".");
         if (parts.length >= 2) {
           const integration = parts[1];
-          if (integration !== 'core' && integration !== 'base') {
-            integrations.add(integration.charAt(0).toUpperCase() + integration.slice(1));
+          if (integration !== "core" && integration !== "base") {
+            integrations.add(
+              integration.charAt(0).toUpperCase() + integration.slice(1)
+            );
           }
         }
       }
 
       // Determine trigger type based on node types
-      if (nodeType.includes('webhook')) {
-        triggerType = 'Webhook';
-      } else if (nodeType.includes('cron') || nodeType.includes('schedule')) {
-        triggerType = 'Scheduled';
-      } else if (nodeType.includes('trigger')) {
-        triggerType = 'Triggered';
+      if (nodeType.includes("webhook")) {
+        triggerType = "Webhook";
+      } else if (nodeType.includes("cron") || nodeType.includes("schedule")) {
+        triggerType = "Scheduled";
+      } else if (nodeType.includes("trigger")) {
+        triggerType = "Triggered";
       }
     });
 
@@ -240,10 +275,10 @@ class WorkflowDatabase {
     const parts = [];
 
     // Add trigger info
-    if (triggerType !== 'Manual') {
+    if (triggerType !== "Manual") {
       parts.push(`${triggerType} workflow`);
     } else {
-      parts.push('Manual workflow');
+      parts.push("Manual workflow");
     }
 
     // Add integration info
@@ -252,13 +287,15 @@ class WorkflowDatabase {
       if (integrations.size > 3) {
         integrationList.push(`+${integrations.size - 3} more`);
       }
-      parts.push(`integrating ${integrationList.join(', ')}`);
+      parts.push(`integrating ${integrationList.join(", ")}`);
     }
 
     // Add complexity info
-    parts.push(`with ${workflow.node_count} nodes (${workflow.complexity} complexity)`);
+    parts.push(
+      `with ${workflow.node_count} nodes (${workflow.complexity} complexity)`
+    );
 
-    return parts.join(' ');
+    return parts.join(" ");
   }
 
   async indexWorkflows(forceReindex = false) {
@@ -266,15 +303,13 @@ class WorkflowDatabase {
       await this.initialize();
     }
 
-    const workflowFiles = await fs.readdir(this.workflowsDir);
-    const jsonFiles = workflowFiles.filter(file => file.endsWith('.json'));
+    const jsonFiles = await getAllJsonFiles(this.workflowsDir);
 
     let processed = 0;
    let skipped = 0;
     let errors = 0;
 
-    for (const file of jsonFiles) {
-      const filePath = path.join(this.workflowsDir, file);
+    for (const filePath of jsonFiles) {
       const workflow = this.analyzeWorkflow(filePath);
 
       if (!workflow) {
@@ -283,9 +318,12 @@ class WorkflowDatabase {
       }
 
       try {
         // Check if workflow exists and if hash changed
-        const existing = await this.getWorkflowByFilename(file);
-        if (!forceReindex && existing && existing.file_hash === workflow.file_hash) {
+        const existing = await this.getWorkflowByFilename(workflow.filename);
+        if (
+          !forceReindex &&
+          existing &&
+          existing.file_hash === workflow.file_hash
+        ) {
           skipped++;
           continue;
         }
@@ -293,7 +331,9 @@ class WorkflowDatabase {
         await this.upsertWorkflow(workflow);
         processed++;
       } catch (error) {
-        console.error(`Error indexing workflow ${file}:`, error.message);
+        console.error(
+          `Error indexing workflow ${workflow.filename}: ${error.message}`
+        );
         errors++;
       }
     }
@@ -304,7 +344,7 @@ class WorkflowDatabase {
   async getWorkflowByFilename(filename) {
     return new Promise((resolve, reject) => {
       this.db.get(
-        'SELECT * FROM workflows WHERE filename = ?',
+        "SELECT * FROM workflows WHERE filename = ?",
         [filename],
         (err, row) => {
           if (err) reject(err);
@@ -318,15 +358,16 @@ class WorkflowDatabase {
     return new Promise((resolve, reject) => {
       const sql = `
         INSERT OR REPLACE INTO workflows (
-          filename, name, workflow_id, active, description, trigger_type,
+          filename, name, folder, workflow_id, active, description, trigger_type,
           complexity, node_count, integrations, tags, created_at, updated_at,
           file_hash, file_size, analyzed_at
-        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
+        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
       `;
 
       const params = [
         workflow.filename,
         workflow.name,
+        workflow.folder,
         workflow.workflow_id,
         workflow.active,
         workflow.description,
@@ -338,10 +379,10 @@ class WorkflowDatabase {
         workflow.created_at,
         workflow.updated_at,
         workflow.file_hash,
-        workflow.file_size
+        workflow.file_size,
       ];
 
-      this.db.run(sql, params, function(err) {
+      this.db.run(sql, params, function (err) {
         if (err) reject(err);
         else resolve(this.lastID);
       });
@@ -351,10 +392,10 @@ class WorkflowDatabase {
   buildFTSQuery(query) {
     // Escape FTS5 special characters and build partial matching query
     let cleanQuery = query
-      .replace(/[^\w\s"'-]/g, ' ') // Remove special chars except quotes, hyphens, apostrophes
+      .replace(/[^\w\s"'-]/g, " ") // Remove special chars except quotes, hyphens, apostrophes
       .trim();
 
-    if (!cleanQuery) return '*';
+    if (!cleanQuery) return "*";
 
     // Handle quoted phrases
     const phrases = [];
@@ -363,14 +404,14 @@ class WorkflowDatabase {
 
     while ((match = quotedRegex.exec(cleanQuery)) !== null) {
       phrases.push(`"${match[1]}"`); // Keep exact phrases
-      cleanQuery = cleanQuery.replace(match[0], ' ');
+      cleanQuery = cleanQuery.replace(match[0], " ");
     }
 
     // Split remaining terms and add wildcards for partial matching
     const terms = cleanQuery
       .split(/\s+/)
-      .filter(term => term.length > 0)
-      .map(term => {
+      .filter((term) => term.length > 0)
+      .map((term) => {
         // Add wildcard suffix for prefix matching
         if (term.length >= 2) {
           return `${term}*`;
@@ -381,20 +422,26 @@ class WorkflowDatabase {
     // Combine phrases and wildcard terms
     const allTerms = [...phrases, ...terms];
 
-    if (allTerms.length === 0) return '*';
+    if (allTerms.length === 0) return "*";
 
     // Join with AND for more precise results
-    return allTerms.join(' AND ');
+    return allTerms.join(" AND ");
   }
 
-  async searchWorkflows(query = '', triggerFilter = 'all', complexityFilter = 'all',
-                        activeOnly = false, limit = 50, offset = 0) {
+  async searchWorkflows(
+    query = "",
+    triggerFilter = "all",
+    complexityFilter = "all",
+    activeOnly = false,
+    limit = 50,
+    offset = 0
+  ) {
     if (!this.initialized) {
       await this.initialize();
     }
 
     return new Promise((resolve, reject) => {
-      let sql = '';
+      let sql = "";
       let params = [];
 
       if (query.trim()) {
@@ -408,22 +455,22 @@ class WorkflowDatabase {
         params.push(ftsQuery);
       } else {
         // Regular search
-        sql = 'SELECT * FROM workflows WHERE 1=1';
+        sql = "SELECT * FROM workflows WHERE 1=1";
       }
 
       // Add filters
-      if (triggerFilter !== 'all') {
-        sql += ' AND trigger_type = ?';
+      if (triggerFilter !== "all") {
+        sql += " AND trigger_type = ?";
         params.push(triggerFilter);
       }
 
-      if (complexityFilter !== 'all') {
-        sql += ' AND complexity = ?';
+      if (complexityFilter !== "all") {
+        sql += " AND complexity = ?";
         params.push(complexityFilter);
       }
 
       if (activeOnly) {
-        sql += ' AND active = 1';
+        sql += " AND active = 1";
       }
 
       // Count total - rebuild query for FTS compatibility
@@ -440,18 +487,18 @@ class WorkflowDatabase {
         countParams = [this.buildFTSQuery(query.trim())];
 
         // Add filters to count query
-        if (triggerFilter !== 'all') {
-          countSql += ' AND trigger_type = ?';
+        if (triggerFilter !== "all") {
+          countSql += " AND trigger_type = ?";
          countParams.push(triggerFilter);
         }
 
-        if (complexityFilter !== 'all') {
-          countSql += ' AND complexity = ?';
+        if (complexityFilter !== "all") {
+          countSql += " AND complexity = ?";
           countParams.push(complexityFilter);
         }
 
         if (activeOnly) {
-          countSql += ' AND active = 1';
+          countSql += " AND active = 1";
         }
       } else {
         countSql = `SELECT COUNT(*) as total FROM (${sql})`;
@@ -467,7 +514,7 @@ class WorkflowDatabase {
         const total = countResult.total;
 
         // Add pagination
-        sql += ' ORDER BY name LIMIT ? OFFSET ?';
+        sql += " ORDER BY name LIMIT ? OFFSET ?";
         params.push(limit, offset);
 
         this.db.all(sql, params, (err, rows) => {
@@ -477,10 +524,10 @@ class WorkflowDatabase {
           }
 
           // Parse JSON fields
-          const workflows = rows.map(row => ({
+          const workflows = rows.map((row) => ({
             ...row,
-            integrations: JSON.parse(row.integrations || '[]'),
-            tags: JSON.parse(row.tags || '[]')
+            integrations: JSON.parse(row.integrations || "[]"),
+            tags: JSON.parse(row.tags || "[]"),
           }));
 
           resolve({ workflows, total });
@@ -496,71 +543,86 @@ class WorkflowDatabase {
 
     return new Promise((resolve, reject) => {
       const queries = [
-        'SELECT COUNT(*) as total FROM workflows',
-        'SELECT COUNT(*) as active FROM workflows WHERE active = 1',
-        'SELECT COUNT(*) as inactive FROM workflows WHERE active = 0',
-        'SELECT trigger_type, COUNT(*) as count FROM workflows GROUP BY trigger_type',
-        'SELECT complexity, COUNT(*) as count FROM workflows GROUP BY complexity',
-        'SELECT SUM(node_count) as total_nodes FROM workflows',
-        'SELECT analyzed_at FROM workflows ORDER BY analyzed_at DESC LIMIT 1'
+        "SELECT COUNT(*) as total FROM workflows",
+        "SELECT COUNT(*) as active FROM workflows WHERE active = 1",
+        "SELECT COUNT(*) as inactive FROM workflows WHERE active = 0",
+        "SELECT trigger_type, COUNT(*) as count FROM workflows GROUP BY trigger_type",
+        "SELECT complexity, COUNT(*) as count FROM workflows GROUP BY complexity",
+        "SELECT SUM(node_count) as total_nodes FROM workflows",
+        "SELECT analyzed_at FROM workflows ORDER BY analyzed_at DESC LIMIT 1",
       ];
 
-      Promise.all(queries.map(sql =>
-        new Promise((resolve, reject) => {
-          this.db.all(sql, (err, rows) => {
-            if (err) reject(err);
-            else resolve(rows);
-          });
-        })
-      )).then(results => {
-        const [total, active, inactive, triggers, complexity, nodes, lastIndexed] = results;
-
-        const triggersMap = {};
-        triggers.forEach(row => {
-          triggersMap[row.trigger_type] = row.count;
-        });
-
-        const complexityMap = {};
-        complexity.forEach(row => {
-          complexityMap[row.complexity] = row.count;
-        });
-
-        // Count unique integrations
-        this.db.all('SELECT integrations FROM workflows', (err, rows) => {
-          if (err) {
-            reject(err);
-            return;
-          }
-
-          const allIntegrations = new Set();
-          rows.forEach(row => {
-            try {
-              const integrations = JSON.parse(row.integrations || '[]');
-              integrations.forEach(integration => allIntegrations.add(integration));
-            } catch (e) {
-              // Ignore parse errors
-            }
-          });
-
-          resolve({
-            total: total[0].total,
-            active: active[0].active,
-            inactive: inactive[0].inactive,
-            triggers: triggersMap,
-            complexity: complexityMap,
-            total_nodes: nodes[0].total_nodes || 0,
-            unique_integrations: allIntegrations.size,
-            last_indexed: lastIndexed[0]?.analyzed_at || ''
-          });
-        });
-      }).catch(reject);
+      Promise.all(
+        queries.map(
+          (sql) =>
+            new Promise((resolve, reject) => {
+              this.db.all(sql, (err, rows) => {
+                if (err) reject(err);
+                else resolve(rows);
+              });
+            })
+        )
+      )
+        .then((results) => {
+          const [
+            total,
+            active,
+            inactive,
+            triggers,
+            complexity,
+            nodes,
+            lastIndexed,
+          ] = results;
+
+          const triggersMap = {};
+          triggers.forEach((row) => {
+            triggersMap[row.trigger_type] = row.count;
+          });
+
+          const complexityMap = {};
+          complexity.forEach((row) => {
+            complexityMap[row.complexity] = row.count;
+          });
+
+          // Count unique integrations
+          this.db.all("SELECT integrations FROM workflows", (err, rows) => {
+            if (err) {
+              reject(err);
+              return;
+            }
+
+            const allIntegrations = new Set();
+            rows.forEach((row) => {
+              try {
+                const integrations = JSON.parse(row.integrations || "[]");
+                integrations.forEach((integration) =>
+                  allIntegrations.add(integration)
+                );
+              } catch (e) {
+                // Ignore parse errors
+              }
+            });
+
+            resolve({
+              total: total[0].total,
+              active: active[0].active,
+              inactive: inactive[0].inactive,
+              triggers: triggersMap,
+              complexity: complexityMap,
+              total_nodes: nodes[0].total_nodes || 0,
+              unique_integrations: allIntegrations.size,
+              last_indexed: lastIndexed[0]?.analyzed_at || "",
+            });
+          });
+        })
+        .catch(reject);
     });
   }
 
   async getWorkflowDetail(filename) {
     return new Promise((resolve, reject) => {
       this.db.get(
-        'SELECT * FROM workflows WHERE filename = ?',
+        "SELECT * FROM workflows WHERE filename = ?",
         [filename],
         (err, row) => {
           if (err) {
@@ -576,8 +638,8 @@ class WorkflowDatabase {
         // Parse JSON fields and load raw workflow data
         const workflow = {
           ...row,
-          integrations: JSON.parse(row.integrations || '[]'),
-          tags: JSON.parse(row.tags || '[]')
+          integrations: JSON.parse(row.integrations || "[]"),
+          tags: JSON.parse(row.tags || "[]"),
         };
 
         // Load raw workflow JSON
@@ -586,7 +648,10 @@ class WorkflowDatabase {
           const rawWorkflow = fs.readJsonSync(workflowPath);
           workflow.raw_workflow = rawWorkflow;
         } catch (error) {
-          console.error(`Error loading raw workflow ${filename}:`, error.message);
+          console.error(
+            `Error loading raw workflow ${filename}:`,
+            error.message
+          );
         }
 
         resolve(workflow);
Some files were not shown because too many files have changed in this diff.
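For reference, a minimal sketch (not part of the commit; the helper name folderFor is invented) of the folder attribution that analyzeWorkflow now performs:

    const path = require("path");

    // First path segment below workflowsDir, or "" for top-level files;
    // the same expression analyzeWorkflow uses to fill the new folder column.
    function folderFor(workflowsDir, filePath) {
      const rel = path.relative(workflowsDir, filePath);
      const parts = rel.split(path.sep);
      return parts.length > 1 ? parts[0] : "";
    }

    console.log(folderFor("workflows", "workflows/email/0001_welcome.json")); // "email"
    console.log(folderFor("workflows", "workflows/0002_sync.json"));          // ""

Note that only the first directory level is recorded: a file at workflows/a/b/c.json gets folder "a".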