feat: add folder support for workflows (fixes #70)

Praveen Mudalgeri
2025-08-05 09:40:47 +05:30
parent 307d530f9b
commit c4885eee92
2057 changed files with 985290 additions and 974268 deletions
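
In outline, the change makes the indexer walk workflows/ recursively and record each workflow's top-level subdirectory in a new folder column (empty for files that sit directly under workflows/), as the analyzeWorkflow hunk below shows. A minimal sketch of that derivation, outside the diff and using a hypothetical nested path:

    // Illustrative only; mirrors the folder derivation added in analyzeWorkflow.
    const path = require("path");

    const workflowsDir = "workflows";
    const filePath = "workflows/marketing/0123_Webhook_Automation.json"; // hypothetical file

    const rel = path.relative(workflowsDir, filePath); // "marketing/0123_Webhook_Automation.json"
    const parts = rel.split(path.sep);
    const folder = parts.length > 1 ? parts[0] : ""; // "marketing"; "" for top-level files

    console.log(folder);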

package.json

@@ -10,20 +10,25 @@
     "index": "node src/index-workflows.js"
   },
   "dependencies": {
-    "express": "^4.18.2",
-    "cors": "^2.8.5",
-    "sqlite3": "^5.1.6",
-    "compression": "^1.7.4",
-    "express-rate-limit": "^7.1.5",
-    "helmet": "^7.1.0",
-    "fs-extra": "^11.2.0",
     "chokidar": "^3.5.3",
-    "commander": "^11.1.0"
+    "commander": "^11.1.0",
+    "compression": "^1.8.1",
+    "cors": "^2.8.5",
+    "express": "^4.21.2",
+    "express-rate-limit": "^7.5.1",
+    "fs-extra": "^11.3.0",
+    "helmet": "^7.2.0",
+    "sqlite3": "^5.1.7"
   },
   "devDependencies": {
     "nodemon": "^3.0.2"
   },
-  "keywords": ["n8n", "workflows", "documentation", "automation"],
+  "keywords": [
+    "n8n",
+    "workflows",
+    "documentation",
+    "automation"
+  ],
   "author": "",
   "license": "MIT"
 }

workflow database module

@@ -1,12 +1,25 @@
-const sqlite3 = require('sqlite3').verbose();
-const path = require('path');
-const fs = require('fs-extra');
-const crypto = require('crypto');
+const sqlite3 = require("sqlite3").verbose();
+const path = require("path");
+const fs = require("fs-extra");
+const crypto = require("crypto");
 
+async function getAllJsonFiles(dir) {
+  let results = [];
+  const items = await fs.readdir(dir, { withFileTypes: true });
+  for (const item of items) {
+    const full = path.join(dir, item.name);
+    if (item.isDirectory()) {
+      results = results.concat(await getAllJsonFiles(full));
+    } else if (item.isFile() && full.endsWith(".json")) {
+      results.push(full);
+    }
+  }
+  return results;
+}
 class WorkflowDatabase {
-  constructor(dbPath = 'database/workflows.db') {
+  constructor(dbPath = "database/workflows.db") {
     this.dbPath = dbPath;
-    this.workflowsDir = 'workflows';
+    this.workflowsDir = "workflows";
     this.db = null;
     this.initialized = false;
   }
@@ -30,10 +43,10 @@ class WorkflowDatabase {
       }
 
       // Enable WAL mode for better performance
-      this.db.run('PRAGMA journal_mode=WAL');
-      this.db.run('PRAGMA synchronous=NORMAL');
-      this.db.run('PRAGMA cache_size=10000');
-      this.db.run('PRAGMA temp_store=MEMORY');
+      this.db.run("PRAGMA journal_mode=WAL");
+      this.db.run("PRAGMA synchronous=NORMAL");
+      this.db.run("PRAGMA cache_size=10000");
+      this.db.run("PRAGMA temp_store=MEMORY");
 
       this.createTables().then(resolve).catch(reject);
     });
@@ -49,6 +62,7 @@ class WorkflowDatabase {
         id INTEGER PRIMARY KEY AUTOINCREMENT,
         filename TEXT UNIQUE NOT NULL,
         name TEXT NOT NULL,
+        folder TEXT DEFAULT '',
         workflow_id TEXT,
         active BOOLEAN DEFAULT 0,
         description TEXT,
@@ -74,11 +88,11 @@ class WorkflowDatabase {
       )`,
 
       // Indexes for performance
-      'CREATE INDEX IF NOT EXISTS idx_trigger_type ON workflows(trigger_type)',
-      'CREATE INDEX IF NOT EXISTS idx_complexity ON workflows(complexity)',
-      'CREATE INDEX IF NOT EXISTS idx_active ON workflows(active)',
-      'CREATE INDEX IF NOT EXISTS idx_node_count ON workflows(node_count)',
-      'CREATE INDEX IF NOT EXISTS idx_filename ON workflows(filename)',
+      "CREATE INDEX IF NOT EXISTS idx_trigger_type ON workflows(trigger_type)",
+      "CREATE INDEX IF NOT EXISTS idx_complexity ON workflows(complexity)",
+      "CREATE INDEX IF NOT EXISTS idx_active ON workflows(active)",
+      "CREATE INDEX IF NOT EXISTS idx_node_count ON workflows(node_count)",
+      "CREATE INDEX IF NOT EXISTS idx_filename ON workflows(filename)",
 
       // Triggers to sync FTS table (simplified)
       `CREATE TRIGGER IF NOT EXISTS workflows_ai AFTER INSERT ON workflows BEGIN
@@ -94,7 +108,7 @@ class WorkflowDatabase {
        DELETE FROM workflows_fts WHERE filename = old.filename;
        INSERT INTO workflows_fts(filename, name, description, integrations, tags)
        VALUES (new.filename, new.name, new.description, new.integrations, new.tags);
-      END`
+      END`,
     ];
 
     // Run queries sequentially to avoid race conditions
@@ -121,33 +135,37 @@ class WorkflowDatabase {
   getFileHash(filePath) {
     const buffer = fs.readFileSync(filePath);
-    return crypto.createHash('md5').update(buffer).digest('hex');
+    return crypto.createHash("md5").update(buffer).digest("hex");
   }
 
   formatWorkflowName(filename) {
     // Remove .json extension and split by underscores
-    const name = filename.replace('.json', '');
-    const parts = name.split('_');
+    const name = filename.replace(".json", "");
+    const parts = name.split("_");
 
     // Skip first part if it's just a number
     const startIndex = parts[0] && /^\d+$/.test(parts[0]) ? 1 : 0;
     const cleanParts = parts.slice(startIndex);
 
-    return cleanParts.map(part => {
-      const lower = part.toLowerCase();
-      const specialTerms = {
-        'http': 'HTTP',
-        'api': 'API',
-        'webhook': 'Webhook',
-        'automation': 'Automation',
-        'automate': 'Automate',
-        'scheduled': 'Scheduled',
-        'triggered': 'Triggered',
-        'manual': 'Manual'
-      };
-
-      return specialTerms[lower] || part.charAt(0).toUpperCase() + part.slice(1);
-    }).join(' ');
+    return cleanParts
+      .map((part) => {
+        const lower = part.toLowerCase();
+        const specialTerms = {
+          http: "HTTP",
+          api: "API",
+          webhook: "Webhook",
+          automation: "Automation",
+          automate: "Automate",
+          scheduled: "Scheduled",
+          triggered: "Triggered",
+          manual: "Manual",
+        };
+
+        return (
+          specialTerms[lower] || part.charAt(0).toUpperCase() + part.slice(1)
+        );
+      })
+      .join(" ");
   }
 
   analyzeWorkflow(filePath) {
@@ -157,23 +175,32 @@ class WorkflowDatabase {
       const fileSize = fs.statSync(filePath).size;
       const fileHash = this.getFileHash(filePath);
 
+      const rel = path.relative(this.workflowsDir, filePath);
+      const parts = rel.split(path.sep);
+      const folder = parts.length > 1 ? parts[0] : "";
+
       const workflow = {
         filename,
         name: this.formatWorkflowName(filename),
-        workflow_id: data.id || '',
+        folder,
+        workflow_id: data.id || "",
         active: data.active || false,
         nodes: data.nodes || [],
         connections: data.connections || {},
         tags: data.tags || [],
-        created_at: data.createdAt || '',
-        updated_at: data.updatedAt || '',
+        created_at: data.createdAt || "",
+        updated_at: data.updatedAt || "",
         file_hash: fileHash,
-        file_size: fileSize
+        file_size: fileSize,
       };
 
       // Use meaningful JSON name if available
       const jsonName = data.name?.trim();
-      if (jsonName && jsonName !== filename.replace('.json', '') && !jsonName.startsWith('My workflow')) {
+      if (
+        jsonName &&
+        jsonName !== filename.replace(".json", "") &&
+        !jsonName.startsWith("My workflow")
+      ) {
         workflow.name = jsonName;
       }
@@ -183,11 +210,11 @@ class WorkflowDatabase {
       // Determine complexity
       if (nodeCount <= 5) {
-        workflow.complexity = 'low';
+        workflow.complexity = "low";
       } else if (nodeCount <= 15) {
-        workflow.complexity = 'medium';
+        workflow.complexity = "medium";
       } else {
-        workflow.complexity = 'high';
+        workflow.complexity = "high";
       }
 
       // Analyze trigger type and integrations
@@ -196,40 +223,48 @@ class WorkflowDatabase {
       workflow.integrations = Array.from(integrations);
 
       // Generate description
-      workflow.description = this.generateDescription(workflow, triggerType, integrations);
+      workflow.description = this.generateDescription(
+        workflow,
+        triggerType,
+        integrations
+      );
 
       return workflow;
     } catch (error) {
-      console.error(`Error analyzing workflow ${filePath}:`, error.message);
+      console.error(
+        `Error analyzing workflow file "${filePath}": ${error.message}`
+      );
       return null;
     }
   }
 
   analyzeNodes(nodes) {
     const integrations = new Set();
-    let triggerType = 'Manual';
+    let triggerType = "Manual";
 
-    nodes.forEach(node => {
-      const nodeType = node.type || '';
+    nodes.forEach((node) => {
+      const nodeType = node.type || "";
 
       // Extract integration name from node type
-      if (nodeType.includes('.')) {
-        const parts = nodeType.split('.');
+      if (nodeType.includes(".")) {
+        const parts = nodeType.split(".");
         if (parts.length >= 2) {
           const integration = parts[1];
-          if (integration !== 'core' && integration !== 'base') {
-            integrations.add(integration.charAt(0).toUpperCase() + integration.slice(1));
+          if (integration !== "core" && integration !== "base") {
+            integrations.add(
+              integration.charAt(0).toUpperCase() + integration.slice(1)
+            );
           }
         }
       }
 
       // Determine trigger type based on node types
-      if (nodeType.includes('webhook')) {
-        triggerType = 'Webhook';
-      } else if (nodeType.includes('cron') || nodeType.includes('schedule')) {
-        triggerType = 'Scheduled';
-      } else if (nodeType.includes('trigger')) {
-        triggerType = 'Triggered';
+      if (nodeType.includes("webhook")) {
+        triggerType = "Webhook";
+      } else if (nodeType.includes("cron") || nodeType.includes("schedule")) {
+        triggerType = "Scheduled";
+      } else if (nodeType.includes("trigger")) {
+        triggerType = "Triggered";
       }
     });
@@ -240,10 +275,10 @@ class WorkflowDatabase {
     const parts = [];
 
     // Add trigger info
-    if (triggerType !== 'Manual') {
+    if (triggerType !== "Manual") {
       parts.push(`${triggerType} workflow`);
     } else {
-      parts.push('Manual workflow');
+      parts.push("Manual workflow");
     }
 
     // Add integration info
@@ -252,13 +287,15 @@ class WorkflowDatabase {
       if (integrations.size > 3) {
         integrationList.push(`+${integrations.size - 3} more`);
       }
-      parts.push(`integrating ${integrationList.join(', ')}`);
+      parts.push(`integrating ${integrationList.join(", ")}`);
     }
 
     // Add complexity info
-    parts.push(`with ${workflow.node_count} nodes (${workflow.complexity} complexity)`);
+    parts.push(
+      `with ${workflow.node_count} nodes (${workflow.complexity} complexity)`
+    );
 
-    return parts.join(' ');
+    return parts.join(" ");
   }
 
   async indexWorkflows(forceReindex = false) {
@@ -266,15 +303,13 @@ class WorkflowDatabase {
       await this.initialize();
     }
 
-    const workflowFiles = await fs.readdir(this.workflowsDir);
-    const jsonFiles = workflowFiles.filter(file => file.endsWith('.json'));
+    const jsonFiles = await getAllJsonFiles(this.workflowsDir);
 
     let processed = 0;
     let skipped = 0;
    let errors = 0;
 
-    for (const file of jsonFiles) {
-      const filePath = path.join(this.workflowsDir, file);
+    for (const filePath of jsonFiles) {
       const workflow = this.analyzeWorkflow(filePath);
 
       if (!workflow) {
@@ -283,9 +318,12 @@ class WorkflowDatabase {
       }
 
       try {
-        // Check if workflow exists and if hash changed
-        const existing = await this.getWorkflowByFilename(file);
-        if (!forceReindex && existing && existing.file_hash === workflow.file_hash) {
+        const existing = await this.getWorkflowByFilename(workflow.filename);
+        if (
+          !forceReindex &&
+          existing &&
+          existing.file_hash === workflow.file_hash
+        ) {
           skipped++;
           continue;
         }
@@ -293,7 +331,9 @@ class WorkflowDatabase {
         await this.upsertWorkflow(workflow);
         processed++;
       } catch (error) {
-        console.error(`Error indexing workflow ${file}:`, error.message);
+        console.error(
+          `Error indexing workflow ${workflow.filename}: ${error.message}`
+        );
         errors++;
       }
     }
@@ -304,7 +344,7 @@ class WorkflowDatabase {
   async getWorkflowByFilename(filename) {
     return new Promise((resolve, reject) => {
       this.db.get(
-        'SELECT * FROM workflows WHERE filename = ?',
+        "SELECT * FROM workflows WHERE filename = ?",
         [filename],
         (err, row) => {
           if (err) reject(err);
@@ -318,15 +358,16 @@ class WorkflowDatabase {
     return new Promise((resolve, reject) => {
       const sql = `
         INSERT OR REPLACE INTO workflows (
-          filename, name, workflow_id, active, description, trigger_type,
+          filename, name, folder, workflow_id, active, description, trigger_type,
           complexity, node_count, integrations, tags, created_at, updated_at,
           file_hash, file_size, analyzed_at
-        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
+        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
       `;
 
       const params = [
         workflow.filename,
         workflow.name,
+        workflow.folder,
         workflow.workflow_id,
         workflow.active,
         workflow.description,
@@ -338,7 +379,7 @@ class WorkflowDatabase {
         workflow.created_at,
         workflow.updated_at,
         workflow.file_hash,
-        workflow.file_size
+        workflow.file_size,
       ];
 
      this.db.run(sql, params, function (err) {
@@ -351,10 +392,10 @@ class WorkflowDatabase {
   buildFTSQuery(query) {
     // Escape FTS5 special characters and build partial matching query
     let cleanQuery = query
-      .replace(/[^\w\s"'-]/g, ' ') // Remove special chars except quotes, hyphens, apostrophes
+      .replace(/[^\w\s"'-]/g, " ") // Remove special chars except quotes, hyphens, apostrophes
       .trim();
 
-    if (!cleanQuery) return '*';
+    if (!cleanQuery) return "*";
 
     // Handle quoted phrases
     const phrases = [];
@@ -363,14 +404,14 @@ class WorkflowDatabase {
     while ((match = quotedRegex.exec(cleanQuery)) !== null) {
       phrases.push(`"${match[1]}"`); // Keep exact phrases
-      cleanQuery = cleanQuery.replace(match[0], ' ');
+      cleanQuery = cleanQuery.replace(match[0], " ");
     }
 
     // Split remaining terms and add wildcards for partial matching
     const terms = cleanQuery
       .split(/\s+/)
-      .filter(term => term.length > 0)
-      .map(term => {
+      .filter((term) => term.length > 0)
+      .map((term) => {
         // Add wildcard suffix for prefix matching
         if (term.length >= 2) {
           return `${term}*`;
@@ -381,20 +422,26 @@ class WorkflowDatabase {
     // Combine phrases and wildcard terms
     const allTerms = [...phrases, ...terms];
-    if (allTerms.length === 0) return '*';
+    if (allTerms.length === 0) return "*";
 
     // Join with AND for more precise results
-    return allTerms.join(' AND ');
+    return allTerms.join(" AND ");
   }
 
-  async searchWorkflows(query = '', triggerFilter = 'all', complexityFilter = 'all',
-    activeOnly = false, limit = 50, offset = 0) {
+  async searchWorkflows(
+    query = "",
+    triggerFilter = "all",
+    complexityFilter = "all",
+    activeOnly = false,
+    limit = 50,
+    offset = 0
+  ) {
     if (!this.initialized) {
       await this.initialize();
     }
 
     return new Promise((resolve, reject) => {
-      let sql = '';
+      let sql = "";
       let params = [];
 
       if (query.trim()) {
@@ -408,22 +455,22 @@ class WorkflowDatabase {
         params.push(ftsQuery);
       } else {
         // Regular search
-        sql = 'SELECT * FROM workflows WHERE 1=1';
+        sql = "SELECT * FROM workflows WHERE 1=1";
       }
 
       // Add filters
-      if (triggerFilter !== 'all') {
-        sql += ' AND trigger_type = ?';
+      if (triggerFilter !== "all") {
+        sql += " AND trigger_type = ?";
         params.push(triggerFilter);
       }
 
-      if (complexityFilter !== 'all') {
-        sql += ' AND complexity = ?';
+      if (complexityFilter !== "all") {
+        sql += " AND complexity = ?";
         params.push(complexityFilter);
       }
 
       if (activeOnly) {
-        sql += ' AND active = 1';
+        sql += " AND active = 1";
       }
 
       // Count total - rebuild query for FTS compatibility
@@ -440,18 +487,18 @@ class WorkflowDatabase {
         countParams = [this.buildFTSQuery(query.trim())];
 
         // Add filters to count query
-        if (triggerFilter !== 'all') {
-          countSql += ' AND trigger_type = ?';
+        if (triggerFilter !== "all") {
+          countSql += " AND trigger_type = ?";
          countParams.push(triggerFilter);
        }
 
-        if (complexityFilter !== 'all') {
-          countSql += ' AND complexity = ?';
+        if (complexityFilter !== "all") {
+          countSql += " AND complexity = ?";
          countParams.push(complexityFilter);
        }
 
        if (activeOnly) {
-          countSql += ' AND active = 1';
+          countSql += " AND active = 1";
        }
      } else {
        countSql = `SELECT COUNT(*) as total FROM (${sql})`;
@@ -467,7 +514,7 @@ class WorkflowDatabase {
         const total = countResult.total;
 
         // Add pagination
-        sql += ' ORDER BY name LIMIT ? OFFSET ?';
+        sql += " ORDER BY name LIMIT ? OFFSET ?";
         params.push(limit, offset);
 
         this.db.all(sql, params, (err, rows) => {
@@ -477,10 +524,10 @@ class WorkflowDatabase {
           }
 
           // Parse JSON fields
-          const workflows = rows.map(row => ({
+          const workflows = rows.map((row) => ({
             ...row,
-            integrations: JSON.parse(row.integrations || '[]'),
-            tags: JSON.parse(row.tags || '[]')
+            integrations: JSON.parse(row.integrations || "[]"),
+            tags: JSON.parse(row.tags || "[]"),
           }));
 
           resolve({ workflows, total });
@@ -496,47 +543,61 @@ class WorkflowDatabase {
     return new Promise((resolve, reject) => {
       const queries = [
-        'SELECT COUNT(*) as total FROM workflows',
-        'SELECT COUNT(*) as active FROM workflows WHERE active = 1',
-        'SELECT COUNT(*) as inactive FROM workflows WHERE active = 0',
-        'SELECT trigger_type, COUNT(*) as count FROM workflows GROUP BY trigger_type',
-        'SELECT complexity, COUNT(*) as count FROM workflows GROUP BY complexity',
-        'SELECT SUM(node_count) as total_nodes FROM workflows',
-        'SELECT analyzed_at FROM workflows ORDER BY analyzed_at DESC LIMIT 1'
+        "SELECT COUNT(*) as total FROM workflows",
+        "SELECT COUNT(*) as active FROM workflows WHERE active = 1",
+        "SELECT COUNT(*) as inactive FROM workflows WHERE active = 0",
+        "SELECT trigger_type, COUNT(*) as count FROM workflows GROUP BY trigger_type",
+        "SELECT complexity, COUNT(*) as count FROM workflows GROUP BY complexity",
+        "SELECT SUM(node_count) as total_nodes FROM workflows",
+        "SELECT analyzed_at FROM workflows ORDER BY analyzed_at DESC LIMIT 1",
       ];
 
-      Promise.all(queries.map(sql =>
-        new Promise((resolve, reject) => {
-          this.db.all(sql, (err, rows) => {
-            if (err) reject(err);
-            else resolve(rows);
-          });
-        })
-      )).then(results => {
-        const [total, active, inactive, triggers, complexity, nodes, lastIndexed] = results;
+      Promise.all(
+        queries.map(
+          (sql) =>
+            new Promise((resolve, reject) => {
+              this.db.all(sql, (err, rows) => {
+                if (err) reject(err);
+                else resolve(rows);
+              });
+            })
+        )
+      )
+        .then((results) => {
+          const [
+            total,
+            active,
+            inactive,
+            triggers,
+            complexity,
+            nodes,
+            lastIndexed,
+          ] = results;
 
           const triggersMap = {};
-          triggers.forEach(row => {
+          triggers.forEach((row) => {
            triggersMap[row.trigger_type] = row.count;
          });
 
          const complexityMap = {};
-          complexity.forEach(row => {
+          complexity.forEach((row) => {
            complexityMap[row.complexity] = row.count;
          });
 
          // Count unique integrations
-          this.db.all('SELECT integrations FROM workflows', (err, rows) => {
+          this.db.all("SELECT integrations FROM workflows", (err, rows) => {
            if (err) {
              reject(err);
              return;
            }
 
            const allIntegrations = new Set();
-            rows.forEach(row => {
+            rows.forEach((row) => {
              try {
-                const integrations = JSON.parse(row.integrations || '[]');
-                integrations.forEach(integration => allIntegrations.add(integration));
+                const integrations = JSON.parse(row.integrations || "[]");
+                integrations.forEach((integration) =>
+                  allIntegrations.add(integration)
+                );
              } catch (e) {
                // Ignore parse errors
              }
@@ -550,17 +611,18 @@ class WorkflowDatabase {
              complexity: complexityMap,
              total_nodes: nodes[0].total_nodes || 0,
              unique_integrations: allIntegrations.size,
-              last_indexed: lastIndexed[0]?.analyzed_at || ''
+              last_indexed: lastIndexed[0]?.analyzed_at || "",
            });
          });
-      }).catch(reject);
+        })
+        .catch(reject);
     });
   }
 
   async getWorkflowDetail(filename) {
     return new Promise((resolve, reject) => {
       this.db.get(
-        'SELECT * FROM workflows WHERE filename = ?',
+        "SELECT * FROM workflows WHERE filename = ?",
         [filename],
         (err, row) => {
           if (err) {
@@ -576,8 +638,8 @@ class WorkflowDatabase {
           // Parse JSON fields and load raw workflow data
           const workflow = {
             ...row,
-            integrations: JSON.parse(row.integrations || '[]'),
-            tags: JSON.parse(row.tags || '[]')
+            integrations: JSON.parse(row.integrations || "[]"),
+            tags: JSON.parse(row.tags || "[]"),
           };
 
           // Load raw workflow JSON
@@ -586,7 +648,10 @@ class WorkflowDatabase {
            const rawWorkflow = fs.readJsonSync(workflowPath);
            workflow.raw_workflow = rawWorkflow;
          } catch (error) {
-            console.error(`Error loading raw workflow ${filename}:`, error.message);
+            console.error(
+              `Error loading raw workflow ${filename}:`,
+              error.message
+            );
          }
 
          resolve(workflow);

Some files were not shown because too many files have changed in this diff.
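
Once workflows are reindexed, the new folder value can be filtered or grouped like any other column. A hedged usage sketch, assuming the module exports the WorkflowDatabase class (the grouping query below is illustrative and not part of this commit; it reaches into the module's underlying sqlite3 handle because no folder-specific query method is added here):

    // Illustrative only: count workflows per folder using the new column.
    const WorkflowDatabase = require("./path/to/this/module"); // hypothetical require path
    const db = new WorkflowDatabase();

    async function countByFolder() {
      await db.initialize();
      await db.indexWorkflows(); // now picks up workflows/<folder>/*.json as well

      return new Promise((resolve, reject) => {
        db.db.all(
          "SELECT folder, COUNT(*) AS count FROM workflows GROUP BY folder ORDER BY folder",
          (err, rows) => (err ? reject(err) : resolve(rows))
        );
      });
    }

    countByFolder().then((rows) => console.table(rows));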