🚀 Project optimization and cleanup
- Remove deprecated generate_documentation.py (2187 lines, 71MB HTML output) - Remove unused comprehensive_workflow_renamer.py (396 lines) - Replace import-workflows.sh with Python import_workflows.py (better error handling) - Remove Chinese README to simplify project structure - Enhance run.py with CLI arguments and better configuration - Update requirements.txt with precise version constraints - Overall: Removed ~2600+ lines of unused/deprecated code - Improved: Error handling, logging, and development workflow
This commit is contained in:
21
README.md
21
README.md
@@ -81,17 +81,26 @@ python run.py
|
|||||||
### Option 2: Development Mode
|
### Option 2: Development Mode
|
||||||
```bash
|
```bash
|
||||||
# Start with auto-reload for development
|
# Start with auto-reload for development
|
||||||
python api_server.py --reload
|
python run.py --dev
|
||||||
|
|
||||||
# Or specify custom host/port
|
# Or specify custom host/port
|
||||||
python api_server.py --host 0.0.0.0 --port 3000
|
python run.py --host 0.0.0.0 --port 3000
|
||||||
|
|
||||||
|
# Force database reindexing
|
||||||
|
python run.py --reindex
|
||||||
```
|
```
|
||||||
|
|
||||||
### Import Workflows into n8n
|
### Import Workflows into n8n
|
||||||
1. Open your [n8n Editor UI](https://docs.n8n.io/hosting/editor-ui/)
|
```bash
|
||||||
2. Click **menu** (☰) → `Import workflow`
|
# Use the Python importer (recommended)
|
||||||
3. Choose any `.json` file from the `workflows/` folder
|
python import_workflows.py
|
||||||
4. Update credentials/webhook URLs before running
|
|
||||||
|
# Or manually import individual workflows:
|
||||||
|
# 1. Open your n8n Editor UI
|
||||||
|
# 2. Click menu (☰) → Import workflow
|
||||||
|
# 3. Choose any .json file from the workflows/ folder
|
||||||
|
# 4. Update credentials/webhook URLs before running
|
||||||
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|||||||
162
import_workflows.py
Normal file
162
import_workflows.py
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
N8N Workflow Importer
|
||||||
|
Python replacement for import-workflows.sh with better error handling and progress tracking.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List, Dict, Any
|
||||||
|
|
||||||
|
|
||||||
|
class WorkflowImporter:
|
||||||
|
"""Import n8n workflows with progress tracking and error handling."""
|
||||||
|
|
||||||
|
def __init__(self, workflows_dir: str = "workflows"):
|
||||||
|
self.workflows_dir = Path(workflows_dir)
|
||||||
|
self.imported_count = 0
|
||||||
|
self.failed_count = 0
|
||||||
|
self.errors = []
|
||||||
|
|
||||||
|
def validate_workflow(self, file_path: Path) -> bool:
|
||||||
|
"""Validate workflow JSON before import."""
|
||||||
|
try:
|
||||||
|
with open(file_path, 'r', encoding='utf-8') as f:
|
||||||
|
data = json.load(f)
|
||||||
|
|
||||||
|
# Basic validation
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Check required fields
|
||||||
|
required_fields = ['nodes', 'connections']
|
||||||
|
for field in required_fields:
|
||||||
|
if field not in data:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
except (json.JSONDecodeError, FileNotFoundError, PermissionError):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def import_workflow(self, file_path: Path) -> bool:
|
||||||
|
"""Import a single workflow file."""
|
||||||
|
try:
|
||||||
|
# Validate first
|
||||||
|
if not self.validate_workflow(file_path):
|
||||||
|
self.errors.append(f"Invalid JSON: {file_path.name}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Run n8n import command
|
||||||
|
result = subprocess.run([
|
||||||
|
'npx', 'n8n', 'import:workflow',
|
||||||
|
f'--input={file_path}'
|
||||||
|
], capture_output=True, text=True, timeout=30)
|
||||||
|
|
||||||
|
if result.returncode == 0:
|
||||||
|
print(f"✅ Imported: {file_path.name}")
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
error_msg = result.stderr.strip() or result.stdout.strip()
|
||||||
|
self.errors.append(f"Import failed for {file_path.name}: {error_msg}")
|
||||||
|
print(f"❌ Failed: {file_path.name}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
except subprocess.TimeoutExpired:
|
||||||
|
self.errors.append(f"Timeout importing {file_path.name}")
|
||||||
|
print(f"⏰ Timeout: {file_path.name}")
|
||||||
|
return False
|
||||||
|
except Exception as e:
|
||||||
|
self.errors.append(f"Error importing {file_path.name}: {str(e)}")
|
||||||
|
print(f"❌ Error: {file_path.name} - {str(e)}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_workflow_files(self) -> List[Path]:
|
||||||
|
"""Get all workflow JSON files."""
|
||||||
|
if not self.workflows_dir.exists():
|
||||||
|
print(f"❌ Workflows directory not found: {self.workflows_dir}")
|
||||||
|
return []
|
||||||
|
|
||||||
|
json_files = list(self.workflows_dir.glob("*.json"))
|
||||||
|
if not json_files:
|
||||||
|
print(f"❌ No JSON files found in: {self.workflows_dir}")
|
||||||
|
return []
|
||||||
|
|
||||||
|
return sorted(json_files)
|
||||||
|
|
||||||
|
def import_all(self) -> Dict[str, Any]:
|
||||||
|
"""Import all workflow files."""
|
||||||
|
workflow_files = self.get_workflow_files()
|
||||||
|
total_files = len(workflow_files)
|
||||||
|
|
||||||
|
if total_files == 0:
|
||||||
|
return {"success": False, "message": "No workflow files found"}
|
||||||
|
|
||||||
|
print(f"🚀 Starting import of {total_files} workflows...")
|
||||||
|
print("-" * 50)
|
||||||
|
|
||||||
|
for i, file_path in enumerate(workflow_files, 1):
|
||||||
|
print(f"[{i}/{total_files}] Processing {file_path.name}...")
|
||||||
|
|
||||||
|
if self.import_workflow(file_path):
|
||||||
|
self.imported_count += 1
|
||||||
|
else:
|
||||||
|
self.failed_count += 1
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
print("\n" + "=" * 50)
|
||||||
|
print(f"📊 Import Summary:")
|
||||||
|
print(f"✅ Successfully imported: {self.imported_count}")
|
||||||
|
print(f"❌ Failed imports: {self.failed_count}")
|
||||||
|
print(f"📁 Total files: {total_files}")
|
||||||
|
|
||||||
|
if self.errors:
|
||||||
|
print(f"\n❌ Errors encountered:")
|
||||||
|
for error in self.errors[:10]: # Show first 10 errors
|
||||||
|
print(f" • {error}")
|
||||||
|
if len(self.errors) > 10:
|
||||||
|
print(f" ... and {len(self.errors) - 10} more errors")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": self.failed_count == 0,
|
||||||
|
"imported": self.imported_count,
|
||||||
|
"failed": self.failed_count,
|
||||||
|
"total": total_files,
|
||||||
|
"errors": self.errors
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def check_n8n_available() -> bool:
    """Return True when the n8n CLI can be invoked via npx."""
    try:
        probe = subprocess.run(
            ['npx', 'n8n', '--version'],
            capture_output=True, text=True, timeout=10
        )
    except (subprocess.TimeoutExpired, FileNotFoundError):
        # npx missing or the version probe hung — treat as unavailable.
        return False
    return probe.returncode == 0
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Main entry point."""
    print("🔧 N8N Workflow Importer")
    print("=" * 40)

    # Bail out early if the n8n CLI cannot be invoked at all.
    if not check_n8n_available():
        print("❌ n8n CLI not found. Please install n8n first:")
        print("   npm install -g n8n")
        sys.exit(1)

    # Run the full batch import and translate the outcome to an exit code.
    outcome = WorkflowImporter().import_all()
    sys.exit(0 if outcome["success"] else 1)


if __name__ == "__main__":
    main()
|
||||||
@@ -1,3 +1,5 @@
|
|||||||
fastapi>=0.104.0
|
# N8N Workflows API Dependencies
|
||||||
uvicorn>=0.24.0
|
# Core API Framework
|
||||||
pydantic>=2.4.0
|
fastapi>=0.104.0,<1.0.0
|
||||||
|
uvicorn[standard]>=0.24.0,<1.0.0
|
||||||
|
pydantic>=2.4.0,<3.0.0
|
||||||
131
run.py
131
run.py
@@ -1,56 +1,89 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
"""
|
"""
|
||||||
🚀 Simple Launcher for n8n-workflows Search Engine
|
🚀 N8N Workflows Search Engine Launcher
|
||||||
Start the system with advanced search capabilities.
|
Start the advanced search system with optimized performance.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
|
import argparse
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
def print_banner():
    """Print application banner."""
    separator_width = 50
    print("🚀 n8n-workflows Advanced Search Engine")
    print("=" * separator_width)
||||||
|
|
||||||
def check_requirements() -> bool:
    """Check if required dependencies are installed.

    Probes each required module by import; reports every missing one at
    once instead of stopping at the first failure.
    """
    missing_deps = []

    # Probe order matches the original: sqlite3, uvicorn, fastapi.
    for module_name in ("sqlite3", "uvicorn", "fastapi"):
        try:
            __import__(module_name)
        except ImportError:
            missing_deps.append(module_name)

    if missing_deps:
        print(f"❌ Missing dependencies: {', '.join(missing_deps)}")
        print("💡 Install with: pip install -r requirements.txt")
        return False

    print("✅ Dependencies verified")
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def setup_directories():
    """Create necessary directories (idempotent)."""
    for folder in ("database", "static", "workflows"):
        os.makedirs(folder, exist_ok=True)

    print("✅ Directories verified")
||||||
|
|
||||||
|
|
||||||
|
def setup_database(force_reindex: bool = False) -> str:
    """Setup and initialize the database.

    Indexes all workflows when the database is empty or *force_reindex*
    is True; otherwise reports the existing contents. Returns the path
    to the SQLite database file.
    """
    from workflow_db import WorkflowDatabase

    db_path = "database/workflows.db"
    print(f"🔄 Setting up database: {db_path}")

    db = WorkflowDatabase(db_path)
    stats = db.get_stats()

    # Index when empty, or when the caller explicitly requested a reindex.
    if force_reindex or stats['total'] == 0:
        print("📚 Indexing workflows...")
        index_stats = db.index_all_workflows(force_reindex=True)
        print(f"✅ Indexed {index_stats['processed']} workflows")

        # Re-read stats so the count reflects the fresh index.
        final_stats = db.get_stats()
        print(f"📊 Database contains {final_stats['total']} workflows")
    else:
        print(f"✅ Database ready: {stats['total']} workflows")

    return db_path
||||||
|
|
||||||
def start_server(host: str = "127.0.0.1", port: int = 8000, reload: bool = False):
    """Start the FastAPI server (blocks until interrupted)."""
    base_url = f"http://{host}:{port}"
    print(f"🌐 Starting server at {base_url}")
    print(f"📊 API Documentation: {base_url}/docs")
    print(f"🔍 Workflow Search: {base_url}/api/workflows")
    print()
    print("Press Ctrl+C to stop the server")
    print("-" * 50)

    # Tell the API process where the database lives.
    os.environ['WORKFLOW_DB_PATH'] = "database/workflows.db"

    # Launch uvicorn; access logging disabled to reduce log noise.
    import uvicorn
    uvicorn.run(
        "api_server:app",
        host=host,
        port=port,
        reload=reload,
        log_level="info",
        access_log=False,
    )
||||||
|
|
||||||
|
|
||||||
def main():
    """Main entry point with command line arguments."""
    parser = argparse.ArgumentParser(
        description="N8N Workflows Search Engine",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python run.py                 # Start with default settings
  python run.py --port 3000     # Start on port 3000
  python run.py --host 0.0.0.0  # Accept external connections
  python run.py --reindex       # Force database reindexing
  python run.py --dev           # Development mode with auto-reload
        """
    )

    parser.add_argument(
        "--host",
        default="127.0.0.1",
        help="Host to bind to (default: 127.0.0.1)"
    )
    parser.add_argument(
        "--port",
        type=int,
        default=8000,
        help="Port to bind to (default: 8000)"
    )
    parser.add_argument(
        "--reindex",
        action="store_true",
        help="Force database reindexing"
    )
    parser.add_argument(
        "--dev",
        action="store_true",
        help="Development mode with auto-reload"
    )

    args = parser.parse_args()

    print_banner()

    # Check dependencies
    if not check_requirements():
        sys.exit(1)

    # Setup directories
    setup_directories()

    # Setup database
    try:
        setup_database(force_reindex=args.reindex)
    except Exception as e:
        print(f"❌ Database setup error: {e}")
        sys.exit(1)

    # Start server
    try:
        start_server(host=args.host, port=args.port, reload=args.dev)
    except KeyboardInterrupt:
        print("\n👋 Server stopped!")
    except Exception as e:
        print(f"❌ Server error: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()
||||||
Reference in New Issue
Block a user