#!/usr/bin/env python3
"""
encoderPro Web Dashboard
========================
Modern web interface for monitoring and controlling encoderPro.

Features:
- Real-time statistics and progress
- File browser and search
- Job control (start/stop/pause)
- Encoder configuration
- Quality checking
- Log viewer
- System health monitoring
"""

import json
import logging
import os
import sqlite3
import subprocess
import threading
import time
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Optional

# Import encoder detection from reencode module
try:
    from reencode import EncoderDetector, EncoderCapabilities
    REENCODE_AVAILABLE = True
except ImportError:
    REENCODE_AVAILABLE = False
    logging.warning("reencode module not available for encoder detection")

try:
    import yaml
except ImportError:
    logging.warning("PyYAML not installed. Install with: pip install pyyaml")
    yaml = None

import secrets
from flask import Flask, render_template, jsonify, request, send_from_directory, session
from flask_cors import CORS

__version__ = "3.1.0"

# =============================================================================
# CONFIGURATION
# =============================================================================

class DashboardConfig:
    """Dashboard configuration"""
    def __init__(self):
        # Resolve and validate paths to prevent traversal attacks
        self.state_db = self._validate_path(os.getenv('STATE_DB', '/db/state.db'))
        self.log_dir = self._validate_path(os.getenv('LOG_DIR', '/logs'), must_be_dir=True)
        self.config_file = self._validate_path(os.getenv('CONFIG_FILE', '/config/config.yaml'))
        self.reencode_script = self._validate_path(os.getenv('REENCODE_SCRIPT', '/app/reencode.py'))

        self.host = os.getenv('DASHBOARD_HOST', '0.0.0.0')
        self.port = int(os.getenv('DASHBOARD_PORT', '5000'))
        self.debug = os.getenv('DASHBOARD_DEBUG', 'false').lower() == 'true'

        if self.debug:
            logging.warning("⚠️ DEBUG MODE ENABLED - Do not use in production!")

    def _validate_path(self, path_str: str, must_be_dir: bool = False) -> Path:
        """Validate and resolve path to prevent traversal attacks"""
        try:
            path = Path(path_str).resolve()

            # Security check: Ensure path doesn't escape expected directories
            # In Docker, all app paths should be under /app, /db, /logs, /config, etc.
            # On Windows for development, allow paths under C:\Users
            import platform
            allowed_prefixes = ['/app', '/db', '/logs', '/config', '/work', '/movies', '/archive']

            if platform.system() == 'Windows':
                # On Windows, allow local development paths
                allowed_prefixes.extend([
                    'C:\\Users',
                    'C:/Users'
                ])

            if not any(str(path).startswith(prefix) for prefix in allowed_prefixes):
                raise ValueError(f"Path {path} is outside allowed directories")

            return path
        except Exception as e:
            logging.error(f"Invalid path configuration: {path_str} - {e}")
            raise ValueError(f"Invalid path: {path_str}")


config = DashboardConfig()
app = Flask(__name__)

# Security configuration
app.config['SECRET_KEY'] = os.getenv('SECRET_KEY', secrets.token_hex(32))
app.config['SESSION_COOKIE_SECURE'] = False  # Set to True only when using HTTPS
app.config['SESSION_COOKIE_HTTPONLY'] = True
app.config['SESSION_COOKIE_SAMESITE'] = 'Lax'

# Warn if not using secure cookies
if not app.config['SESSION_COOKIE_SECURE']:
    logging.warning("⚠️ SESSION_COOKIE_SECURE is False - set to True in production with HTTPS")

# Configure CORS with stricter settings
CORS(app, origins=os.getenv('CORS_ORIGINS', '*').split(','), supports_credentials=True)

# Global state
processing_thread = None
processing_active = False
processing_pid = None  # Track subprocess PID for safe termination
processing_lock = threading.Lock()


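# Environment variables read by this module (defaults in parentheses) -- a
# quick reference sketch, not a full deployment guide:
#
#   STATE_DB         (/db/state.db)         SQLite state database
#   LOG_DIR          (/logs)                log directory
#   CONFIG_FILE      (/config/config.yaml)  encoderPro YAML configuration
#   REENCODE_SCRIPT  (/app/reencode.py)     path to the reencode script
#   DASHBOARD_HOST   (0.0.0.0)              bind address
#   DASHBOARD_PORT   (5000)                 bind port
#   DASHBOARD_DEBUG  (false)                enable Flask debug mode
#   SECRET_KEY       (random per start)     Flask session secret; set explicitly in production
#   CORS_ORIGINS     (*)                    comma-separated list of allowed origins
#
# Example invocation (assumption: the script is saved as dashboard.py):
#
#   export DASHBOARD_PORT=5000 CORS_ORIGINS=https://dashboard.example
#   python3 dashboard.py
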
# =============================================================================
# DATABASE ACCESS
# =============================================================================

class DatabaseReader:
    """Read-only database access for dashboard"""

    def __init__(self, db_path: Path):
        self.db_path = db_path
        self._ensure_database()

    def _ensure_database(self):
        """Ensure database exists and has correct schema"""
        # Always run initialization - it's safe with CREATE TABLE IF NOT EXISTS
        # This ensures migrations run even if the file exists but schema is outdated
        self._initialize_database()

    def _initialize_database(self):
        """Initialize database with schema"""
        # Create directory if needed
        self.db_path.parent.mkdir(parents=True, exist_ok=True)

        conn = sqlite3.connect(str(self.db_path))
        cursor = conn.cursor()

        # Create files table
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS files (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                filepath TEXT UNIQUE NOT NULL,
                relative_path TEXT NOT NULL,
                state TEXT NOT NULL,
                has_subtitles BOOLEAN,
                original_size INTEGER,
                encoded_size INTEGER,
                subtitle_count INTEGER,
                video_codec TEXT,
                audio_codec TEXT,
                audio_channels INTEGER,
                width INTEGER,
                height INTEGER,
                duration REAL,
                bitrate INTEGER,
                container_format TEXT,
                file_hash TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                started_at TIMESTAMP,
                completed_at TIMESTAMP,
                error_message TEXT,
                profile_name TEXT,
                encoder_used TEXT,
                encode_time_seconds REAL,
                fps REAL
            )
        """)

        # Create processing_history table
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS processing_history (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                file_id INTEGER NOT NULL,
                profile_name TEXT,
                encoder_used TEXT,
                started_at TIMESTAMP,
                completed_at TIMESTAMP,
                success BOOLEAN,
                error_message TEXT,
                original_size INTEGER,
                encoded_size INTEGER,
                encode_time_seconds REAL,
                fps REAL,
                FOREIGN KEY (file_id) REFERENCES files (id)
            )
        """)

        # Create indices (core columns only)
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_state ON files(state)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_filepath ON files(filepath)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_profile ON files(profile_name)")

        # Migration: Add new columns if they don't exist
        cursor.execute("PRAGMA table_info(files)")
        columns = {row[1] for row in cursor.fetchall()}

        migrations = [
            ("video_codec", "ALTER TABLE files ADD COLUMN video_codec TEXT"),
            ("audio_codec", "ALTER TABLE files ADD COLUMN audio_codec TEXT"),
            ("audio_channels", "ALTER TABLE files ADD COLUMN audio_channels INTEGER"),
            ("width", "ALTER TABLE files ADD COLUMN width INTEGER"),
            ("height", "ALTER TABLE files ADD COLUMN height INTEGER"),
            ("duration", "ALTER TABLE files ADD COLUMN duration REAL"),
            ("bitrate", "ALTER TABLE files ADD COLUMN bitrate INTEGER"),
            ("container_format", "ALTER TABLE files ADD COLUMN container_format TEXT"),
            ("file_hash", "ALTER TABLE files ADD COLUMN file_hash TEXT"),
        ]

        for column_name, alter_sql in migrations:
            if column_name not in columns:
                logging.info(f"Adding column '{column_name}' to files table")
                cursor.execute(alter_sql)

        # Create indices for migrated columns
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_file_hash ON files(file_hash)")

        conn.commit()
        conn.close()

        logging.info(f"✅ Database initialized at {self.db_path}")

    def _get_connection(self):
        """Get database connection"""
        conn = sqlite3.connect(str(self.db_path))
        conn.row_factory = sqlite3.Row
        return conn

    def cleanup_stuck_processing(self):
        """Mark files stuck in 'processing' state as failed for retry"""
        try:
            conn = self._get_connection()
            cursor = conn.cursor()

            # Find files stuck in processing state
            cursor.execute("SELECT COUNT(*) as count FROM files WHERE state = 'processing'")
            stuck_count = cursor.fetchone()['count']

            if stuck_count > 0:
                logging.warning(f"Found {stuck_count} file(s) stuck in 'processing' state from previous session")

                # Mark them as failed (interrupted) so they can be retried
                cursor.execute("""
                    UPDATE files
                    SET state = 'failed',
                        error_message = 'Processing interrupted (application restart or crash)',
                        completed_at = CURRENT_TIMESTAMP
                    WHERE state = 'processing'
                """)

                conn.commit()
                logging.info(f"✅ Marked {stuck_count} stuck file(s) as failed for retry")

            conn.close()
        except Exception as e:
            logging.error(f"Error cleaning up stuck processing files: {e}", exc_info=True)

    def get_statistics(self) -> Dict:
        """Get processing statistics"""
        conn = self._get_connection()
        cursor = conn.cursor()

        stats = {}

        # Count by state
        cursor.execute("""
            SELECT state, COUNT(*) as count
            FROM files
            GROUP BY state
        """)
        for row in cursor.fetchall():
            stats[row['state']] = row['count']

        # Default values
        for state in ['pending', 'processing', 'completed', 'failed', 'skipped']:
            if state not in stats:
                stats[state] = 0

        # Size statistics
        cursor.execute("""
            SELECT
                SUM(original_size) as original_total,
                SUM(encoded_size) as encoded_total,
                AVG(fps) as avg_fps,
                AVG(encode_time_seconds) as avg_time
            FROM files
            WHERE state = 'completed'
        """)
        row = cursor.fetchone()
        stats['original_size'] = row['original_total'] or 0
        stats['encoded_size'] = row['encoded_total'] or 0
        stats['avg_fps'] = round(row['avg_fps'] or 0, 2)
        stats['avg_encode_time'] = round(row['avg_time'] or 0, 1)

        # Calculate savings
        if stats['original_size'] > 0:
            savings = stats['original_size'] - stats['encoded_size']
            stats['space_saved'] = savings
            stats['space_saved_percent'] = round((savings / stats['original_size']) * 100, 1)
        else:
            stats['space_saved'] = 0
            stats['space_saved_percent'] = 0

        # Encoder usage
        cursor.execute("""
            SELECT encoder_used, COUNT(*) as count
            FROM files
            WHERE state = 'completed' AND encoder_used IS NOT NULL
            GROUP BY encoder_used
        """)
        stats['encoder_usage'] = {row['encoder_used']: row['count'] for row in cursor.fetchall()}

        # Recent activity
        cursor.execute("""
            SELECT COUNT(*) as count
            FROM files
            WHERE completed_at > datetime('now', '-24 hours')
        """)
        stats['completed_24h'] = cursor.fetchone()['count']

        conn.close()
        return stats

    def get_files(self, state: Optional[str] = None, limit: int = 100,
                  offset: int = 0, search: Optional[str] = None,
                  filter_type: Optional[str] = None) -> List[Dict]:
        """Get files with filtering"""
        conn = self._get_connection()
        cursor = conn.cursor()

        query = "SELECT * FROM files WHERE 1=1"
        params = []

        if state:
            query += " AND state = ?"
            params.append(state)

        if search:
            query += " AND relative_path LIKE ?"
            params.append(f'%{search}%')

        # Apply attribute filters
        if filter_type:
            if filter_type == 'has_subtitles':
                query += " AND has_subtitles = 1"
            elif filter_type == 'no_subtitles':
                query += " AND (has_subtitles = 0 OR has_subtitles IS NULL)"
            elif filter_type == 'large_files':
                # Files larger than 5GB
                query += " AND original_size > 5368709120"
            elif filter_type == 'surround_sound':
                # 5.1 or 7.1 audio (6+ channels)
                query += " AND audio_channels >= 6"
            elif filter_type == 'stereo_only':
                # Stereo or mono (< 6 channels)
                query += " AND audio_channels < 6"
            elif filter_type == '4k':
                # 4K resolution (3840x2160 or higher)
                query += " AND width >= 3840"
            elif filter_type == '1080p':
                # 1080p resolution
                query += " AND width >= 1920 AND width < 3840 AND height >= 1080"
            elif filter_type == '720p':
                # 720p resolution
                query += " AND width >= 1280 AND width < 1920"
            elif filter_type == 'h264':
                # H.264/AVC codec
                query += " AND video_codec LIKE '%264%'"
            elif filter_type == 'h265':
                # H.265/HEVC codec (parenthesized so the OR stays inside this filter)
                query += " AND (video_codec LIKE '%265%' OR video_codec LIKE '%hevc%')"
            elif filter_type == 'high_bitrate':
                # High bitrate (> 10 Mbps)
                query += " AND bitrate > 10000000"

        query += " ORDER BY updated_at DESC LIMIT ? OFFSET ?"
        params.extend([limit, offset])

        cursor.execute(query, params)
        files = [dict(row) for row in cursor.fetchall()]

        conn.close()
        return files

    def get_file(self, file_id: int) -> Optional[Dict]:
        """Get single file by ID"""
        conn = self._get_connection()
        cursor = conn.cursor()

        cursor.execute("SELECT * FROM files WHERE id = ?", (file_id,))
        row = cursor.fetchone()

        conn.close()
        return dict(row) if row else None

    def get_recent_activity(self, limit: int = 20) -> List[Dict]:
        """Get recent file activity"""
        conn = self._get_connection()
        cursor = conn.cursor()

        cursor.execute("""
            SELECT id, relative_path, state, updated_at, encoder_used, fps
            FROM files
            WHERE state IN ('completed', 'failed')
            ORDER BY updated_at DESC
            LIMIT ?
        """, (limit,))

        activity = [dict(row) for row in cursor.fetchall()]
        conn.close()
        return activity

    def get_processing_files(self) -> List[Dict]:
        """Get currently processing files"""
        conn = self._get_connection()
        cursor = conn.cursor()

        cursor.execute("""
            SELECT id, relative_path, started_at, profile_name
            FROM files
            WHERE state = 'processing'
            ORDER BY started_at
        """)

        files = [dict(row) for row in cursor.fetchall()]
        conn.close()
        return files

    def process_duplicates(self) -> Dict:
        """
        Process existing database to find and mark duplicates.
        Returns statistics about duplicates found.
        """
        import hashlib
        from pathlib import Path

        conn = self._get_connection()
        cursor = conn.cursor()

        # Get all files that don't have a hash yet or aren't already marked as duplicates
        cursor.execute("""
            SELECT id, filepath, file_hash, state, relative_path
            FROM files
            WHERE state != 'skipped' OR (state = 'skipped' AND error_message NOT LIKE 'Duplicate of:%')
            ORDER BY id
        """)

        files = [dict(row) for row in cursor.fetchall()]

        stats = {
            'total_files': len(files),
            'files_hashed': 0,
            'duplicates_found': 0,
            'duplicates_marked': 0,
            'errors': 0
        }

        # Track hashes we've seen
        hash_to_file = {}  # hash -> {'id', 'filepath', 'relative_path', 'state'}

        for file in files:
            file_path = Path(file['filepath'])
            file_hash = file['file_hash']

            # Calculate hash if missing
            if not file_hash:
                if not file_path.exists():
                    stats['errors'] += 1
                    continue

                try:
                    # Use the same hashing logic as MediaInspector
                    file_hash = self._calculate_file_hash(file_path)

                    if file_hash:
                        # Update file with hash
                        cursor.execute("""
                            UPDATE files SET file_hash = ? WHERE id = ?
                        """, (file_hash, file['id']))
                        stats['files_hashed'] += 1
                except Exception as e:
                    logging.error(f"Failed to hash {file_path}: {e}")
                    stats['errors'] += 1
                    continue

            # Check if this hash has been seen before
            if file_hash in hash_to_file:
                original = hash_to_file[file_hash]

                # Only mark as duplicate if original is completed
                if original['state'] == 'completed':
                    stats['duplicates_found'] += 1

                    # Mark current file as skipped duplicate
                    cursor.execute("""
                        UPDATE files
                        SET state = 'skipped',
                            error_message = ?,
                            updated_at = CURRENT_TIMESTAMP
                        WHERE id = ?
                    """, (f"Duplicate of: {original['relative_path']}", file['id']))

                    stats['duplicates_marked'] += 1
                    logging.info(f"Marked duplicate: {file['relative_path']} -> {original['relative_path']}")
            else:
                # First time seeing this hash
                hash_to_file[file_hash] = {
                    'id': file['id'],
                    'filepath': file['filepath'],
                    'relative_path': file['relative_path'],
                    'state': file['state']
                }

        conn.commit()
        conn.close()

        return stats

    def _calculate_file_hash(self, filepath: Path, chunk_size: int = 8192) -> Optional[str]:
        """Calculate file hash using same logic as MediaInspector"""
        import hashlib

        try:
            file_size = filepath.stat().st_size

            # For small files (<100MB), hash the entire file
            if file_size < 100 * 1024 * 1024:
                hasher = hashlib.sha256()
                with open(filepath, 'rb') as f:
                    while chunk := f.read(chunk_size):
                        hasher.update(chunk)
                return hasher.hexdigest()

            # For large files, hash: size + first 64KB + middle 64KB + last 64KB
            hasher = hashlib.sha256()
            hasher.update(str(file_size).encode())

            with open(filepath, 'rb') as f:
                # First chunk
                hasher.update(f.read(65536))

                # Middle chunk
                f.seek(file_size // 2)
                hasher.update(f.read(65536))

                # Last chunk
                f.seek(-65536, 2)
                hasher.update(f.read(65536))

            return hasher.hexdigest()
        except Exception as e:
            logging.error(f"Failed to hash file {filepath}: {e}")
            return None


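# Usage sketch (hedged): the partial-hash scheme above is a speed/accuracy
# trade-off -- files of 100 MB or more are identified by size plus three 64 KB
# samples, so distinct files can in principle collide. A standalone comparison
# might look like this (paths are illustrative assumptions):
#
#   reader = DatabaseReader(Path('/db/state.db'))
#   h1 = reader._calculate_file_hash(Path('/movies/A.mkv'))
#   h2 = reader._calculate_file_hash(Path('/movies/B.mkv'))
#   if h1 and h2 and h1 == h2:
#       print("Likely duplicates (same partial hash)")
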
# =============================================================================
# SYSTEM MONITORING
# =============================================================================

class SystemMonitor:
    """Monitor system resources"""

    @staticmethod
    def get_gpu_stats() -> List[Dict]:
        """Get GPU statistics"""
        try:
            result = subprocess.run(
                ['nvidia-smi', '--query-gpu=index,name,utilization.gpu,memory.used,memory.total,temperature.gpu',
                 '--format=csv,noheader,nounits'],
                capture_output=True,
                text=True,
                timeout=5
            )

            if result.returncode == 0:
                gpus = []
                for line in result.stdout.strip().split('\n'):
                    if line:
                        parts = [p.strip() for p in line.split(',')]
                        gpus.append({
                            'index': int(parts[0]),
                            'name': parts[1],
                            'utilization': int(parts[2]),
                            'memory_used': int(parts[3]),
                            'memory_total': int(parts[4]),
                            'temperature': int(parts[5])
                        })
                return gpus
        except Exception:
            pass

        return []

    @staticmethod
    def get_cpu_stats() -> Dict:
        """Get CPU statistics"""
        try:
            # Load average
            with open('/proc/loadavg', 'r') as f:
                load = f.read().strip().split()[:3]
                load_avg = [float(x) for x in load]

            # CPU count
            cpu_count = os.cpu_count() or 1

            return {
                'load_1m': load_avg[0],
                'load_5m': load_avg[1],
                'load_15m': load_avg[2],
                'cpu_count': cpu_count,
                'load_percent': round((load_avg[0] / cpu_count) * 100, 1)
            }
        except Exception:
            return {'load_1m': 0, 'load_5m': 0, 'load_15m': 0, 'cpu_count': 1, 'load_percent': 0}

    @staticmethod
    def get_disk_stats() -> Dict:
        """Get disk statistics"""
        try:
            import shutil

            # Work directory
            work_usage = shutil.disk_usage('/work')

            return {
                'work_total': work_usage.total,
                'work_used': work_usage.used,
                'work_free': work_usage.free,
                'work_percent': round((work_usage.used / work_usage.total) * 100, 1)
            }
        except Exception:
            return {'work_total': 0, 'work_used': 0, 'work_free': 0, 'work_percent': 0}


# =============================================================================
# JOB CONTROL
# =============================================================================

class JobController:
    """Control encoding jobs"""

    @staticmethod
    def start_processing(profile: Optional[str] = None, dry_run: bool = False) -> Dict:
        """Start processing job"""
        global processing_thread, processing_active

        with processing_lock:
            if processing_active:
                return {'success': False, 'message': 'Processing already active'}

            # Check if script exists
            if not config.reencode_script.exists():
                error_msg = f"Reencode script not found at {config.reencode_script}"
                logging.error(error_msg)
                return {'success': False, 'message': error_msg}

            # Check if config file exists
            if not config.config_file.exists():
                error_msg = f"Config file not found at {config.config_file}"
                logging.error(error_msg)
                return {'success': False, 'message': error_msg}

            cmd = ['python3', str(config.reencode_script), '-c', str(config.config_file)]
            if profile:
                cmd.extend(['--profile', profile])

            if dry_run:
                # For dry run, just do a scan
                cmd.append('--scan-only')
            else:
                # Skip scan when processing (dashboard already selected files)
                cmd.append('--no-scan')

            logging.info(f"Starting processing with command: {' '.join(cmd)}")

            def run_processing():
                global processing_active, processing_pid
                processing_active = True
                try:
                    # Small delay to ensure database transaction is committed
                    import time
                    time.sleep(0.5)

                    # Start process and track PID
                    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
                    processing_pid = process.pid
                    logging.info(f"Started processing with PID {processing_pid}")
                    logging.info(f"Command: {' '.join(cmd)}")

                    # Wait for completion
                    stdout, stderr = process.communicate()

                    # Log output
                    if stdout:
                        logging.info(f"Processing output: {stdout}")
                    if stderr:
                        logging.error(f"Processing errors: {stderr}")
                finally:
                    processing_active = False
                    processing_pid = None

            processing_thread = threading.Thread(target=run_processing, daemon=True)
            processing_thread.start()

            mode = "Dry run started" if dry_run else "Processing started"
            return {'success': True, 'message': mode, 'dry_run': dry_run}

    @staticmethod
    def stop_processing() -> Dict:
        """Stop processing job"""
        global processing_active, processing_pid

        with processing_lock:
            if not processing_active:
                return {'success': False, 'message': 'No active processing'}

            # Send SIGTERM to reencode process using tracked PID
            try:
                if processing_pid:
                    import signal
                    try:
                        os.kill(processing_pid, signal.SIGTERM)
                        processing_active = False
                        processing_pid = None
                        return {'success': True, 'message': 'Processing stopped'}
                    except ProcessLookupError:
                        # Process already dead
                        processing_active = False
                        processing_pid = None
                        return {'success': True, 'message': 'Process already stopped'}
                else:
                    # Fallback: process thread but no PID tracked
                    processing_active = False
                    return {'success': True, 'message': 'Processing flag cleared (no PID tracked)'}
            except Exception as e:
                logging.error(f"Failed to stop processing: {e}")
                return {'success': False, 'message': 'Failed to stop processing'}

    @staticmethod
    def is_processing() -> bool:
        """Check if processing is active"""
        return processing_active


# =============================================================================
# API ROUTES
# =============================================================================

db_reader = DatabaseReader(config.state_db)
system_monitor = SystemMonitor()
job_controller = JobController()


# CSRF Protection
def generate_csrf_token():
    """Generate CSRF token for session"""
    if 'csrf_token' not in session:
        session['csrf_token'] = secrets.token_hex(32)
    return session['csrf_token']


def validate_csrf_token():
    """Validate CSRF token from request"""
    token = request.headers.get('X-CSRF-Token') or request.form.get('csrf_token')
    session_token = session.get('csrf_token')

    # Debug logging
    if not token:
        logging.warning("CSRF validation failed: No token in request headers or form")
    elif not session_token:
        logging.warning("CSRF validation failed: No token in session")
    elif token != session_token:
        logging.warning("CSRF validation failed: Token mismatch")

    if not token or token != session_token:
        return False
    return True


@app.before_request
def csrf_protect():
    """CSRF protection for state-changing requests"""
    if request.method in ['POST', 'PUT', 'DELETE', 'PATCH']:
        # Skip CSRF for health check and csrf-token endpoint
        if request.path in ['/api/health', '/api/csrf-token']:
            return

        if not validate_csrf_token():
            return jsonify({'success': False, 'error': 'CSRF token validation failed'}), 403


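# Client usage sketch (hedged, not part of the dashboard itself): state-changing
# requests must carry the session's CSRF token. With `requests` as an assumed
# client-side dependency and the default port, the flow looks roughly like:
#
#   import requests
#   s = requests.Session()  # keeps the session cookie the token is bound to
#   token = s.get('http://localhost:5000/api/csrf-token').json()['csrf_token']
#   s.post('http://localhost:5000/api/jobs/start',
#          json={'profile': 'balanced_gpu', 'dry_run': True},
#          headers={'X-CSRF-Token': token})
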
# Global error handler
@app.errorhandler(Exception)
def handle_exception(e):
    """Handle all uncaught exceptions"""
    logging.error(f"Unhandled exception: {e}", exc_info=True)
    return jsonify({
        'success': False,
        'error': str(e),
        'type': type(e).__name__
    }), 500


@app.route('/')
def index():
    """Main dashboard page"""
    csrf_token = generate_csrf_token()
    return render_template('dashboard.html', csrf_token=csrf_token)


@app.route('/favicon.ico')
def favicon():
    """Return empty favicon to prevent 404 errors"""
    return '', 204


@app.route('/api/csrf-token')
def get_csrf_token():
    """Get CSRF token for client"""
    return jsonify({'csrf_token': generate_csrf_token()})


@app.route('/api/stats')
def api_stats():
    """Get statistics"""
    try:
        stats = db_reader.get_statistics()
        return jsonify({'success': True, 'data': stats})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/files')
def api_files():
    """Get files list"""
    try:
        # Auto-cleanup stuck files whenever file list is requested
        # This ensures stuck files are cleaned up even if startup cleanup failed
        if not processing_active:
            db_reader.cleanup_stuck_processing()

        state = request.args.get('state')

        # Validate state
        valid_states = ['discovered', 'pending', 'processing', 'completed', 'failed', 'skipped', None]
        if state and state not in valid_states:
            return jsonify({'success': False, 'error': 'Invalid state parameter'}), 400

        # Validate and limit pagination parameters
        try:
            limit = int(request.args.get('limit', 100))
            offset = int(request.args.get('offset', 0))
        except ValueError:
            return jsonify({'success': False, 'error': 'Invalid limit or offset'}), 400

        if limit < 1 or limit > 1000:
            return jsonify({'success': False, 'error': 'Limit must be between 1 and 1000'}), 400

        if offset < 0:
            return jsonify({'success': False, 'error': 'Offset must be non-negative'}), 400

        # Validate and sanitize search parameter
        search = request.args.get('search')
        if search and len(search) > 500:
            return jsonify({'success': False, 'error': 'Search query too long'}), 400

        # Get filter parameter
        filter_type = request.args.get('filter')
        valid_filters = [
            'has_subtitles', 'no_subtitles', 'large_files', 'surround_sound',
            'stereo_only', '4k', '1080p', '720p', 'h264', 'h265', 'high_bitrate'
        ]
        if filter_type and filter_type not in valid_filters:
            return jsonify({'success': False, 'error': 'Invalid filter parameter'}), 400

        files = db_reader.get_files(state, limit, offset, search, filter_type)
        return jsonify({'success': True, 'data': files})
    except Exception as e:
        logging.error(f"Error in api_files: {e}", exc_info=True)
        return jsonify({'success': False, 'error': 'Internal server error'}), 500


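# Query sketch (hedged) for the /api/files endpoint above:
#
#   GET /api/files?state=completed&limit=50&offset=0
#   GET /api/files?filter=4k&search=matrix        (search value is illustrative)
#
# `state` must be one of discovered/pending/processing/completed/failed/skipped,
# `filter` one of the valid_filters listed above, and `limit` is capped at 1000.
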
@app.route('/api/file/<int:file_id>')
def api_file(file_id):
    """Get single file details"""
    try:
        file_data = db_reader.get_file(file_id)
        if file_data:
            return jsonify({'success': True, 'data': file_data})
        else:
            return jsonify({'success': False, 'error': 'File not found'}), 404
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/activity')
def api_activity():
    """Get recent activity"""
    try:
        limit = int(request.args.get('limit', 20))
        activity = db_reader.get_recent_activity(limit)
        return jsonify({'success': True, 'data': activity})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/processing')
def api_processing():
    """Get currently processing files"""
    try:
        files = db_reader.get_processing_files()
        is_active = job_controller.is_processing()
        return jsonify({
            'success': True,
            'data': {
                'active': is_active,
                'files': files
            }
        })
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/system')
def api_system():
    """Get system statistics"""
    try:
        data = {
            'gpu': system_monitor.get_gpu_stats(),
            'cpu': system_monitor.get_cpu_stats(),
            'disk': system_monitor.get_disk_stats()
        }
        return jsonify({'success': True, 'data': data})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/encoders')
def api_encoders():
    """Get available encoder capabilities"""
    try:
        if not REENCODE_AVAILABLE:
            return jsonify({
                'success': False,
                'error': 'Encoder detection not available'
            }), 500

        # Detect encoder capabilities
        caps = EncoderDetector.detect_capabilities()

        # Build response with hardware info
        encoders = {
            'cpu': {
                'h264': caps.has_x264,
                'h265': caps.has_x265,
                'av1': caps.has_av1
            },
            'nvidia': {
                'available': caps.has_nvenc,
                'h264': caps.has_nvenc,
                'h265': caps.has_nvenc,
                'av1': caps.has_nvenc_av1,
                'devices': caps.nvenc_devices if caps.has_nvenc else []
            },
            'intel': {
                'available': caps.has_qsv,
                'h264': caps.has_qsv,
                'h265': caps.has_qsv,
                'av1': caps.has_qsv_av1
            },
            'amd': {
                'available': caps.has_vaapi,
                'h264': caps.has_vaapi,
                'h265': caps.has_vaapi,
                'av1': caps.has_vaapi_av1
            }
        }

        return jsonify({'success': True, 'encoders': encoders})
    except Exception as e:
        logging.error(f"Error detecting encoders: {e}", exc_info=True)
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/jobs/start', methods=['POST'])
def api_start_job():
    """Start processing job"""
    try:
        data = request.get_json() or {}
        profile = data.get('profile')
        dry_run = data.get('dry_run', False)
        result = job_controller.start_processing(profile, dry_run)
        return jsonify(result)
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/jobs/stop', methods=['POST'])
def api_stop_job():
    """Stop processing job"""
    try:
        result = job_controller.stop_processing()
        return jsonify(result)
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


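# Request shape sketch (hedged; the CSRF header described earlier is required):
#
#   POST /api/jobs/start   JSON body: {"profile": "<profile name>", "dry_run": false}
#   POST /api/jobs/stop    no body required
#
# Progress can then be polled via GET /api/processing (active flag plus the
# files currently in the 'processing' state).
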
@app.route('/api/jobs/scan', methods=['POST'])
def api_scan_library():
    """Scan library to populate database"""
    try:
        global processing_thread, processing_active

        with processing_lock:
            if processing_active:
                return jsonify({'success': False, 'message': 'Processing already active'})

            cmd = ['python3', str(config.reencode_script), '-c', str(config.config_file), '--scan-only']

            def run_scan():
                global processing_active
                processing_active = True
                try:
                    result = subprocess.run(cmd, capture_output=True, text=True, timeout=600)
                    if result.stdout:
                        logging.info(f"Scan output: {result.stdout}")
                    if result.stderr:
                        logging.error(f"Scan errors: {result.stderr}")
                finally:
                    processing_active = False

            processing_thread = threading.Thread(target=run_scan, daemon=True)
            processing_thread.start()

            return jsonify({'success': True, 'message': 'Library scan started'})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/jobs/reencode-selected', methods=['POST'])
def api_reencode_selected():
    """Re-encode selected files with specified profile"""
    try:
        data = request.get_json()
        file_ids = data.get('file_ids', [])
        profile = data.get('profile')

        # Validate inputs
        if not file_ids:
            return jsonify({'success': False, 'error': 'No files selected'}), 400

        if not isinstance(file_ids, list):
            return jsonify({'success': False, 'error': 'file_ids must be an array'}), 400

        # Validate all file_ids are integers and limit count
        if len(file_ids) > 1000:
            return jsonify({'success': False, 'error': 'Too many files selected (max 1000)'}), 400

        try:
            file_ids = [int(fid) for fid in file_ids]
        except (ValueError, TypeError):
            return jsonify({'success': False, 'error': 'Invalid file IDs - must be integers'}), 400

        if not profile or not isinstance(profile, str):
            return jsonify({'success': False, 'error': 'No profile specified'}), 400

        # Validate profile name (alphanumeric, underscore, hyphen only)
        import re
        if not re.match(r'^[a-zA-Z0-9_-]+$', profile):
            return jsonify({'success': False, 'error': 'Invalid profile name'}), 400

        # Update file states in database to pending
        conn = None
        try:
            conn = sqlite3.connect(str(config.state_db))
            cursor = conn.cursor()

            placeholders = ','.join('?' * len(file_ids))
            cursor.execute(f"""
                UPDATE files
                SET state = 'pending',
                    profile_name = ?,
                    updated_at = CURRENT_TIMESTAMP
                WHERE id IN ({placeholders})
            """, [profile] + file_ids)

            updated_count = cursor.rowcount
            conn.commit()

            logging.info(f"Reset {updated_count} files to pending state with profile {profile}")

            return jsonify({
                'success': True,
                'message': f'{updated_count} files queued for re-encoding',
                'count': updated_count
            })
        finally:
            if conn:
                conn.close()

    except Exception as e:
        logging.error(f"Failed to queue files for re-encoding: {e}", exc_info=True)
        return jsonify({'success': False, 'error': 'Internal server error'}), 500


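# Request sketch (hedged) for /api/jobs/reencode-selected above; the IDs are
# illustrative:
#
#   POST /api/jobs/reencode-selected
#   {"file_ids": [12, 17, 42], "profile": "balanced_gpu"}
#
# file_ids must be integers (at most 1000 of them) and the profile name is
# restricted to [a-zA-Z0-9_-]; matching rows are reset to 'pending' with that
# profile so the next processing run picks them up.
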
@app.route('/api/jobs/reset-stuck', methods=['POST'])
def api_reset_stuck():
    """Mark files stuck in processing state as failed for retry"""
    try:
        db_reader.cleanup_stuck_processing()
        return jsonify({'success': True, 'message': 'Stuck files marked as failed'})
    except Exception as e:
        logging.error(f"Failed to reset stuck files: {e}", exc_info=True)
        return jsonify({'success': False, 'error': 'Internal server error'}), 500


@app.route('/api/logs')
def api_logs():
    """Get recent log entries"""
    try:
        lines = int(request.args.get('lines', 100))
        log_file = config.log_dir / 'encoderpro.log'

        if log_file.exists():
            with open(log_file, 'r') as f:
                all_lines = f.readlines()
                recent_lines = all_lines[-lines:]
            return jsonify({'success': True, 'data': recent_lines})
        else:
            return jsonify({'success': True, 'data': []})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/health')
def api_health():
    """Health check endpoint"""
    db_exists = config.state_db.exists()
    db_file_count = 0

    if db_exists:
        try:
            conn = sqlite3.connect(str(config.state_db))
            cursor = conn.cursor()
            cursor.execute("SELECT COUNT(*) FROM files")
            db_file_count = cursor.fetchone()[0]
            conn.close()
        except Exception:
            pass

    return jsonify({
        'success': True,
        'data': {
            'status': 'healthy',
            'version': __version__,
            'timestamp': datetime.now().isoformat(),
            'database': {
                'exists': db_exists,
                'path': str(config.state_db),
                'file_count': db_file_count,
                'needs_scan': db_file_count == 0
            }
        }
    })


@app.route('/api/config')
def api_get_config():
    """Get current configuration"""
    try:
        if yaml is None:
            return jsonify({'success': False, 'error': 'PyYAML not installed. Run: pip install pyyaml'}), 500

        if config.config_file.exists():
            with open(config.config_file, 'r') as f:
                config_data = yaml.safe_load(f)
            return jsonify({'success': True, 'data': config_data})
        else:
            return jsonify({'success': False, 'error': 'Config file not found'}), 404
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/config', methods=['POST'])
def api_save_config():
    """Save configuration"""
    try:
        if yaml is None:
            return jsonify({'success': False, 'error': 'PyYAML not installed. Run: pip install pyyaml'}), 500

        new_config = request.get_json()

        if not new_config:
            return jsonify({'success': False, 'error': 'No configuration provided'}), 400

        # Validate required fields
        required_fields = ['movies_dir', 'archive_dir', 'work_dir']
        for field in required_fields:
            if field not in new_config:
                return jsonify({'success': False, 'error': f'Missing required field: {field}'}), 400

        # Backup existing config
        if config.config_file.exists():
            backup_path = config.config_file.parent / f"{config.config_file.name}.backup"
            import shutil
            shutil.copy(config.config_file, backup_path)

        # Save new config
        with open(config.config_file, 'w') as f:
            yaml.dump(new_config, f, default_flow_style=False)

        return jsonify({
            'success': True,
            'message': 'Configuration saved successfully',
            'data': new_config
        })
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/config/validate', methods=['POST'])
def api_validate_config():
    """Validate configuration without saving"""
    try:
        config_data = request.get_json()

        if not config_data:
            return jsonify({'success': False, 'error': 'No configuration provided'}), 400

        errors = []
        warnings = []

        # Check required fields
        required_fields = ['movies_dir', 'archive_dir', 'work_dir']
        for field in required_fields:
            if field not in config_data:
                errors.append(f'Missing required field: {field}')

        # Check if directories exist
        from pathlib import Path
        if 'movies_dir' in config_data:
            movies_path = Path(config_data['movies_dir'])
            if not movies_path.exists():
                warnings.append(f"Movies directory does not exist: {movies_path}")
            elif not movies_path.is_dir():
                errors.append(f"Movies path is not a directory: {movies_path}")

        if 'archive_dir' in config_data:
            archive_path = Path(config_data['archive_dir'])
            if not archive_path.exists():
                warnings.append(f"Archive directory does not exist (will be created): {archive_path}")

        # Check parallel settings
        if 'parallel' in config_data:
            parallel = config_data['parallel']
            max_workers = parallel.get('max_workers', 1)
            gpu_slots = parallel.get('gpu_slots', 0)

            if max_workers < 1:
                errors.append("max_workers must be at least 1")
            if max_workers > 10:
                warnings.append(f"max_workers={max_workers} is very high, may cause system instability")

            if gpu_slots > max_workers:
                warnings.append("gpu_slots should not exceed max_workers")

        # Check profiles
        if 'profiles' in config_data:
            profiles = config_data.get('profiles', {})
            if 'definitions' in profiles:
                for profile_name, profile_data in profiles['definitions'].items():
                    if 'encoder' not in profile_data:
                        errors.append(f"Profile '{profile_name}' missing encoder")
                    if 'quality' not in profile_data:
                        warnings.append(f"Profile '{profile_name}' missing quality setting")

        is_valid = len(errors) == 0

        return jsonify({
            'success': True,
            'data': {
                'valid': is_valid,
                'errors': errors,
                'warnings': warnings
            }
        })
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


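# Config shape sketch (hedged) matching the checks in api_validate_config above.
# Only the keys are taken from the validation logic; the values are illustrative:
#
#   movies_dir: /movies
#   archive_dir: /archive
#   work_dir: /work
#   parallel:
#     max_workers: 2
#     gpu_slots: 1
#   profiles:
#     default: balanced_gpu
#     definitions:
#       balanced_gpu:
#         encoder: hevc_nvenc   # assumed encoder name; see /api/encoders output
#         quality: 24           # assumed quality value
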
@app.route('/api/profiles')
def api_get_profiles():
    """Get available encoding profiles"""
    try:
        if yaml is None:
            return jsonify({'success': False, 'error': 'PyYAML not installed. Run: pip install pyyaml'}), 500

        if config.config_file.exists():
            with open(config.config_file, 'r') as f:
                config_data = yaml.safe_load(f)

            profiles = config_data.get('profiles', {})
            return jsonify({
                'success': True,
                'data': {
                    'default': profiles.get('default', 'balanced_gpu'),
                    'profiles': profiles.get('definitions', {})
                }
            })
        else:
            return jsonify({'success': False, 'error': 'Config file not found'}), 404
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/encoders/ffmpeg')
def api_get_encoders():
    """Get available encoders on the system (raw ffmpeg probe).

    Registered on its own path because '/api/encoders' above already reports
    capabilities via EncoderDetector; registering both views on the same rule
    would leave one of them unreachable.
    """
    try:
        # Check FFmpeg encoders
        result = subprocess.run(
            ['ffmpeg', '-hide_banner', '-encoders'],
            capture_output=True,
            text=True,
            timeout=10
        )

        encoders_output = result.stdout.lower()

        available = {
            'cpu': {
                'x265': 'libx265' in encoders_output,
                'x264': 'libx264' in encoders_output
            },
            'nvidia': {
                'nvenc_h265': 'hevc_nvenc' in encoders_output,
                'nvenc_h264': 'h264_nvenc' in encoders_output
            },
            'intel': {
                'qsv_h265': 'hevc_qsv' in encoders_output,
                'qsv_h264': 'h264_qsv' in encoders_output
            },
            'amd': {
                'vaapi_h265': 'hevc_vaapi' in encoders_output,
                'vaapi_h264': 'h264_vaapi' in encoders_output
            }
        }

        return jsonify({'success': True, 'data': available})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


@app.route('/api/directories/validate', methods=['POST'])
def api_validate_directories():
    """Validate directory paths"""
    try:
        data = request.get_json()
        paths_to_check = data.get('paths', {})

        results = {}
        for name, path_str in paths_to_check.items():
            from pathlib import Path
            path = Path(path_str)

            results[name] = {
                'path': path_str,
                'exists': path.exists(),
                'is_directory': path.is_dir() if path.exists() else False,
                'is_writable': os.access(path, os.W_OK) if path.exists() else False,
                'is_readable': os.access(path, os.R_OK) if path.exists() else False
            }

        return jsonify({'success': True, 'data': results})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500


# =============================================================================
# UTILITY FUNCTIONS
# =============================================================================

def format_bytes(bytes_val: int) -> str:
    """Format bytes to human readable"""
    for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
        if bytes_val < 1024.0:
            return f"{bytes_val:.2f} {unit}"
        bytes_val /= 1024.0
    return f"{bytes_val:.2f} PB"


def format_duration(seconds: float) -> str:
    """Format seconds to human readable duration"""
    if seconds < 60:
        return f"{seconds:.0f}s"
    elif seconds < 3600:
        return f"{seconds/60:.0f}m"
    else:
        hours = seconds / 3600
        return f"{hours:.1f}h"


# Register template filters
app.jinja_env.filters['format_bytes'] = format_bytes
app.jinja_env.filters['format_duration'] = format_duration


# =============================================================================
# MAIN
# =============================================================================

@app.route('/api/process-duplicates', methods=['POST'])
def api_process_duplicates():
    """Process database to find and mark duplicates"""
    try:
        logging.info("Starting duplicate processing...")
        stats = db_reader.process_duplicates()
        logging.info(f"Duplicate processing complete: {stats}")
        return jsonify({'success': True, 'stats': stats})
    except Exception as e:
        logging.error(f"Error processing duplicates: {e}", exc_info=True)
        return jsonify({'success': False, 'error': str(e)}), 500


def main():
    """Run dashboard server"""
    # Set log level based on debug mode
    log_level = logging.DEBUG if config.debug else logging.INFO

    logging.basicConfig(
        level=log_level,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )

    logger = logging.getLogger(__name__)
    logger.info(f"Starting Web Dashboard v{__version__}")
    logger.info(f"Server: http://{config.host}:{config.port}")
    logger.info(f"Database: {config.state_db}")
    logger.info(f"Config: {config.config_file}")
    logger.info(f"Debug mode: {config.debug}")
    logger.info(f"Log level: {logging.getLevelName(log_level)}")

    # Clean up any files stuck in processing state from previous session
    try:
        logger.info("Checking for files stuck in processing state...")
        db_reader.cleanup_stuck_processing()
    except Exception as e:
        logger.error(f"Failed to cleanup stuck files on startup: {e}", exc_info=True)

    app.run(
        host=config.host,
        port=config.port,
        debug=config.debug,
        threaded=True
    )


if __name__ == '__main__':
    main()