diff --git a/backend/src/python-scripts/progress_data/run_20251021_111611_693047_success.json b/backend/src/python-scripts/progress_data/run_20251021_111611_693047_success.json
new file mode 100644
index 0000000..031803d
--- /dev/null
+++ b/backend/src/python-scripts/progress_data/run_20251021_111611_693047_success.json
@@ -0,0 +1,32 @@
+{
+  "timestamp": "2025-10-21T11:16:11.693081",
+  "success": true,
+  "metadata": {
+    "solveTime": 0.0218085,
+    "constraintsAdded": 217,
+    "variablesCreated": 90,
+    "optimal": true
+  },
+  "progress": [
+    {
+      "timestamp": 0.00886988639831543,
+      "objective": 1050.0,
+      "bound": 2100.0,
+      "solution_count": 1
+    },
+    {
+      "timestamp": 0.009304285049438477,
+      "objective": 1300.0,
+      "bound": 1300.0,
+      "solution_count": 2
+    }
+  ],
+  "solution_summary": {
+    "assignments_count": 15,
+    "violations_count": 0,
+    "variables_count": 90,
+    "constraints_count": 217,
+    "solve_time": 0.0218085,
+    "optimal": true
+  }
+}
\ No newline at end of file
diff --git a/backend/src/python-scripts/progress_data/run_20251021_111616_813035_success.json b/backend/src/python-scripts/progress_data/run_20251021_111616_813035_success.json
new file mode 100644
index 0000000..be6e684
--- /dev/null
+++ b/backend/src/python-scripts/progress_data/run_20251021_111616_813035_success.json
@@ -0,0 +1,38 @@
+{
+  "timestamp": "2025-10-21T11:16:16.813066",
+  "success": true,
+  "metadata": {
+    "solveTime": 0.0158702,
+    "constraintsAdded": 217,
+    "variablesCreated": 90,
+    "optimal": true
+  },
+  "progress": [
+    {
+      "timestamp": 0.008541107177734375,
+      "objective": 1050.0,
+      "bound": 2000.0,
+      "solution_count": 1
+    },
+    {
+      "timestamp": 0.00941777229309082,
+      "objective": 1250.0,
+      "bound": 1300.0,
+      "solution_count": 2
+    },
+    {
+      "timestamp": 0.009499549865722656,
+      "objective": 1300.0,
+      "bound": 1300.0,
+      "solution_count": 3
+    }
+  ],
+  "solution_summary": {
+    "assignments_count": 15,
+    "violations_count": 0,
+    "variables_count": 90,
+    "constraints_count": 217,
+    "solve_time": 0.0158702,
+    "optimal": true
+  }
+}
\ No newline at end of file
diff --git a/backend/src/python-scripts/run_20251021_105426_failure.json b/backend/src/python-scripts/run_20251021_105426_failure.json
new file mode 100644
index 0000000..b1894fc
--- /dev/null
+++ b/backend/src/python-scripts/run_20251021_105426_failure.json
@@ -0,0 +1,32 @@
+{
+  "timestamp": "2025-10-21T10:54:26.093544",
+  "success": false,
+  "metadata": {
+    "solveTime": 0,
+    "constraintsAdded": 0,
+    "variablesCreated": 0,
+    "optimal": false
+  },
+  "progress": [],
+  "solution_summary": {
+    "assignments_count": 0,
+    "violations_count": 1,
+    "variables_count": 0,
+    "constraints_count": 0,
+    "solve_time": 0,
+    "optimal": false
+  },
+  "full_result": {
+    "assignments": [],
+    "violations": [
+      "Error: 'SolutionCallback' object has no attribute 'HasObjective'"
+    ],
+    "success": false,
+    "metadata": {
+      "solveTime": 0,
+      "constraintsAdded": 0,
+      "variablesCreated": 0,
+      "optimal": false
+    }
+  }
+}
\ No newline at end of file
diff --git a/backend/src/python-scripts/run_20251021_105936_failure.json b/backend/src/python-scripts/run_20251021_105936_failure.json
new file mode 100644
index 0000000..0f0d63e
--- /dev/null
+++ b/backend/src/python-scripts/run_20251021_105936_failure.json
@@ -0,0 +1,19 @@
+{
+  "timestamp": "2025-10-21T10:59:36.646855",
+  "success": false,
+  "metadata": {
+    "solveTime": 0,
+    "constraintsAdded": 0,
+    "variablesCreated": 0,
+    "optimal": false
+  },
+  "progress": [],
+  "solution_summary": {
+    "assignments_count": 0,
+    "violations_count": 1,
+    "variables_count": 0,
+ "constraints_count": 0, + "solve_time": 0, + "optimal": false + } +} \ No newline at end of file diff --git a/backend/src/python-scripts/run_20251021_110336_success.json b/backend/src/python-scripts/run_20251021_110336_success.json new file mode 100644 index 0000000..2ddf8ca --- /dev/null +++ b/backend/src/python-scripts/run_20251021_110336_success.json @@ -0,0 +1,38 @@ +{ + "timestamp": "2025-10-21T11:03:36.697986", + "success": true, + "metadata": { + "solveTime": 0.025875500000000003, + "constraintsAdded": 217, + "variablesCreated": 90, + "optimal": true + }, + "progress": [ + { + "timestamp": 0.008769989013671875, + "objective": 1050.0, + "bound": 2000.0, + "solution_count": 1 + }, + { + "timestamp": 0.009685516357421875, + "objective": 1250.0, + "bound": 1700.0, + "solution_count": 2 + }, + { + "timestamp": 0.010709047317504883, + "objective": 1300.0, + "bound": 1300.0, + "solution_count": 3 + } + ], + "solution_summary": { + "assignments_count": 15, + "violations_count": 0, + "variables_count": 90, + "constraints_count": 217, + "solve_time": 0.025875500000000003, + "optimal": true + } +} \ No newline at end of file diff --git a/backend/src/python-scripts/scheduling_solver.py b/backend/src/python-scripts/scheduling_solver.py index 58182a3..c3e254e 100644 --- a/backend/src/python-scripts/scheduling_solver.py +++ b/backend/src/python-scripts/scheduling_solver.py @@ -3,8 +3,151 @@ from ortools.sat.python import cp_model import json import sys import re +import os +import logging +import time +from pathlib import Path +from datetime import datetime, timedelta from collections import defaultdict +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +class ProductionProgressManager: + def __init__(self, script_dir, max_files=1000, retention_days=7): + self.script_dir = Path(script_dir) + self.progress_dir = self.script_dir / "progress_data" + self.max_files = max_files + self.retention_days = retention_days + self._ensure_directory() + self._cleanup_old_files() + + def _ensure_directory(self): + """Create progress directory if it doesn't exist""" + try: + self.progress_dir.mkdir(exist_ok=True) + # Set secure permissions (read/write for owner only) + self.progress_dir.chmod(0o700) + except Exception as e: + logger.warning(f"Could not create progress directory: {e}") + + def _cleanup_old_files(self): + """Remove old progress files based on retention policy""" + try: + cutoff_time = datetime.now() - timedelta(days=self.retention_days) + files = list(self.progress_dir.glob("run_*.json")) + + # Sort by modification time and remove oldest if over limit + if len(files) > self.max_files: + files.sort(key=lambda x: x.stat().st_mtime) + for file_to_delete in files[:len(files) - self.max_files]: + file_to_delete.unlink() + logger.info(f"Cleaned up old progress file: {file_to_delete}") + + # Remove files older than retention period + for file_path in files: + if datetime.fromtimestamp(file_path.stat().st_mtime) < cutoff_time: + file_path.unlink() + logger.info(f"Removed expired progress file: {file_path}") + + except Exception as e: + logger.warning(f"Progress cleanup failed: {e}") + + def save_progress(self, result, progress_data): + """Safely save progress data with production considerations""" + try: + # Check disk space before writing (min 100MB free) + if not self._check_disk_space(): + logger.warning("Insufficient disk space, skipping progress save") + return None + + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S_%f") + success_status = 
"success" if result.get('success', False) else "failure" + filename = f"run_{timestamp}_{success_status}.json" + filepath = self.progress_dir / filename + + # Prepare safe data (exclude sensitive information) + safe_data = { + 'timestamp': datetime.now().isoformat(), + 'success': result.get('success', False), + 'metadata': result.get('metadata', {}), + 'progress': progress_data, + 'solution_summary': { + 'assignments_count': len(result.get('assignments', [])), + 'violations_count': len(result.get('violations', [])), + 'variables_count': result.get('metadata', {}).get('variablesCreated', 0), + 'constraints_count': result.get('metadata', {}).get('constraintsAdded', 0), + 'solve_time': result.get('metadata', {}).get('solveTime', 0), + 'optimal': result.get('metadata', {}).get('optimal', False) + } + # ❌ REMOVED: 'full_result' containing potentially sensitive data + } + + # Atomic write with temporary file + temp_filepath = filepath.with_suffix('.tmp') + with open(temp_filepath, 'w', encoding='utf-8') as f: + json.dump(safe_data, f, indent=2, ensure_ascii=False) + + # Atomic rename + temp_filepath.rename(filepath) + # Set secure file permissions + filepath.chmod(0o600) + + logger.info(f"Progress data saved: {filename}") + return str(filepath) + + except Exception as e: + logger.error(f"Failed to save progress data: {e}") + return None + + def _check_disk_space(self, min_free_mb=100): + """Check if there's sufficient disk space""" + try: + stat = os.statvfs(self.progress_dir) + free_mb = (stat.f_bavail * stat.f_frsize) / (1024 * 1024) + return free_mb >= min_free_mb + except: + return True # Continue if we can't check disk space + +class SimpleSolutionCallback(cp_model.CpSolverSolutionCallback): + """A simplified callback that only counts solutions""" + def __init__(self): + cp_model.CpSolverSolutionCallback.__init__(self) + self.__solution_count = 0 + self.start_time = time.time() + self.solutions = [] + + def on_solution_callback(self): + current_time = time.time() - self.start_time + self.__solution_count += 1 + + # Try to get objective value safely + try: + objective_value = self.ObjectiveValue() + except: + objective_value = 0 + + # Try to get bound safely + try: + best_bound = self.BestObjectiveBound() + except: + best_bound = 0 + + solution_info = { + 'timestamp': current_time, + 'objective': objective_value, + 'bound': best_bound, + 'solution_count': self.__solution_count + } + + self.solutions.append(solution_info) + print(f"Progress: Solution {self.__solution_count}, Objective: {objective_value}, Time: {current_time:.2f}s", file=sys.stderr) + + def solution_count(self): + return self.__solution_count + + class UniversalSchedulingSolver: def __init__(self): self.model = cp_model.CpModel() @@ -12,6 +155,14 @@ class UniversalSchedulingSolver: self.solver.parameters.max_time_in_seconds = 30 self.solver.parameters.num_search_workers = 8 self.solver.parameters.log_search_progress = False + + # 🆕 Initialize production-safe progress manager + script_dir = os.path.dirname(os.path.abspath(__file__)) + self.progress_manager = ProductionProgressManager( + script_dir=script_dir, + max_files=1000, # Keep last 1000 runs + retention_days=7 # Keep files for 7 days + ) def solve_from_model_data(self, model_data): """Solve from pre-built model data (variables, constraints, objective)""" @@ -48,36 +199,70 @@ class UniversalSchedulingSolver: # Add a default objective if main objective fails self.model.Maximize(sum(cp_vars.values())) - # Solve - status = self.solver.Solve(self.model) + # Solve with 
+            # Solve with callback
+            callback = SimpleSolutionCallback()
+            status = self.solver.SolveWithSolutionCallback(self.model, callback)
 
             result = self._format_solution(status, cp_vars, model_data)
             result['metadata']['constraintsAdded'] = constraints_added
+
+            # 🆕 Production-safe progress saving
+            if callback.solutions:
+                result['progress'] = callback.solutions
+                self.progress_manager.save_progress(result, callback.solutions)
+            else:
+                result['progress'] = []
+                self.progress_manager.save_progress(result, [])
+
             return result
         except Exception as e:
-            return self._error_result(str(e))
+            error_result = self._error_result(str(e))
+            self.progress_manager.save_progress(error_result, [])
+            return error_result
+
+    def _save_progress_data(self, result, progress_data):
+        """Save progress data to file in the same directory as this script"""
+        try:
+            # Get current script directory
+            script_dir = os.path.dirname(os.path.abspath(__file__))
+
+            # Create filename with timestamp and success status
+            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+            success_status = "success" if result.get('success', False) else "failure"
+            filename = f"run_{timestamp}_{success_status}.json"
+            filepath = os.path.join(script_dir, filename)
+
+            # Prepare data to save
+            data_to_save = {
+                'timestamp': datetime.now().isoformat(),
+                'success': result.get('success', False),
+                'metadata': result.get('metadata', {}),
+                'progress': progress_data,
+                'solution_summary': {
+                    'assignments_count': len(result.get('assignments', [])),
+                    'violations_count': len(result.get('violations', [])),
+                    'variables_count': result.get('metadata', {}).get('variablesCreated', 0),
+                    'constraints_count': result.get('metadata', {}).get('constraintsAdded', 0),
+                    'solve_time': result.get('metadata', {}).get('solveTime', 0),
+                    'optimal': result.get('metadata', {}).get('optimal', False)
+                }
+            }
+
+            # Write to file
+            with open(filepath, 'w', encoding='utf-8') as f:
+                json.dump(data_to_save, f, indent=2, ensure_ascii=False)
+
+            print(f"Progress data saved to: {filepath}", file=sys.stderr)
+
+        except Exception as e:
+            print(f"Failed to save progress data: {e}", file=sys.stderr)
 
     def _add_constraint(self, expression, cp_vars):
         """Add constraint from expression string with enhanced parsing"""
         try:
             expression = expression.strip()
 
-            # Handle implication constraints (=>)
-            if '=>' in expression:
-                left, right = expression.split('=>', 1)
-                left_expr = self._parse_expression(left.strip(), cp_vars)
-                right_expr = self._parse_expression(right.strip(), cp_vars)
-
-                # A => B is equivalent to (not A) or B
-                # In CP-SAT: AddBoolOr([A.Not(), B])
-                if hasattr(left_expr, 'Not') and hasattr(right_expr, 'Index'):
-                    self.model.AddImplication(left_expr, right_expr)
-                else:
-                    # Fallback: treat as linear constraint
-                    self.model.Add(left_expr <= right_expr)
-                return True
-
             # Handle equality
             if ' == ' in expression:
                 left, right = expression.split(' == ', 1)
@@ -198,12 +383,11 @@ class UniversalSchedulingSolver:
                     assignments.append({
                         'shiftId': shift_id,
                         'employeeId': employee_id,
-                        'assignedAt': '2024-01-01T00:00:00Z',
+                        'assignedAt': datetime.now().isoformat() + 'Z',
                         'score': 100
                     })
 
             print(f"Debug: Found {len(assignments)} assignments", file=sys.stderr)
-            print(f"Debug: First 5 assignments: {assignments[:5]}", file=sys.stderr)
         else:
             print(f"Debug: Solver failed with status {status}", file=sys.stderr)
 
@@ -213,7 +397,7 @@ class UniversalSchedulingSolver:
             'assignments': assignments,
             'violations': [],
             'success': success,
-            'variables': variables,  # Include ALL variables for debugging
+            'variables': variables,
             'metadata': {
                 'solveTime': self.solver.WallTime(),
                 'constraintsAdded': len(model_data.get('constraints', [])),
@@ -227,9 +411,9 @@ class UniversalSchedulingSolver:
         status_map = {
             cp_model.OPTIMAL: 'OPTIMAL',
             cp_model.FEASIBLE: 'FEASIBLE',
-            cp_MODEL.INFEASIBLE: 'INFEASIBLE',
-            cp_MODEL.MODEL_INVALID: 'MODEL_INVALID',
-            cp_MODEL.UNKNOWN: 'UNKNOWN'
+            cp_model.INFEASIBLE: 'INFEASIBLE',
+            cp_model.MODEL_INVALID: 'MODEL_INVALID',
+            cp_model.UNKNOWN: 'UNKNOWN'
         }
         return status_map.get(status, f'UNKNOWN_STATUS_{status}')
 
@@ -248,7 +432,6 @@ class UniversalSchedulingSolver:
         }
 
-
 
 # Main execution
 if __name__ == "__main__":
     try:
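Note (reviewer sketch, not part of the patch): run_20251021_105426_failure.json above records "Error: 'SolutionCallback' object has no attribute 'HasObjective'", which is why SimpleSolutionCallback now wraps ObjectiveValue() and BestObjectiveBound() in try/except instead of probing for a HasObjective method. A minimal stand-alone example of the same CP-SAT callback pattern follows; the toy model and its variable names are invented purely for illustration.

# Illustration only; requires ortools, nothing project-specific.
from ortools.sat.python import cp_model

class LoggingCallback(cp_model.CpSolverSolutionCallback):
    """Records (objective, bound) each time the solver reports a solution."""

    def __init__(self):
        cp_model.CpSolverSolutionCallback.__init__(self)
        self.steps = []

    def on_solution_callback(self):
        # Same defensive pattern as SimpleSolutionCallback in the patch:
        # fall back to 0 rather than letting the callback raise.
        try:
            objective = self.ObjectiveValue()
            bound = self.BestObjectiveBound()
        except Exception:
            objective, bound = 0, 0
        self.steps.append({'objective': objective, 'bound': bound})

model = cp_model.CpModel()
x = model.NewIntVar(0, 10, 'x')
y = model.NewIntVar(0, 10, 'y')
model.Add(x + y <= 10)
model.Maximize(2 * x + y)

solver = cp_model.CpSolver()
callback = LoggingCallback()
status = solver.SolveWithSolutionCallback(model, callback)
print(solver.StatusName(status), callback.steps)

For an optimization model the callback fires once per improving solution, which matches the improving objective/bound pairs recorded in the run_*.json progress arrays above.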
diff --git a/backend/src/workers/cp-sat-wrapper.ts b/backend/src/workers/cp-sat-wrapper.ts
index e842626..7bbe10e 100644
--- a/backend/src/workers/cp-sat-wrapper.ts
+++ b/backend/src/workers/cp-sat-wrapper.ts
@@ -8,6 +8,17 @@ import { SolverOptions, Solution, Assignment } from '../models/scheduling.js';
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);
 
+export interface ProgressStep {
+  timestamp: number;
+  objective: number;
+  bound: number;
+  solution_count: number;
+}
+
+export interface SolutionWithProgress extends Solution {
+  progress?: ProgressStep[];
+}
+
 export class CPModel {
   private modelData: any;
 
@@ -86,7 +97,7 @@ export class CPSolver {
     console.log('Using Python script at:', pythonScriptPath);
 
-    const modelData = model.export();
+    const modelData = model.export();
 
     return new Promise((resolve, reject) => {
       const pythonProcess = spawn('python', [pythonScriptPath], {
@@ -103,6 +114,10 @@ export class CPSolver {
       pythonProcess.stderr.on('data', (data) => {
         stderr += data.toString();
+        // 🆕 Real-time progress monitoring from stderr
+        if (data.toString().includes('Progress:')) {
+          console.log('Python Progress:', data.toString().trim());
+        }
       });
 
       pythonProcess.on('close', (code) => {
@@ -116,15 +131,16 @@ export class CPSolver {
         }
 
         try {
-          console.log('Python raw output:', stdout.substring(0, 500)); // Debug log
+          console.log('Python raw output:', stdout.substring(0, 500));
           const result = JSON.parse(stdout);
 
-          // ENHANCED: Better solution parsing
-          const solution: Solution = {
+          // Enhanced solution parsing with progress data
+          const solution: SolutionWithProgress = {
             success: result.success || false,
             assignments: result.assignments || [],
             violations: result.violations || [],
+            progress: result.progress || [], // 🆕 Parse progress data
             metadata: {
               solveTime: result.metadata?.solveTime || 0,
               constraintsAdded: result.metadata?.constraintsAdded || 0,
@@ -133,8 +149,8 @@ export class CPSolver {
             },
             variables: result.variables || {}
           };
-
-          console.log(`Python solver result: success=${solution.success}, assignments=${solution.assignments.length}`);
+
+          console.log(`Python solver result: success=${solution.success}, assignments=${solution.assignments.length}, progress_steps=${solution.progress?.length}`);
 
           resolve(solution);
         } catch (parseError) {
diff --git a/frontend/src/pages/Dashboard/Dashboard.tsx b/frontend/src/pages/Dashboard/Dashboard.tsx
index a246eb6..a1bc583 100644
--- a/frontend/src/pages/Dashboard/Dashboard.tsx
+++ b/frontend/src/pages/Dashboard/Dashboard.tsx
@@ -343,30 +343,53 @@ const Dashboard: React.FC = () => {
   return (
-            {new Date().toLocaleDateString('de-DE', {
-              weekday: 'long',
-              year: 'numeric',
-              month: 'long',
-              day: 'numeric'
-            })}
+            {user?.firstname} {user?.lastname}
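Note (reviewer sketch, not part of the patch): the per-run files written by ProductionProgressManager can be summarized offline, for example when checking the 7-day retention window. Field names follow the run_*.json samples at the top of this diff; the relative path is an assumption about the working directory.

# Illustration only; standard library, no project imports.
import json
from pathlib import Path

# Assumed location, relative to the repository root.
progress_dir = Path("backend/src/python-scripts/progress_data")

for run_file in sorted(progress_dir.glob("run_*.json")):
    data = json.loads(run_file.read_text(encoding="utf-8"))
    status = "ok" if data.get("success") else "FAIL"
    solve_time = data.get("metadata", {}).get("solveTime", 0)
    steps = data.get("progress", [])
    final_objective = steps[-1]["objective"] if steps else None
    print(f"{run_file.name}: {status} solve_time={solve_time:.4f}s "
          f"solutions={len(steps)} final_objective={final_objective}")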