
Async Motion Control & Still Sands Feature (#77)

* fix performance issue

* fix async calls

* move motion control to its own thread

* improve browse page performance

* fix cache

* further optimize page load

* all endpoints should run async

* fix syntax

* motion should use a different core

* fix async issue

* optimize settings page loading

* fix folder loading

* Add Still Sands feature
Tuan Nguyen, 4 months ago
Parent commit: 2418db4080

main.py (+248, -57)

@@ -27,11 +27,20 @@ import json
 import base64
 import time
 import argparse
+from concurrent.futures import ProcessPoolExecutor
+import multiprocessing
 
 # Get log level from environment variable, default to INFO
 log_level_str = os.getenv('LOG_LEVEL', 'INFO').upper()
 log_level = getattr(logging, log_level_str, logging.INFO)
 
+# Create a process pool for CPU-intensive tasks
+# Limit to reasonable number of workers for embedded systems
+cpu_count = multiprocessing.cpu_count()
+# Maximum 3 workers (leaving 1 for motion), minimum 1
+process_pool_size = min(3, max(1, cpu_count - 1))
+process_pool = None  # Will be initialized in lifespan
+
 logging.basicConfig(
     level=log_level,
     format='%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s',
@@ -65,6 +74,11 @@ async def lifespan(app: FastAPI):
     # Register signal handlers
     signal.signal(signal.SIGINT, signal_handler)
     signal.signal(signal.SIGTERM, signal_handler)
+
+    # Initialize process pool for CPU-intensive tasks
+    global process_pool
+    process_pool = ProcessPoolExecutor(max_workers=process_pool_size)
+    logger.info(f"Initialized process pool with {process_pool_size} workers (detected {cpu_count} cores total)")
 
     try:
         connection_manager.connect_device()
@@ -116,6 +130,13 @@ async def lifespan(app: FastAPI):
 
     yield  # This separates startup from shutdown code
 
+    # Shutdown
+    logger.info("Shutting down Dune Weaver application...")
+
+    # Shutdown process pool
+    if process_pool:
+        process_pool.shutdown(wait=True)
+        logger.info("Process pool shutdown complete")
 
 app = FastAPI(lifespan=lifespan)
 templates = Jinja2Templates(directory="templates")
@@ -133,6 +154,16 @@ class auto_playModeRequest(BaseModel):
     clear_pattern: Optional[str] = "adaptive"
     shuffle: Optional[bool] = False
 
+class TimeSlot(BaseModel):
+    start_time: str  # HH:MM format
+    end_time: str    # HH:MM format
+    days: str        # "daily", "weekdays", "weekends", or "custom"
+    custom_days: Optional[List[str]] = []  # ["monday", "tuesday", etc.]
+
+class ScheduledPauseRequest(BaseModel):
+    enabled: bool
+    time_slots: List[TimeSlot] = []
+
 class CoordinateRequest(BaseModel):
     theta: float
     rho: float
@@ -218,11 +249,12 @@ async def broadcast_status_update(status: dict):
 
 @app.websocket("/ws/cache-progress")
 async def websocket_cache_progress_endpoint(websocket: WebSocket):
+    from modules.core.cache_manager import get_cache_progress
+
     await websocket.accept()
     active_cache_progress_connections.add(websocket)
     try:
         while True:
-            from modules.core.cache_manager import get_cache_progress
             progress = get_cache_progress()
             try:
                 await websocket.send_json({
@@ -233,7 +265,7 @@ async def websocket_cache_progress_endpoint(websocket: WebSocket):
                 if "close message has been sent" in str(e):
                     break
                 raise
-            await asyncio.sleep(0.5)  # Update every 500ms
+            await asyncio.sleep(1.0)  # Update every 1 second (reduced frequency for better performance)
     except WebSocketDisconnect:
         pass
     finally:
@@ -283,10 +315,71 @@ async def set_auto_play_mode(request: auto_playModeRequest):
     logger.info(f"auto_play mode {'enabled' if request.enabled else 'disabled'}, playlist: {request.playlist}")
     return {"success": True, "message": "auto_play mode settings updated"}
 
+@app.get("/api/scheduled-pause")
+async def get_scheduled_pause():
+    """Get current Still Sands settings."""
+    return {
+        "enabled": state.scheduled_pause_enabled,
+        "time_slots": state.scheduled_pause_time_slots
+    }
+
+@app.post("/api/scheduled-pause")
+async def set_scheduled_pause(request: ScheduledPauseRequest):
+    """Update Still Sands settings."""
+    try:
+        # Validate time slots
+        for i, slot in enumerate(request.time_slots):
+            # Validate time format (HH:MM)
+            try:
+                start_time = datetime.strptime(slot.start_time, "%H:%M").time()
+                end_time = datetime.strptime(slot.end_time, "%H:%M").time()
+            except ValueError:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid time format in slot {i+1}. Use HH:MM format."
+                )
+
+            # Validate days setting
+            if slot.days not in ["daily", "weekdays", "weekends", "custom"]:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid days setting in slot {i+1}. Must be 'daily', 'weekdays', 'weekends', or 'custom'."
+                )
+
+            # Validate custom days if applicable
+            if slot.days == "custom":
+                if not slot.custom_days or len(slot.custom_days) == 0:
+                    raise HTTPException(
+                        status_code=400,
+                        detail=f"Custom days must be specified for slot {i+1} when days is set to 'custom'."
+                    )
+
+                valid_days = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
+                for day in slot.custom_days:
+                    if day not in valid_days:
+                        raise HTTPException(
+                            status_code=400,
+                            detail=f"Invalid day '{day}' in slot {i+1}. Valid days are: {', '.join(valid_days)}"
+                        )
+
+        # Update state
+        state.scheduled_pause_enabled = request.enabled
+        state.scheduled_pause_time_slots = [slot.model_dump() for slot in request.time_slots]
+        state.save()
+
+        logger.info(f"Still Sands {'enabled' if request.enabled else 'disabled'} with {len(request.time_slots)} time slots")
+        return {"success": True, "message": "Still Sands settings updated"}
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Error updating Still Sands settings: {str(e)}")
+        raise HTTPException(status_code=500, detail=f"Failed to update Still Sands settings: {str(e)}")
+
 @app.get("/list_serial_ports")
 async def list_ports():
     logger.debug("Listing available serial ports")
-    return connection_manager.list_serial_ports()
+    return await asyncio.to_thread(connection_manager.list_serial_ports)
 
 @app.post("/connect")
 async def connect(request: ConnectRequest):
@@ -332,7 +425,8 @@ async def restart(request: ConnectRequest):
 @app.get("/list_theta_rho_files")
 async def list_theta_rho_files():
     logger.debug("Listing theta-rho files")
-    files = pattern_manager.list_theta_rho_files()
+    # Run the blocking file system operation in a thread pool
+    files = await asyncio.to_thread(pattern_manager.list_theta_rho_files)
     return sorted(files)
 
 @app.get("/list_theta_rho_files_with_metadata")
@@ -345,9 +439,10 @@ async def list_theta_rho_files_with_metadata():
     import asyncio
     from concurrent.futures import ThreadPoolExecutor
 
-    files = pattern_manager.list_theta_rho_files()
+    # Run the blocking file listing in a thread
+    files = await asyncio.to_thread(pattern_manager.list_theta_rho_files)
     files_with_metadata = []
-    
+
     # Use ThreadPoolExecutor for I/O-bound operations
     executor = ThreadPoolExecutor(max_workers=4)
 
@@ -400,18 +495,74 @@ async def list_theta_rho_files_with_metadata():
                 'coordinates_count': 0
             }
 
-    # Process files in parallel using asyncio
-    loop = asyncio.get_event_loop()
-    tasks = [loop.run_in_executor(executor, process_file, file_path) for file_path in files]
-    
-    # Process results as they complete
-    for task in asyncio.as_completed(tasks):
-        try:
-            result = await task
-            files_with_metadata.append(result)
-        except Exception as e:
-            logger.error(f"Error processing file: {str(e)}")
-    
+    # Load the entire metadata cache at once (async)
+    # This is much faster than 1000+ individual metadata lookups
+    try:
+        import json
+        metadata_cache_path = "metadata_cache.json"
+        # Use async file reading to avoid blocking the event loop
+        cache_data = await asyncio.to_thread(lambda: json.load(open(metadata_cache_path, 'r')))
+        cache_dict = cache_data.get('data', {})
+        logger.debug(f"Loaded metadata cache with {len(cache_dict)} entries")
+
+        # Process all files using cached data only
+        for file_path in files:
+            try:
+                # Extract category from path
+                path_parts = file_path.split('/')
+                category = '/'.join(path_parts[:-1]) if len(path_parts) > 1 else 'root'
+
+                # Get file name without extension
+                file_name = os.path.splitext(os.path.basename(file_path))[0]
+
+                # Get metadata from cache
+                cached_entry = cache_dict.get(file_path, {})
+                if isinstance(cached_entry, dict) and 'metadata' in cached_entry:
+                    metadata = cached_entry['metadata']
+                    coords_count = metadata.get('total_coordinates', 0)
+                    date_modified = cached_entry.get('mtime', 0)
+                else:
+                    coords_count = 0
+                    date_modified = 0
+
+                files_with_metadata.append({
+                    'path': file_path,
+                    'name': file_name,
+                    'category': category,
+                    'date_modified': date_modified,
+                    'coordinates_count': coords_count
+                })
+
+            except Exception as e:
+                logger.warning(f"Error processing {file_path}: {e}")
+                # Include file with minimal info if processing fails
+                path_parts = file_path.split('/')
+                category = '/'.join(path_parts[:-1]) if len(path_parts) > 1 else 'root'
+                files_with_metadata.append({
+                    'path': file_path,
+                    'name': os.path.splitext(os.path.basename(file_path))[0],
+                    'category': category,
+                    'date_modified': 0,
+                    'coordinates_count': 0
+                })
+
+    except Exception as e:
+        logger.error(f"Failed to load metadata cache, falling back to slow method: {e}")
+        # Fallback to original method if cache loading fails
+        # Create tasks only when needed
+        loop = asyncio.get_event_loop()
+        tasks = [loop.run_in_executor(executor, process_file, file_path) for file_path in files]
+
+        for task in asyncio.as_completed(tasks):
+            try:
+                result = await task
+                files_with_metadata.append(result)
+            except Exception as task_error:
+                logger.error(f"Error processing file: {str(task_error)}")
+
+    # Clean up executor
+    executor.shutdown(wait=False)
+
     return files_with_metadata
 
 @app.post("/upload_theta_rho")
@@ -472,11 +623,15 @@ async def get_theta_rho_coordinates(request: GetCoordinatesRequest):
         file_name = normalize_file_path(request.file_name)
         file_path = os.path.join(THETA_RHO_DIR, file_name)
 
-        if not os.path.exists(file_path):
+        # Check file existence asynchronously
+        exists = await asyncio.to_thread(os.path.exists, file_path)
+        if not exists:
             raise HTTPException(status_code=404, detail=f"File {file_name} not found")
-        
-        # Parse the theta-rho file
-        coordinates = parse_theta_rho_file(file_path)
+
+        # Parse the theta-rho file in a separate process for CPU-intensive work
+        # This prevents blocking the motion control thread
+        loop = asyncio.get_event_loop()
+        coordinates = await loop.run_in_executor(process_pool, parse_theta_rho_file, file_path)
 
         if not coordinates:
             raise HTTPException(status_code=400, detail="No valid coordinates found in file")
@@ -546,7 +701,7 @@ async def stop_execution():
     if not (state.conn.is_connected() if state.conn else False):
         logger.warning("Attempted to stop without a connection")
         raise HTTPException(status_code=400, detail="Connection not established")
-    pattern_manager.stop_actions()
+    await pattern_manager.stop_actions()
     return {"success": True}
 
 @app.post("/send_home")
@@ -594,18 +749,21 @@ async def delete_theta_rho_file(request: DeleteFileRequest):
     # Normalize file path for cross-platform compatibility
     normalized_file_name = normalize_file_path(request.file_name)
     file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
-    if not os.path.exists(file_path):
+
+    # Check file existence asynchronously
+    exists = await asyncio.to_thread(os.path.exists, file_path)
+    if not exists:
         logger.error(f"Attempted to delete non-existent file: {file_path}")
         raise HTTPException(status_code=404, detail="File not found")
 
     try:
-        # Delete the pattern file
-        os.remove(file_path)
+        # Delete the pattern file asynchronously
+        await asyncio.to_thread(os.remove, file_path)
         logger.info(f"Successfully deleted theta-rho file: {request.file_name}")
 
-        # Clean up cached preview image and metadata
+        # Clean up cached preview image and metadata asynchronously
         from modules.core.cache_manager import delete_pattern_cache
-        cache_cleanup_success = delete_pattern_cache(normalized_file_name)
+        cache_cleanup_success = await asyncio.to_thread(delete_pattern_cache, normalized_file_name)
         if cache_cleanup_success:
             logger.info(f"Successfully cleaned up cache for {request.file_name}")
         else:
@@ -624,8 +782,8 @@ async def move_to_center():
             raise HTTPException(status_code=400, detail="Connection not established")
 
         logger.info("Moving device to center position")
-        pattern_manager.reset_theta()
-        pattern_manager.move_polar(0, 0)
+        await pattern_manager.reset_theta()
+        await pattern_manager.move_polar(0, 0)
         return {"success": True}
     except Exception as e:
         logger.error(f"Failed to move to center: {str(e)}")
@@ -637,8 +795,8 @@ async def move_to_perimeter():
         if not (state.conn.is_connected() if state.conn else False):
             logger.warning("Attempted to move to perimeter without a connection")
             raise HTTPException(status_code=400, detail="Connection not established")
-        pattern_manager.reset_theta()
-        pattern_manager.move_polar(0, 1)
+        await pattern_manager.reset_theta()
+        await pattern_manager.move_polar(0, 1)
         return {"success": True}
     except Exception as e:
         logger.error(f"Failed to move to perimeter: {str(e)}")
@@ -654,18 +812,24 @@ async def preview_thr(request: DeleteFileRequest):
     normalized_file_name = normalize_file_path(request.file_name)
     # Construct the full path to the pattern file to check existence
     pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
-    if not os.path.exists(pattern_file_path):
+
+    # Check file existence asynchronously
+    exists = await asyncio.to_thread(os.path.exists, pattern_file_path)
+    if not exists:
         logger.error(f"Attempted to preview non-existent pattern file: {pattern_file_path}")
         raise HTTPException(status_code=404, detail="Pattern file not found")
 
     try:
         cache_path = get_cache_path(normalized_file_name)
-        
-        if not os.path.exists(cache_path):
+
+        # Check cache existence asynchronously
+        cache_exists = await asyncio.to_thread(os.path.exists, cache_path)
+        if not cache_exists:
             logger.info(f"Cache miss for {request.file_name}. Generating preview...")
             # Attempt to generate the preview if it's missing
             success = await generate_image_preview(normalized_file_name)
-            if not success or not os.path.exists(cache_path):
+            cache_exists_after = await asyncio.to_thread(os.path.exists, cache_path)
+            if not success or not cache_exists_after:
                 logger.error(f"Failed to generate or find preview for {request.file_name} after attempting generation.")
                 raise HTTPException(status_code=500, detail="Failed to generate preview image.")
 
@@ -747,7 +911,7 @@ async def send_coordinate(request: CoordinateRequest):
 
     try:
         logger.debug(f"Sending coordinate: theta={request.theta}, rho={request.rho}")
-        pattern_manager.move_polar(request.theta, request.rho)
+        await pattern_manager.move_polar(request.theta, request.rho)
         return {"success": True}
     except Exception as e:
         logger.error(f"Failed to send coordinate: {str(e)}")
@@ -1043,27 +1207,31 @@ async def preview_thr_batch(request: dict):
         "Content-Type": "application/json"
     }
 
-    results = {}
-    for file_name in file_names:
+    async def process_single_file(file_name):
+        """Process a single file and return its preview data."""
         t1 = time.time()
         try:
             # Normalize file path for cross-platform compatibility
             normalized_file_name = normalize_file_path(file_name)
             pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
-            if not os.path.exists(pattern_file_path):
+
+            # Check file existence asynchronously
+            exists = await asyncio.to_thread(os.path.exists, pattern_file_path)
+            if not exists:
                 logger.warning(f"Pattern file not found: {pattern_file_path}")
-                results[file_name] = {"error": "Pattern file not found"}
-                continue
+                return file_name, {"error": "Pattern file not found"}
 
             cache_path = get_cache_path(normalized_file_name)
-            
-            if not os.path.exists(cache_path):
+
+            # Check cache existence asynchronously
+            cache_exists = await asyncio.to_thread(os.path.exists, cache_path)
+            if not cache_exists:
                 logger.info(f"Cache miss for {file_name}. Generating preview...")
                 success = await generate_image_preview(normalized_file_name)
-                if not success or not os.path.exists(cache_path):
+                cache_exists_after = await asyncio.to_thread(os.path.exists, cache_path)
+                if not success or not cache_exists_after:
                     logger.error(f"Failed to generate or find preview for {file_name}")
-                    results[file_name] = {"error": "Failed to generate preview"}
-                    continue
+                    return file_name, {"error": "Failed to generate preview"}
 
             metadata = get_pattern_metadata(normalized_file_name)
             if metadata:
@@ -1071,25 +1239,34 @@ async def preview_thr_batch(request: dict):
                 last_coord_obj = metadata.get('last_coordinate')
             else:
                 logger.debug(f"Metadata cache miss for {file_name}, parsing file")
-                coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_file_path)
+                # Use process pool for CPU-intensive parsing
+                loop = asyncio.get_event_loop()
+                coordinates = await loop.run_in_executor(process_pool, parse_theta_rho_file, pattern_file_path)
                 first_coord = coordinates[0] if coordinates else None
                 last_coord = coordinates[-1] if coordinates else None
                 first_coord_obj = {"x": first_coord[0], "y": first_coord[1]} if first_coord else None
                 last_coord_obj = {"x": last_coord[0], "y": last_coord[1]} if last_coord else None
 
-            with open(cache_path, 'rb') as f:
-                image_data = f.read()
+            # Read image file asynchronously
+            image_data = await asyncio.to_thread(lambda: open(cache_path, 'rb').read())
             image_b64 = base64.b64encode(image_data).decode('utf-8')
-            results[file_name] = {
+            result = {
                 "image_data": f"data:image/webp;base64,{image_b64}",
                 "first_coordinate": first_coord_obj,
                 "last_coordinate": last_coord_obj
             }
+            logger.debug(f"Processed {file_name} in {time.time() - t1:.2f}s")
+            return file_name, result
         except Exception as e:
             logger.error(f"Error processing {file_name}: {str(e)}")
-            results[file_name] = {"error": str(e)}
-        finally:
-            logger.debug(f"Processed {file_name} in {time.time() - t1:.2f}s")
+            return file_name, {"error": str(e)}
+
+    # Process all files concurrently
+    tasks = [process_single_file(file_name) for file_name in file_names]
+    file_results = await asyncio.gather(*tasks)
+
+    # Convert results to dictionary
+    results = dict(file_results)
 
     logger.info(f"Total batch processing time: {time.time() - start:.2f}s for {len(file_names)} files")
     return JSONResponse(content=results, headers=headers)
@@ -1134,10 +1311,24 @@ def signal_handler(signum, frame):
     try:
         if state.led_controller:
             state.led_controller.set_power(0)
-        # Run cleanup operations synchronously to ensure completion
-        pattern_manager.stop_actions()
+        # Run cleanup operations - need to handle async in sync context
+        try:
+            # Try to run in existing loop if available
+            import asyncio
+            loop = asyncio.get_running_loop()
+            # If we're in an event loop, schedule the coroutine
+            import concurrent.futures
+            with concurrent.futures.ThreadPoolExecutor() as executor:
+                future = executor.submit(asyncio.run, pattern_manager.stop_actions())
+                future.result(timeout=5.0)  # Wait up to 5 seconds
+        except RuntimeError:
+            # No running loop, create a new one
+            import asyncio
+            asyncio.run(pattern_manager.stop_actions())
+        except Exception as cleanup_err:
+            logger.error(f"Error in async cleanup: {cleanup_err}")
+
         state.save()
-        
         logger.info("Cleanup completed")
     except Exception as e:
         logger.error(f"Error during cleanup: {str(e)}")

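For reference, the new Still Sands endpoints added above exchange JSON shaped by ScheduledPauseRequest and TimeSlot. A minimal client sketch follows; the base URL and port are assumptions for illustration, not part of this change:

import requests

BASE_URL = "http://localhost:8080"  # assumed host/port; adjust to your Dune Weaver instance

payload = {
    "enabled": True,
    "time_slots": [
        {
            # Quiet hours every night, spanning midnight.
            "start_time": "22:30",
            "end_time": "07:00",
            "days": "daily",
        },
        {
            # Extra pause window on selected weekdays.
            "start_time": "09:00",
            "end_time": "11:00",
            "days": "custom",
            "custom_days": ["monday", "wednesday", "friday"],
        },
    ],
}

resp = requests.post(f"{BASE_URL}/api/scheduled-pause", json=payload)
resp.raise_for_status()
print(resp.json())  # expected: {"success": True, "message": "Still Sands settings updated"}

# Read the settings back.
print(requests.get(f"{BASE_URL}/api/scheduled-pause").json())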
modules/connection/connection_manager.py (+81, -29)

@@ -4,6 +4,7 @@ import logging
 import serial
 import serial.tools.list_ports
 import websocket
+import asyncio
 
 from modules.core.state import state
 from modules.led.led_controller import effect_loading, effect_idle, effect_connected, LEDController
@@ -71,7 +72,14 @@ class SerialConnection(BaseConnection):
         return self.ser is not None and self.ser.is_open
 
     def close(self) -> None:
-        update_machine_position()
+        # Run async update_machine_position in sync context
+        try:
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+            loop.run_until_complete(update_machine_position())
+            loop.close()
+        except Exception as e:
+            logger.error(f"Error updating machine position on close: {e}")
         with self.lock:
             if self.ser.is_open:
                 self.ser.close()
@@ -119,7 +127,14 @@ class WebSocketConnection(BaseConnection):
         return self.ws is not None
 
     def close(self) -> None:
-        update_machine_position()
+        # Run async update_machine_position in sync context
+        try:
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+            loop.run_until_complete(update_machine_position())
+            loop.close()
+        except Exception as e:
+            logger.error(f"Error updating machine position on close: {e}")
         with self.lock:
             if self.ws:
                 self.ws.close()
@@ -220,24 +235,26 @@ def parse_machine_position(response: str):
     return None
 
 
-def send_grbl_coordinates(x, y, speed=600, timeout=2, home=False):
+async def send_grbl_coordinates(x, y, speed=600, timeout=2, home=False):
     """
     Send a G-code command to FluidNC and wait for an 'ok' response.
     If no response after set timeout, sets state to stop and disconnects.
     """
     logger.debug(f"Sending G-code: X{x} Y{y} at F{speed}")
-    
+
     # Track overall attempt time
     overall_start_time = time.time()
-    
+
     while True:
         try:
             gcode = f"$J=G91 G21 Y{y} F{speed}" if home else f"G1 X{x} Y{y} F{speed}"
-            state.conn.send(gcode + "\n")
+            # Use asyncio.to_thread for both send and receive operations to avoid blocking
+            await asyncio.to_thread(state.conn.send, gcode + "\n")
             logger.debug(f"Sent command: {gcode}")
             start_time = time.time()
             while True:
-                response = state.conn.readline()
+                # Use asyncio.to_thread for blocking I/O operations
+                response = await asyncio.to_thread(state.conn.readline)
                 logger.debug(f"Response: {response}")
                 if response.lower() == "ok":
                     logger.debug("Command execution confirmed.")
@@ -246,7 +263,7 @@ def send_grbl_coordinates(x, y, speed=600, timeout=2, home=False):
             # Store the error string inside the exception block
             error_str = str(e)
             logger.warning(f"Error sending command: {error_str}")
-            
+
             # Immediately return for device not configured errors
             if "Device not configured" in error_str or "Errno 6" in error_str:
                 logger.error(f"Device configuration error detected: {error_str}")
@@ -256,9 +273,9 @@ def send_grbl_coordinates(x, y, speed=600, timeout=2, home=False):
                 logger.info("Connection marked as disconnected due to device error")
                 return False
 
-            
+
         logger.warning(f"No 'ok' received for X{x} Y{y}, speed {speed}. Retrying...")
-        time.sleep(0.1)
+        await asyncio.sleep(0.1)
 
     # If we reach here, the timeout has occurred
     logger.error(f"Failed to receive 'ok' response after {max_total_attempt_time} seconds. Stopping and disconnecting.")
@@ -406,20 +423,27 @@ def home(timeout=15):
                     homing_speed = 120
                 logger.info("Sensorless homing not supported. Using crash homing")
                 logger.info(f"Homing with speed {homing_speed}")
-                if state.gear_ratio == 6.25:
-                    result = send_grbl_coordinates(0, - 30, homing_speed, home=True)
-                    if result == False:
-                        logger.error("Homing failed - send_grbl_coordinates returned False")
-                        homing_complete.set()
-                        return
-                    state.machine_y -= 30
-                else:
-                    result = send_grbl_coordinates(0, -22, homing_speed, home=True)
-                    if result == False:
-                        logger.error("Homing failed - send_grbl_coordinates returned False")
-                        homing_complete.set()
-                        return
-                    state.machine_y -= 22
+
+                # Run async function in new event loop
+                loop = asyncio.new_event_loop()
+                asyncio.set_event_loop(loop)
+                try:
+                    if state.gear_ratio == 6.25:
+                        result = loop.run_until_complete(send_grbl_coordinates(0, - 30, homing_speed, home=True))
+                        if result == False:
+                            logger.error("Homing failed - send_grbl_coordinates returned False")
+                            homing_complete.set()
+                            return
+                        state.machine_y -= 30
+                    else:
+                        result = loop.run_until_complete(send_grbl_coordinates(0, -22, homing_speed, home=True))
+                        if result == False:
+                            logger.error("Homing failed - send_grbl_coordinates returned False")
+                            homing_complete.set()
+                            return
+                        state.machine_y -= 22
+                finally:
+                    loop.close()
 
             state.current_theta = state.current_rho = 0
             homing_success = True
@@ -455,16 +479,44 @@ def home(timeout=15):
 
 def check_idle():
     """
-    Continuously check if the device is idle.
+    Continuously check if the device is idle (synchronous version).
     """
     logger.info("Checking idle")
     while True:
         response = get_status_response()
         if response and "Idle" in response:
             logger.info("Device is idle")
-            update_machine_position()
+            # Schedule async update_machine_position in the existing event loop
+            try:
+                # Try to schedule in existing event loop if available
+                try:
+                    loop = asyncio.get_running_loop()
+                    # Create a task but don't await it (fire and forget)
+                    asyncio.create_task(update_machine_position())
+                    logger.debug("Scheduled machine position update task")
+                except RuntimeError:
+                    # No event loop running, skip machine position update
+                    logger.debug("No event loop running, skipping machine position update")
+            except Exception as e:
+                logger.error(f"Error scheduling machine position update: {e}")
             return True
         time.sleep(1)
+
+async def check_idle_async():
+    """
+    Continuously check if the device is idle (async version).
+    """
+    logger.info("Checking idle (async)")
+    while True:
+        response = await asyncio.to_thread(get_status_response)
+        if response and "Idle" in response:
+            logger.info("Device is idle")
+            try:
+                await update_machine_position()
+            except Exception as e:
+                logger.error(f"Error updating machine position: {e}")
+            return True
+        await asyncio.sleep(1)
 
 
 def get_machine_position(timeout=5):
@@ -490,12 +542,12 @@ def get_machine_position(timeout=5):
     logger.warning("Timeout reached waiting for machine position")
     return None, None
 
-def update_machine_position():
+async def update_machine_position():
     if (state.conn.is_connected() if state.conn else False):
         try:
             logger.info('Saving machine position')
-            state.machine_x, state.machine_y = get_machine_position()
-            state.save()
+            state.machine_x, state.machine_y = await asyncio.to_thread(get_machine_position)
+            await asyncio.to_thread(state.save)
             logger.info(f'Machine position saved: {state.machine_x}, {state.machine_y}')
         except Exception as e:
             logger.error(f"Error updating machine position: {e}")

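The close() and home() changes above run the now-async update_machine_position and send_grbl_coordinates from synchronous code by creating a private event loop. A standalone sketch of that pattern, using a stand-in coroutine rather than the real ones:

import asyncio

async def save_position():
    # Stand-in for an async coroutine such as update_machine_position().
    await asyncio.sleep(0.1)
    return 12.5, 40.0

def close_sync():
    """Synchronous caller that needs the result of an async coroutine."""
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        # Blocks this thread until the coroutine completes.
        x, y = loop.run_until_complete(save_position())
        print(f"position saved: {x}, {y}")
    finally:
        loop.close()

close_sync()  # only safe when no event loop is already running in this thread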
modules/core/cache_manager.py (+6, -2)

@@ -731,9 +731,13 @@ async def generate_cache_background():
         raise
 
 def get_cache_progress():
-    """Get the current cache generation progress."""
+    """Get the current cache generation progress.
+
+    Returns a reference to the cache_progress dict for read-only access.
+    The WebSocket handler should not modify this dict.
+    """
     global cache_progress
-    return cache_progress.copy()
+    return cache_progress  # Return reference instead of copy for better performance
 
 def is_cache_generation_needed():
     """Check if cache generation is needed."""

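The get_cache_progress() change above trades a defensive copy for a shared reference, so callers must treat the returned dict as read-only. A small illustration of the trade-off, using made-up progress fields:

cache_progress = {"processed": 10, "total": 100}  # hypothetical shape

def get_progress_copy():
    return cache_progress.copy()  # safe to mutate, but allocates on every call

def get_progress_ref():
    return cache_progress  # cheap, but mutating it would corrupt shared state

snapshot = get_progress_copy()
snapshot["processed"] = 0  # does not affect cache_progress

view = get_progress_ref()
print(view is cache_progress)  # True: same object, read-only by convention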
modules/core/pattern_manager.py (+329, -85)

@@ -3,7 +3,7 @@ import threading
 import time
 import random
 import logging
-from datetime import datetime
+from datetime import datetime, time as datetime_time
 from tqdm import tqdm
 from modules.connection import connection_manager
 from modules.core.state import state
@@ -11,6 +11,9 @@ from math import pi
 import asyncio
 import json
 from modules.led.led_controller import effect_playing, effect_idle
+import queue
+from dataclasses import dataclass
+from typing import Optional, Callable
 
 # Configure logging
 logger = logging.getLogger(__name__)
@@ -29,11 +32,249 @@ pattern_lock = asyncio.Lock()
 # Progress update task
 progress_update_task = None
 
+def is_in_scheduled_pause_period():
+    """Check if current time falls within any scheduled pause period."""
+    if not state.scheduled_pause_enabled or not state.scheduled_pause_time_slots:
+        return False
+
+    now = datetime.now()
+    current_time = now.time()
+    current_weekday = now.strftime("%A").lower()  # monday, tuesday, etc.
+
+    for slot in state.scheduled_pause_time_slots:
+        # Parse start and end times
+        try:
+            start_time = datetime_time.fromisoformat(slot['start_time'])
+            end_time = datetime_time.fromisoformat(slot['end_time'])
+        except (ValueError, KeyError):
+            logger.warning(f"Invalid time format in scheduled pause slot: {slot}")
+            continue
+
+        # Check if this slot applies to today
+        slot_applies_today = False
+        days_setting = slot.get('days', 'daily')
+
+        if days_setting == 'daily':
+            slot_applies_today = True
+        elif days_setting == 'weekdays':
+            slot_applies_today = current_weekday in ['monday', 'tuesday', 'wednesday', 'thursday', 'friday']
+        elif days_setting == 'weekends':
+            slot_applies_today = current_weekday in ['saturday', 'sunday']
+        elif days_setting == 'custom':
+            custom_days = slot.get('custom_days', [])
+            slot_applies_today = current_weekday in custom_days
+
+        if not slot_applies_today:
+            continue
+
+        # Check if current time is within the pause period
+        if start_time <= end_time:
+            # Normal case: start and end are on the same day
+            if start_time <= current_time <= end_time:
+                return True
+        else:
+            # Time spans midnight: start is before midnight, end is after midnight
+            if current_time >= start_time or current_time <= end_time:
+                return True
+
+    return False
+
+# Motion Control Thread Infrastructure
+@dataclass
+class MotionCommand:
+    """Represents a motion command for the motion control thread."""
+    command_type: str  # 'move', 'stop', 'pause', 'resume', 'shutdown'
+    theta: Optional[float] = None
+    rho: Optional[float] = None
+    speed: Optional[float] = None
+    callback: Optional[Callable] = None
+    future: Optional[asyncio.Future] = None
+
+class MotionControlThread:
+    """Dedicated thread for hardware motion control operations."""
+
+    def __init__(self):
+        self.command_queue = queue.Queue()
+        self.thread = None
+        self.running = False
+        self.paused = False
+
+    def start(self):
+        """Start the motion control thread."""
+        if self.thread and self.thread.is_alive():
+            return
+
+        self.running = True
+        self.thread = threading.Thread(target=self._motion_loop, daemon=True)
+        self.thread.start()
+        logger.info("Motion control thread started")
+
+    def stop(self):
+        """Stop the motion control thread."""
+        if not self.running:
+            return
+
+        self.running = False
+        # Send shutdown command
+        self.command_queue.put(MotionCommand('shutdown'))
+
+        if self.thread and self.thread.is_alive():
+            self.thread.join(timeout=5.0)
+        logger.info("Motion control thread stopped")
+
+    def _motion_loop(self):
+        """Main loop for the motion control thread."""
+        logger.info("Motion control thread loop started")
+
+        while self.running:
+            try:
+                # Get command with timeout to allow periodic checks
+                command = self.command_queue.get(timeout=1.0)
+
+                if command.command_type == 'shutdown':
+                    break
+
+                elif command.command_type == 'move':
+                    self._execute_move(command)
+
+                elif command.command_type == 'pause':
+                    self.paused = True
+
+                elif command.command_type == 'resume':
+                    self.paused = False
+
+                elif command.command_type == 'stop':
+                    # Clear any pending commands
+                    while not self.command_queue.empty():
+                        try:
+                            self.command_queue.get_nowait()
+                        except queue.Empty:
+                            break
+
+                self.command_queue.task_done()
+
+            except queue.Empty:
+                # Timeout - continue loop for shutdown check
+                continue
+            except Exception as e:
+                logger.error(f"Error in motion control thread: {e}")
+
+        logger.info("Motion control thread loop ended")
+
+    def _execute_move(self, command: MotionCommand):
+        """Execute a move command in the motion thread."""
+        try:
+            # Wait if paused
+            while self.paused and self.running:
+                time.sleep(0.1)
+
+            if not self.running:
+                return
+
+            # Execute the actual motion using sync version
+            self._move_polar_sync(command.theta, command.rho, command.speed)
+
+            # Signal completion if future provided
+            if command.future and not command.future.done():
+                command.future.get_loop().call_soon_threadsafe(
+                    command.future.set_result, None
+                )
+
+        except Exception as e:
+            logger.error(f"Error executing move command: {e}")
+            if command.future and not command.future.done():
+                command.future.get_loop().call_soon_threadsafe(
+                    command.future.set_exception, e
+                )
+
+    def _move_polar_sync(self, theta: float, rho: float, speed: Optional[float] = None):
+        """Synchronous version of move_polar for use in motion thread."""
+        # This is the original sync logic but running in dedicated thread
+        if state.table_type == 'dune_weaver_mini':
+            x_scaling_factor = 2
+            y_scaling_factor = 3.7
+        else:
+            x_scaling_factor = 2
+            y_scaling_factor = 5
+
+        delta_theta = theta - state.current_theta
+        delta_rho = rho - state.current_rho
+        x_increment = delta_theta * 100 / (2 * pi * x_scaling_factor)
+        y_increment = delta_rho * 100 / y_scaling_factor
+
+        x_total_steps = state.x_steps_per_mm * (100/x_scaling_factor)
+        y_total_steps = state.y_steps_per_mm * (100/y_scaling_factor)
+
+        offset = x_increment * (x_total_steps * x_scaling_factor / (state.gear_ratio * y_total_steps * y_scaling_factor))
+
+        if state.table_type == 'dune_weaver_mini' or state.y_steps_per_mm == 546:
+            y_increment -= offset
+        else:
+            y_increment += offset
+
+        new_x_abs = state.machine_x + x_increment
+        new_y_abs = state.machine_y + y_increment
+
+        # Use provided speed or fall back to state.speed
+        actual_speed = speed if speed is not None else state.speed
+
+        # Call sync version of send_grbl_coordinates in this thread
+        self._send_grbl_coordinates_sync(round(new_x_abs, 3), round(new_y_abs, 3), actual_speed)
+
+        # Update state
+        state.current_theta = theta
+        state.current_rho = rho
+        state.machine_x = new_x_abs
+        state.machine_y = new_y_abs
+
+    def _send_grbl_coordinates_sync(self, x: float, y: float, speed: int = 600, timeout: int = 2, home: bool = False):
+        """Synchronous version of send_grbl_coordinates for motion thread."""
+        logger.debug(f"Motion thread sending G-code: X{x} Y{y} at F{speed}")
+
+        # Track overall attempt time
+        overall_start_time = time.time()
+
+        while True:
+            try:
+                gcode = f"$J=G91 G21 Y{y} F{speed}" if home else f"G1 X{x} Y{y} F{speed}"
+                state.conn.send(gcode + "\n")
+                logger.debug(f"Motion thread sent command: {gcode}")
+
+                start_time = time.time()
+                while True:
+                    response = state.conn.readline()
+                    logger.debug(f"Motion thread response: {response}")
+                    if response.lower() == "ok":
+                        logger.debug("Motion thread: Command execution confirmed.")
+                        return
+
+            except Exception as e:
+                error_str = str(e)
+                logger.warning(f"Motion thread error sending command: {error_str}")
+
+                # Immediately return for device not configured errors
+                if "Device not configured" in error_str or "Errno 6" in error_str:
+                    logger.error(f"Motion thread: Device configuration error detected: {error_str}")
+                    state.stop_requested = True
+                    state.conn = None
+                    state.is_connected = False
+                    logger.info("Connection marked as disconnected due to device error")
+                    return False
+
+            logger.warning(f"Motion thread: No 'ok' received for X{x} Y{y}, speed {speed}. Retrying...")
+            time.sleep(0.1)
+
+# Global motion control thread instance
+motion_controller = MotionControlThread()
+
 async def cleanup_pattern_manager():
     """Clean up pattern manager resources"""
     global progress_update_task, pattern_lock, pause_event
-    
+
     try:
+        # Stop motion control thread
+        motion_controller.stop()
+
         # Cancel progress update task if running
         if progress_update_task and not progress_update_task.done():
             try:
@@ -45,7 +286,7 @@ async def cleanup_pattern_manager():
                     pass
             except Exception as e:
                 logger.error(f"Error cancelling progress update task: {e}")
-        
+
         # Clean up pattern lock
         if pattern_lock:
             try:
@@ -54,7 +295,7 @@ async def cleanup_pattern_manager():
                 pattern_lock = None
             except Exception as e:
                 logger.error(f"Error cleaning up pattern lock: {e}")
-        
+
         # Clean up pause event
         if pause_event:
             try:
@@ -62,7 +303,7 @@ async def cleanup_pattern_manager():
                 pause_event = None
             except Exception as e:
                 logger.error(f"Error cleaning up pause event: {e}")
-        
+
         # Clean up pause condition from state
         if state.pause_condition:
             try:
@@ -79,12 +320,12 @@ async def cleanup_pattern_manager():
         state.pause_requested = False
         state.stop_requested = True
         state.is_clearing = False
-        
+
         # Reset machine position
         await connection_manager.update_machine_position()
-        
+
         logger.info("Pattern manager resources cleaned up")
-        
+
     except Exception as e:
         logger.error(f"Error during pattern manager cleanup: {e}")
     finally:
@@ -95,14 +336,22 @@ async def cleanup_pattern_manager():
 
 def list_theta_rho_files():
     files = []
-    for root, _, filenames in os.walk(THETA_RHO_DIR):
-        for file in filenames:
+    for root, dirs, filenames in os.walk(THETA_RHO_DIR):
+        # Skip cached_images directories to avoid scanning thousands of WebP files
+        if 'cached_images' in dirs:
+            dirs.remove('cached_images')
+
+        # Filter .thr files during traversal for better performance
+        thr_files = [f for f in filenames if f.endswith('.thr')]
+
+        for file in thr_files:
             relative_path = os.path.relpath(os.path.join(root, file), THETA_RHO_DIR)
             relative_path = os.path.relpath(os.path.join(root, file), THETA_RHO_DIR)
             # Normalize path separators to always use forward slashes for consistency across platforms
             # Normalize path separators to always use forward slashes for consistency across platforms
             relative_path = relative_path.replace(os.sep, '/')
             relative_path = relative_path.replace(os.sep, '/')
             files.append(relative_path)
             files.append(relative_path)
+
     logger.debug(f"Found {len(files)} theta-rho files")
     logger.debug(f"Found {len(files)} theta-rho files")
-    return [file for file in files if file.endswith('.thr')]
+    return files
 
 
 def parse_theta_rho_file(file_path):
 def parse_theta_rho_file(file_path):
     """Parse a theta-rho file and return a list of (theta, rho) pairs."""
     """Parse a theta-rho file and return a list of (theta, rho) pairs."""
@@ -293,13 +542,13 @@ async def run_theta_rho_file(file_path, is_playlist=False):
         state.execution_progress = (0, total_coordinates, None, 0)
         state.execution_progress = (0, total_coordinates, None, 0)
         
         
         # stop actions without resetting the playlist
         # stop actions without resetting the playlist
-        stop_actions(clear_playlist=False)
+        await stop_actions(clear_playlist=False)
 
 
         state.current_playing_file = file_path
         state.current_playing_file = file_path
         state.stop_requested = False
         state.stop_requested = False
         logger.info(f"Starting pattern execution: {file_path}")
         logger.info(f"Starting pattern execution: {file_path}")
         logger.info(f"t: {state.current_theta}, r: {state.current_rho}")
         logger.info(f"t: {state.current_theta}, r: {state.current_rho}")
-        reset_theta()
+        await reset_theta()
         
         
         start_time = time.time()
         start_time = time.time()
         if state.led_controller:
         if state.led_controller:
@@ -323,17 +572,33 @@ async def run_theta_rho_file(file_path, is_playlist=False):
                 
                 
                 if state.skip_requested:
                 if state.skip_requested:
                     logger.info("Skipping pattern...")
                     logger.info("Skipping pattern...")
-                    connection_manager.check_idle()
+                    await connection_manager.check_idle_async()
                     if state.led_controller:
                     if state.led_controller:
                         effect_idle(state.led_controller)
                         effect_idle(state.led_controller)
                     break
                     break
 
 
-                # Wait for resume if paused
-                if state.pause_requested:
-                    logger.info("Execution paused...")
+                # Wait for resume if paused (manual or scheduled)
+                manual_pause = state.pause_requested
+                scheduled_pause = is_in_scheduled_pause_period()
+
+                if manual_pause or scheduled_pause:
+                    if manual_pause and scheduled_pause:
+                        logger.info("Execution paused (manual + scheduled pause active)...")
+                    elif manual_pause:
+                        logger.info("Execution paused (manual)...")
+                    else:
+                        logger.info("Execution paused (scheduled pause period)...")
+
                     if state.led_controller:
                     if state.led_controller:
                         effect_idle(state.led_controller)
                         effect_idle(state.led_controller)
-                    await pause_event.wait()
+
+                    # Wait until the manual pause is released AND we're outside any scheduled pause period
+                    while state.pause_requested or is_in_scheduled_pause_period():
+                        await asyncio.sleep(1)  # Check every second
+                        # Also wait for the pause event in case of manual pause
+                        if state.pause_requested:
+                            await pause_event.wait()
+
                     logger.info("Execution resumed...")
                     logger.info("Execution resumed...")
                     if state.led_controller:
                     if state.led_controller:
                         effect_playing(state.led_controller)
                         effect_playing(state.led_controller)
@@ -345,7 +610,7 @@ async def run_theta_rho_file(file_path, is_playlist=False):
                 else:
                 else:
                     current_speed = state.speed
                     current_speed = state.speed
                     
                     
-                move_polar(theta, rho, current_speed)
+                await move_polar(theta, rho, current_speed)
                 
                 
                 # Update progress for all coordinates including the first one
                 # Update progress for all coordinates including the first one
                 pbar.update(1)
                 pbar.update(1)
@@ -366,7 +631,7 @@ async def run_theta_rho_file(file_path, is_playlist=False):
             logger.error("Device is not connected. Stopping pattern execution.")
             logger.error("Device is not connected. Stopping pattern execution.")
             return
             return
             
             
-        connection_manager.check_idle()
+        await connection_manager.check_idle_async()
         
         
         # Set LED back to idle when pattern completes normally (not stopped early)
         # Set LED back to idle when pattern completes normally (not stopped early)
         if state.led_controller and not state.stop_requested:
         if state.led_controller and not state.stop_requested:
@@ -509,7 +774,7 @@ async def run_theta_rho_files(file_paths, pause_time=0, clear_pattern=None, run_
         
         
         logger.info("All requested patterns completed (or stopped) and state cleared")
         logger.info("All requested patterns completed (or stopped) and state cleared")
 
 
-def stop_actions(clear_playlist = True):
+async def stop_actions(clear_playlist = True):
     """Stop all current actions."""
     """Stop all current actions."""
     try:
     try:
         with state.pause_condition:
         with state.pause_condition:
@@ -518,85 +783,61 @@ def stop_actions(clear_playlist = True):
             state.current_playing_file = None
             state.current_playing_file = None
             state.execution_progress = None
             state.execution_progress = None
             state.is_clearing = False
             state.is_clearing = False
-            
+
             if clear_playlist:
             if clear_playlist:
                 # Clear playlist state
                 # Clear playlist state
                 state.current_playlist = None
                 state.current_playlist = None
                 state.current_playlist_index = None
                 state.current_playlist_index = None
                 state.playlist_mode = None
                 state.playlist_mode = None
-                
+
                 # Cancel progress update task if we're clearing the playlist
                 # Cancel progress update task if we're clearing the playlist
                 global progress_update_task
                 global progress_update_task
                 if progress_update_task and not progress_update_task.done():
                 if progress_update_task and not progress_update_task.done():
                     progress_update_task.cancel()
                     progress_update_task.cancel()
-                
+
             state.pause_condition.notify_all()
             state.pause_condition.notify_all()
-            connection_manager.update_machine_position()
+            # Call async function directly since we're in async context
+            await connection_manager.update_machine_position()
     except Exception as e:
     except Exception as e:
         logger.error(f"Error during stop_actions: {e}")
         logger.error(f"Error during stop_actions: {e}")
         # Ensure we still update machine position even if there's an error
         # Ensure we still update machine position even if there's an error
-        connection_manager.update_machine_position()
+        try:
+            await connection_manager.update_machine_position()
+        except Exception as update_err:
+            logger.error(f"Error updating machine position on error: {update_err}")
 
 
-def move_polar(theta, rho, speed=None):
+async def move_polar(theta, rho, speed=None):
     """
     """
-    This functions take in a pair of theta rho coordinate, compute the distance to travel based on current theta, rho,
-    and translate the motion to gcode jog command and sent to grbl. 
-    
-    Since having similar steps_per_mm will make x and y axis moves at around the same speed, we have to scale the 
-    x_steps_per_mm and y_steps_per_mm so that they are roughly the same. Here's the range of motion:
-    
-    X axis (angular): 50mm = 1 revolution
-    Y axis (radial): 0 => 20mm = theta 0 (center) => 1 (perimeter)
-    
+    Queue a motion command to be executed in the dedicated motion control thread.
+    This makes motion control non-blocking for API endpoints.
+
     Args:
     Args:
-        theta (_type_): _description_
-        rho (_type_): _description_
+        theta (float): Target theta coordinate
+        rho (float): Target rho coordinate
         speed (int, optional): Speed override. If None, uses state.speed
         speed (int, optional): Speed override. If None, uses state.speed
     """
     """
-    # Adding soft limit to reduce hardware sound
-    # soft_limit_inner = 0.01
-    # if rho < soft_limit_inner:
-    #     rho = soft_limit_inner
-    
-    # soft_limit_outter = 0.015
-    # if rho > (1-soft_limit_outter):
-    #     rho = (1-soft_limit_outter)
-    
-    if state.table_type == 'dune_weaver_mini':
-        x_scaling_factor = 2
-        y_scaling_factor = 3.7
-    else:
-        x_scaling_factor = 2
-        y_scaling_factor = 5
-    
-    delta_theta = theta - state.current_theta
-    delta_rho = rho - state.current_rho
-    x_increment = delta_theta * 100 / (2 * pi * x_scaling_factor)  # Added -1 to reverse direction
-    y_increment = delta_rho * 100 / y_scaling_factor
-    
-    x_total_steps = state.x_steps_per_mm * (100/x_scaling_factor)
-    y_total_steps = state.y_steps_per_mm * (100/y_scaling_factor)
-        
-    offset = x_increment * (x_total_steps * x_scaling_factor / (state.gear_ratio * y_total_steps * y_scaling_factor))
-
-    if state.table_type == 'dune_weaver_mini' or state.y_steps_per_mm == 546:
-        y_increment -= offset
-    else:
-        y_increment += offset
-    
-    new_x_abs = state.machine_x + x_increment
-    new_y_abs = state.machine_y + y_increment
-    
-    # Use provided speed or fall back to state.speed
-    actual_speed = speed if speed is not None else state.speed
-    
-    # dynamic_speed = compute_dynamic_speed(rho, max_speed=actual_speed)
-    
-    connection_manager.send_grbl_coordinates(round(new_x_abs, 3), round(new_y_abs,3), actual_speed)
-    state.current_theta = theta
-    state.current_rho = rho
-    state.machine_x = new_x_abs
-    state.machine_y = new_y_abs
+    # Ensure motion control thread is running
+    if not motion_controller.running:
+        motion_controller.start()
+
+    # Create future for async/await pattern
+    loop = asyncio.get_event_loop()
+    future = loop.create_future()
+
+    # Create and queue motion command
+    command = MotionCommand(
+        command_type='move',
+        theta=theta,
+        rho=rho,
+        speed=speed,
+        future=future
+    )
+
+    motion_controller.command_queue.put(command)
+    logger.debug(f"Queued motion command: theta={theta}, rho={rho}, speed={speed}")
+
+    # Wait for command completion
+    await future
     
     
 def pause_execution():
 def pause_execution():
     """Pause pattern execution using asyncio Event."""
     """Pause pattern execution using asyncio Event."""
@@ -612,10 +853,11 @@ def resume_execution():
     pause_event.set()  # Set the event to resume execution
     pause_event.set()  # Set the event to resume execution
     return True
     return True
     
     
-def reset_theta():
+async def reset_theta():
     logger.info('Resetting Theta')
     logger.info('Resetting Theta')
     state.current_theta = state.current_theta % (2 * pi)
     state.current_theta = state.current_theta % (2 * pi)
-    connection_manager.update_machine_position()
+    # Call async function directly since we're in async context
+    await connection_manager.update_machine_position()
 
 
 def set_speed(new_speed):
 def set_speed(new_speed):
     state.speed = new_speed
     state.speed = new_speed
@@ -625,7 +867,9 @@ def get_status():
     """Get the current status of pattern execution."""
     """Get the current status of pattern execution."""
     status = {
     status = {
         "current_file": state.current_playing_file,
         "current_file": state.current_playing_file,
-        "is_paused": state.pause_requested,
+        "is_paused": state.pause_requested or is_in_scheduled_pause_period(),
+        "manual_pause": state.pause_requested,
+        "scheduled_pause": is_in_scheduled_pause_period(),
         "is_running": bool(state.current_playing_file and not state.stop_requested),
         "is_running": bool(state.current_playing_file and not state.stop_requested),
         "progress": None,
         "progress": None,
         "playlist": None,
         "playlist": None,

+ 9 - 1
modules/core/state.py

@@ -56,10 +56,14 @@ class AppState:
         # auto_play mode settings
         # auto_play mode settings
         self.auto_play_enabled = False
         self.auto_play_enabled = False
         self.auto_play_playlist = None  # Playlist to auto-play in auto_play mode
         self.auto_play_playlist = None  # Playlist to auto-play in auto_play mode
-        self.auto_play_run_mode = "loop"  # "single" or "loop" 
+        self.auto_play_run_mode = "loop"  # "single" or "loop"
         self.auto_play_pause_time = 5.0  # Pause between patterns in seconds
         self.auto_play_pause_time = 5.0  # Pause between patterns in seconds
         self.auto_play_clear_pattern = "adaptive"  # Clear pattern option
         self.auto_play_clear_pattern = "adaptive"  # Clear pattern option
         self.auto_play_shuffle = False  # Shuffle playlist
         self.auto_play_shuffle = False  # Shuffle playlist
+
+        # Still Sands settings
+        self.scheduled_pause_enabled = False
+        self.scheduled_pause_time_slots = []  # List of time slot dictionaries
         
         
         self.load()
         self.load()
 
 
@@ -192,6 +196,8 @@ class AppState:
             "auto_play_pause_time": self.auto_play_pause_time,
             "auto_play_pause_time": self.auto_play_pause_time,
             "auto_play_clear_pattern": self.auto_play_clear_pattern,
             "auto_play_clear_pattern": self.auto_play_clear_pattern,
             "auto_play_shuffle": self.auto_play_shuffle,
             "auto_play_shuffle": self.auto_play_shuffle,
+            "scheduled_pause_enabled": self.scheduled_pause_enabled,
+            "scheduled_pause_time_slots": self.scheduled_pause_time_slots,
         }
         }
 
 
     def from_dict(self, data):
     def from_dict(self, data):
@@ -228,6 +234,8 @@ class AppState:
         self.auto_play_pause_time = data.get("auto_play_pause_time", 5.0)
         self.auto_play_pause_time = data.get("auto_play_pause_time", 5.0)
         self.auto_play_clear_pattern = data.get("auto_play_clear_pattern", "adaptive")
         self.auto_play_clear_pattern = data.get("auto_play_clear_pattern", "adaptive")
         self.auto_play_shuffle = data.get("auto_play_shuffle", False)
         self.auto_play_shuffle = data.get("auto_play_shuffle", False)
+        self.scheduled_pause_enabled = data.get("scheduled_pause_enabled", False)
+        self.scheduled_pause_time_slots = data.get("scheduled_pause_time_slots", [])
 
 
     def save(self):
     def save(self):
         """Save the current state to a JSON file."""
         """Save the current state to a JSON file."""

+ 6 - 5
modules/core/version_manager.py

@@ -20,20 +20,21 @@ class VersionManager:
         self.github_api_url = f"https://api.github.com/repos/{self.repo_owner}/{self.repo_name}"
         self.github_api_url = f"https://api.github.com/repos/{self.repo_owner}/{self.repo_name}"
         self._current_version = None
         self._current_version = None
         
         
-    def get_current_version(self) -> str:
-        """Read current version from VERSION file"""
+    async def get_current_version(self) -> str:
+        """Read current version from VERSION file (async)"""
         if self._current_version is None:
         if self._current_version is None:
             try:
             try:
                 version_file = Path(__file__).parent.parent.parent / "VERSION"
                 version_file = Path(__file__).parent.parent.parent / "VERSION"
                 if version_file.exists():
                 if version_file.exists():
-                    self._current_version = version_file.read_text().strip()
+                    self._current_version = await asyncio.to_thread(version_file.read_text)
+                    self._current_version = self._current_version.strip()
                 else:
                 else:
                     logger.warning("VERSION file not found, using default version")
                     logger.warning("VERSION file not found, using default version")
                     self._current_version = "1.0.0"
                     self._current_version = "1.0.0"
             except Exception as e:
             except Exception as e:
                 logger.error(f"Error reading VERSION file: {e}")
                 logger.error(f"Error reading VERSION file: {e}")
                 self._current_version = "1.0.0"
                 self._current_version = "1.0.0"
-        
+
         return self._current_version
         return self._current_version
     
     
     async def get_latest_release(self) -> Dict[str, any]:
     async def get_latest_release(self) -> Dict[str, any]:
@@ -94,7 +95,7 @@ class VersionManager:
     
     
     async def get_version_info(self) -> Dict[str, any]:
     async def get_version_info(self) -> Dict[str, any]:
         """Get complete version information"""
         """Get complete version information"""
-        current = self.get_current_version()
+        current = await self.get_current_version()
         latest_release = await self.get_latest_release()
         latest_release = await self.get_latest_release()
         
         
         if latest_release:
         if latest_release:

+ 75 - 3
static/js/base.js

@@ -1,5 +1,78 @@
 // Player status bar functionality - Updated to fix logMessage errors
 // Player status bar functionality - Updated to fix logMessage errors
 
 
+// Pattern files cache for improved performance with localStorage persistence
+const PATTERN_CACHE_KEY = 'dune_weaver_pattern_files_cache';
+const PATTERN_CACHE_EXPIRY = 30 * 60 * 1000; // 30-minute cache (longer since it persists across page loads)
+
+// Function to get cached pattern files or fetch fresh data
+async function getCachedPatternFiles(forceRefresh = false) {
+    const now = Date.now();
+
+    // Try to load from localStorage first
+    if (!forceRefresh) {
+        try {
+            const cachedData = localStorage.getItem(PATTERN_CACHE_KEY);
+            if (cachedData) {
+                const { files, timestamp } = JSON.parse(cachedData);
+                if (files && timestamp && (now - timestamp) < PATTERN_CACHE_EXPIRY) {
+                    console.log('Using cached pattern files from localStorage');
+                    return files;
+                }
+            }
+        } catch (error) {
+            console.warn('Error reading pattern files cache from localStorage:', error);
+        }
+    }
+
+    try {
+        console.log('Fetching fresh pattern files from server');
+        const response = await fetch('/list_theta_rho_files');
+        if (!response.ok) {
+            throw new Error(`Failed to fetch pattern files: ${response.status}`);
+        }
+
+        const files = await response.json();
+
+        // Store in localStorage
+        try {
+            const cacheData = { files, timestamp: now };
+            localStorage.setItem(PATTERN_CACHE_KEY, JSON.stringify(cacheData));
+        } catch (error) {
+            console.warn('Error storing pattern files cache in localStorage:', error);
+        }
+
+        return files;
+    } catch (error) {
+        console.error('Error fetching pattern files:', error);
+
+        // Try to return any cached data as fallback, even if expired
+        try {
+            const cachedData = localStorage.getItem(PATTERN_CACHE_KEY);
+            if (cachedData) {
+                const { files } = JSON.parse(cachedData);
+                if (files) {
+                    console.log('Using expired cached pattern files as fallback');
+                    return files;
+                }
+            }
+        } catch (fallbackError) {
+            console.warn('Error reading fallback cache:', fallbackError);
+        }
+
+        return [];
+    }
+}
+
+// Function to invalidate pattern files cache
+function invalidatePatternFilesCache() {
+    try {
+        localStorage.removeItem(PATTERN_CACHE_KEY);
+        console.log('Pattern files cache invalidated');
+    } catch (error) {
+        console.warn('Error invalidating pattern files cache:', error);
+    }
+}
+
 // Helper function to normalize file paths for cross-platform compatibility
 // Helper function to normalize file paths for cross-platform compatibility
 function normalizeFilePath(filePath) {
 function normalizeFilePath(filePath) {
     if (!filePath) return '';
     if (!filePath) return '';
@@ -905,9 +978,8 @@ function initializeCacheAllPrompt() {
 
 
 async function startCacheAllProcess() {
 async function startCacheAllProcess() {
     try {
     try {
-        // Get list of patterns
-        const response = await fetch('/list_theta_rho_files');
-        const patterns = await response.json();
+        // Get list of patterns using cached function
+        const patterns = await getCachedPatternFiles();
         
         
         if (!patterns || patterns.length === 0) {
         if (!patterns || patterns.length === 0) {
             throw new Error('No patterns found');
             throw new Error('No patterns found');
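
On the server side, getCachedPatternFiles() ultimately hits the /list_theta_rho_files endpoint, whose handler wraps the list_theta_rho_files() helper shown in main.py. A sketch of how such a handler can stay non-blocking by pushing the os.walk scan to a worker thread; the real route and any server-side caching live in main.py, so the app instance and handler body here are illustrative:

import asyncio
from fastapi import FastAPI

app = FastAPI()  # illustrative; the real application instance is created in main.py

@app.get("/list_theta_rho_files")
async def list_theta_rho_files_endpoint():
    # Offload the directory walk so large pattern libraries don't stall the event loop.
    # list_theta_rho_files() is the helper defined in main.py above.
    files = await asyncio.to_thread(list_theta_rho_files)
    return files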

+ 5 - 4
static/js/image2sand-init.js

@@ -166,7 +166,10 @@ async function saveConvertedPattern() {
             const fileInput = document.getElementById('upload_file');
             const fileInput = document.getElementById('upload_file');
             const finalFileName = 'custom_patterns/' + thrFileName;
             const finalFileName = 'custom_patterns/' + thrFileName;
             logMessage(`Image converted and saved as ${finalFileName}`, LOG_TYPE.SUCCESS);
             logMessage(`Image converted and saved as ${finalFileName}`, LOG_TYPE.SUCCESS);
-            
+
+            // Invalidate pattern files cache to include new file
+            invalidatePatternFilesCache();
+
             // Close the converter dialog
             // Close the converter dialog
             closeImageConverter();
             closeImageConverter();
 
 
@@ -386,9 +389,7 @@ async function loadThetaRhoFiles() {
 
 
     try {
     try {
         // Fetch the file list from your backend
         // Fetch the file list from your backend
-        const response = await fetch('/list_theta_rho_files');
-        if (!response.ok) throw new Error('Failed to fetch file list');
-        const files = await response.json();
+        const files = await getCachedPatternFiles();
 
 
         // Populate the list
         // Populate the list
         files.forEach(filename => {
         files.forEach(filename => {

+ 53 - 16
static/js/index.js

@@ -647,8 +647,7 @@ async function loadPatterns(forceRefresh = false) {
         
         
         // First load basic patterns list for fast initial display
         // First load basic patterns list for fast initial display
         logMessage('Fetching basic patterns list from server', LOG_TYPE.DEBUG);
         logMessage('Fetching basic patterns list from server', LOG_TYPE.DEBUG);
-        const basicResponse = await fetch('/list_theta_rho_files');
-        const basicPatterns = await basicResponse.json();
+        const basicPatterns = await getCachedPatternFiles(forceRefresh);
         const thrPatterns = basicPatterns.filter(file => file.endsWith('.thr'));
         const thrPatterns = basicPatterns.filter(file => file.endsWith('.thr'));
         logMessage(`Received ${thrPatterns.length} basic patterns from server`, LOG_TYPE.INFO);
         logMessage(`Received ${thrPatterns.length} basic patterns from server`, LOG_TYPE.INFO);
         
         
@@ -674,13 +673,25 @@ async function loadPatterns(forceRefresh = false) {
                     metadataAbortController.abort();
                     metadataAbortController.abort();
                 }
                 }
                 
                 
-                // Create new AbortController for this request
+                // Create new AbortController for this request with timeout
                 metadataAbortController = new AbortController();
                 metadataAbortController = new AbortController();
-                
+
+                // Set a timeout to prevent hanging on slow Pi systems
+                const timeoutId = setTimeout(() => {
+                    metadataAbortController.abort();
+                    logMessage('Metadata loading timed out after 30 seconds', LOG_TYPE.WARNING);
+                }, 30000); // 30 second timeout
+
                 logMessage('Loading enhanced metadata...', LOG_TYPE.DEBUG);
                 logMessage('Loading enhanced metadata...', LOG_TYPE.DEBUG);
                 const metadataResponse = await fetch('/list_theta_rho_files_with_metadata', {
                 const metadataResponse = await fetch('/list_theta_rho_files_with_metadata', {
-                    signal: metadataAbortController.signal
+                    signal: metadataAbortController.signal,
+                    headers: {
+                        'Cache-Control': 'no-cache'
+                    }
                 });
                 });
+
+                // Clear timeout if request succeeds
+                clearTimeout(timeoutId);
                 const patternsWithMetadata = await metadataResponse.json();
                 const patternsWithMetadata = await metadataResponse.json();
                 
                 
                 // Store enhanced patterns data
                 // Store enhanced patterns data
@@ -698,11 +709,31 @@ async function loadPatterns(forceRefresh = false) {
                 metadataAbortController = null;
                 metadataAbortController = null;
             } catch (metadataError) {
             } catch (metadataError) {
                 if (metadataError.name === 'AbortError') {
                 if (metadataError.name === 'AbortError') {
-                    logMessage('Metadata loading cancelled (navigating away)', LOG_TYPE.DEBUG);
+                    logMessage('Metadata loading cancelled or timed out', LOG_TYPE.WARNING);
                 } else {
                 } else {
                     logMessage(`Failed to load enhanced metadata: ${metadataError.message}`, LOG_TYPE.WARNING);
                     logMessage(`Failed to load enhanced metadata: ${metadataError.message}`, LOG_TYPE.WARNING);
                 }
                 }
-                // No fallback needed - basic patterns already displayed
+
+                // Create basic metadata from file list to populate categories
+                if (allPatterns && allPatterns.length > 0) {
+                    allPatternsWithMetadata = allPatterns.map(pattern => {
+                        const pathParts = pattern.split('/');
+                        const category = pathParts.length > 1 ? pathParts.slice(0, -1).join('/') : 'root';
+                        const fileName = pathParts[pathParts.length - 1].replace('.thr', '');
+                        return {
+                            path: pattern,
+                            name: fileName,
+                            category: category,
+                            date_modified: 0,
+                            coordinates_count: 0
+                        };
+                    });
+
+                    // Update category filter with basic data
+                    updateBrowseCategoryFilter();
+                    logMessage('Using basic category data (metadata unavailable)', LOG_TYPE.INFO);
+                }
+
                 metadataAbortController = null;
                 metadataAbortController = null;
             }
             }
         }, 100); // Small delay to let initial render complete
         }, 100); // Small delay to let initial render complete
@@ -1107,7 +1138,10 @@ function setupPreviewPanelEvents(pattern) {
                 if (result.success) {
                 if (result.success) {
                     logMessage(`Pattern deleted successfully: ${pattern}`, LOG_TYPE.SUCCESS);
                     logMessage(`Pattern deleted successfully: ${pattern}`, LOG_TYPE.SUCCESS);
                     showStatusMessage(`Pattern "${pattern.split('/').pop()}" deleted successfully`);
                     showStatusMessage(`Pattern "${pattern.split('/').pop()}" deleted successfully`);
-                    
+
+                    // Invalidate pattern files cache
+                    invalidatePatternFilesCache();
+
                     // Clear from in-memory caches
                     // Clear from in-memory caches
                     previewCache.delete(pattern);
                     previewCache.delete(pattern);
                     imageCache.delete(pattern);
                     imageCache.delete(pattern);
@@ -1143,8 +1177,8 @@ function setupPreviewPanelEvents(pattern) {
                     document.getElementById('patternPreviewTitle').textContent = 'Pattern Details';
                     document.getElementById('patternPreviewTitle').textContent = 'Pattern Details';
                     document.getElementById('firstCoordinate').textContent = '(0, 0)';
                     document.getElementById('firstCoordinate').textContent = '(0, 0)';
                     document.getElementById('lastCoordinate').textContent = '(0, 0)';
                     document.getElementById('lastCoordinate').textContent = '(0, 0)';
-                    // Refresh the pattern list (force refresh since pattern was deleted)
-                    await loadPatterns(true);
+                    // Refresh the pattern list (cache already invalidated above)
+                    await loadPatterns();
                 } else {
                 } else {
                     throw new Error(result.error || 'Unknown error');
                     throw new Error(result.error || 'Unknown error');
                 }
                 }
@@ -1642,11 +1676,14 @@ function setupUploadEventHandlers() {
                     const result = await response.json();
                     const result = await response.json();
                     if (result.success) {
                     if (result.success) {
                         successCount++;
                         successCount++;
-                        
+
+                        // Invalidate pattern files cache to include new file
+                        invalidatePatternFilesCache();
+
                         // Clear any existing cache for this pattern to ensure fresh loading
                         // Clear any existing cache for this pattern to ensure fresh loading
                         const newPatternPath = `custom_patterns/${file.name}`;
                         const newPatternPath = `custom_patterns/${file.name}`;
                         previewCache.delete(newPatternPath);
                         previewCache.delete(newPatternPath);
-                        
+
                         logMessage(`Successfully uploaded: ${file.name}`, LOG_TYPE.SUCCESS);
                         logMessage(`Successfully uploaded: ${file.name}`, LOG_TYPE.SUCCESS);
                     } else {
                     } else {
                         failCount++;
                         failCount++;
@@ -1672,8 +1709,8 @@ function setupUploadEventHandlers() {
                 // Add a small delay to allow backend preview generation to complete
                 // Add a small delay to allow backend preview generation to complete
                 await new Promise(resolve => setTimeout(resolve, 1000));
                 await new Promise(resolve => setTimeout(resolve, 1000));
                 
                 
-                // Refresh the pattern list (force refresh since new patterns were uploaded)
-                await loadPatterns(true);
+                // Refresh the pattern list (cache already invalidated above)
+                await loadPatterns();
                 
                 
                 // Trigger preview loading for newly uploaded patterns
                 // Trigger preview loading for newly uploaded patterns
                 setTimeout(() => {
                 setTimeout(() => {
@@ -1715,8 +1752,8 @@ function setupUploadEventHandlers() {
                 const patternToDelete = confirmBtn.dataset.pattern;
                 const patternToDelete = confirmBtn.dataset.pattern;
                 if (patternToDelete) {
                 if (patternToDelete) {
                     await deletePattern(patternToDelete);
                     await deletePattern(patternToDelete);
-                    // Force refresh after deletion
-                    await loadPatterns(true);
+                    // Refresh after deletion (cache invalidated in deletePattern)
+                    await loadPatterns();
                 }
                 }
                 deleteModal.classList.add('hidden');
                 deleteModal.classList.add('hidden');
             });
             });

+ 1 - 6
static/js/playlists.js

@@ -1041,12 +1041,7 @@ async function loadAvailablePatterns(forceRefresh = false) {
     try {
     try {
         // First load basic patterns list for fast initial display
         // First load basic patterns list for fast initial display
         logMessage('Fetching basic patterns list from server', LOG_TYPE.DEBUG);
         logMessage('Fetching basic patterns list from server', LOG_TYPE.DEBUG);
-        const basicResponse = await fetch('/list_theta_rho_files');
-        if (!basicResponse.ok) {
-            throw new Error('Failed to load available patterns');
-        }
-        
-        const patterns = await basicResponse.json();
+        const patterns = await getCachedPatternFiles(forceRefresh);
         const thrPatterns = patterns.filter(file => file.endsWith('.thr'));
         const thrPatterns = patterns.filter(file => file.endsWith('.thr'));
         availablePatterns = [...thrPatterns];
         availablePatterns = [...thrPatterns];
         filteredPatterns = [...availablePatterns];
         filteredPatterns = [...availablePatterns];

+ 315 - 3
static/js/settings.js

@@ -171,7 +171,7 @@ document.addEventListener('DOMContentLoaded', async () => {
         fetch('/list_serial_ports').then(response => response.json()).catch(() => []),
         fetch('/list_serial_ports').then(response => response.json()).catch(() => []),
         
         
         // Load available pattern files for clear pattern selection
         // Load available pattern files for clear pattern selection
-        fetch('/list_theta_rho_files').then(response => response.json()).catch(() => []),
+        getCachedPatternFiles().catch(() => []),
         
         
         // Load current custom clear patterns
         // Load current custom clear patterns
         fetch('/api/custom_clear_patterns').then(response => response.json()).catch(() => ({ custom_clear_from_in: null, custom_clear_from_out: null })),
         fetch('/api/custom_clear_patterns').then(response => response.json()).catch(() => ({ custom_clear_from_in: null, custom_clear_from_out: null })),
@@ -180,8 +180,11 @@ document.addEventListener('DOMContentLoaded', async () => {
         fetch('/api/clear_pattern_speed').then(response => response.json()).catch(() => ({ clear_pattern_speed: 200 })),
         fetch('/api/clear_pattern_speed').then(response => response.json()).catch(() => ({ clear_pattern_speed: 200 })),
         
         
         // Load current app name
         // Load current app name
-        fetch('/api/app-name').then(response => response.json()).catch(() => ({ app_name: 'Dune Weaver' }))
-    ]).then(([statusData, wledData, updateData, ports, patterns, clearPatterns, clearSpeedData, appNameData]) => {
+        fetch('/api/app-name').then(response => response.json()).catch(() => ({ app_name: 'Dune Weaver' })),
+
+        // Load Still Sands settings
+        fetch('/api/scheduled-pause').then(response => response.json()).catch(() => ({ enabled: false, time_slots: [] }))
+    ]).then(([statusData, wledData, updateData, ports, patterns, clearPatterns, clearSpeedData, appNameData, scheduledPauseData]) => {
         // Update connection status
         // Update connection status
         setCachedConnectionStatus(statusData);
         setCachedConnectionStatus(statusData);
         updateConnectionUI(statusData);
         updateConnectionUI(statusData);
@@ -299,6 +302,9 @@ document.addEventListener('DOMContentLoaded', async () => {
         if (appNameInput && appNameData.app_name) {
         if (appNameInput && appNameData.app_name) {
             appNameInput.value = appNameData.app_name;
             appNameInput.value = appNameData.app_name;
         }
         }
+
+        // Store Still Sands data for later initialization
+        window.initialStillSandsData = scheduledPauseData;
     }).catch(error => {
     }).catch(error => {
         logMessage(`Error initializing settings page: ${error.message}`, LOG_TYPE.ERROR);
         logMessage(`Error initializing settings page: ${error.message}`, LOG_TYPE.ERROR);
     });
     });
@@ -1020,4 +1026,310 @@ async function initializeauto_playMode() {
 // Initialize auto_play mode when DOM is ready
 // Initialize auto_play mode when DOM is ready
 document.addEventListener('DOMContentLoaded', function() {
 document.addEventListener('DOMContentLoaded', function() {
     initializeauto_playMode();
     initializeauto_playMode();
+    initializeStillSandsMode();
 });
 });
+
+// Still Sands Mode Functions
+async function initializeStillSandsMode() {
+    logMessage('Initializing Still Sands mode', LOG_TYPE.INFO);
+
+    const stillSandsToggle = document.getElementById('scheduledPauseToggle');
+    const stillSandsSettings = document.getElementById('scheduledPauseSettings');
+    const addTimeSlotButton = document.getElementById('addTimeSlotButton');
+    const saveStillSandsButton = document.getElementById('savePauseSettings');
+    const timeSlotsContainer = document.getElementById('timeSlotsContainer');
+
+    // Check if elements exist
+    if (!stillSandsToggle || !stillSandsSettings || !addTimeSlotButton || !saveStillSandsButton || !timeSlotsContainer) {
+        logMessage('Still Sands elements not found, skipping initialization', LOG_TYPE.WARNING);
+        logMessage(`Found elements: toggle=${!!stillSandsToggle}, settings=${!!stillSandsSettings}, addBtn=${!!addTimeSlotButton}, saveBtn=${!!saveStillSandsButton}, container=${!!timeSlotsContainer}`, LOG_TYPE.WARNING);
+        return;
+    }
+
+    logMessage('All Still Sands elements found successfully', LOG_TYPE.INFO);
+
+    // Track time slots
+    let timeSlots = [];
+    let slotIdCounter = 0;
+
+    // Load current Still Sands settings from initial data
+    try {
+        // Use the data loaded during page initialization, fallback to API if not available
+        let data;
+        if (window.initialStillSandsData) {
+            data = window.initialStillSandsData;
+            // Clear the global variable after use
+            delete window.initialStillSandsData;
+        } else {
+            // Fallback to API call if initial data not available
+            const response = await fetch('/api/scheduled-pause');
+            data = await response.json();
+        }
+
+        stillSandsToggle.checked = data.enabled || false;
+        if (data.enabled) {
+            stillSandsSettings.style.display = 'block';
+        }
+
+        // Load existing time slots
+        timeSlots = data.time_slots || [];
+        renderTimeSlots();
+    } catch (error) {
+        logMessage(`Error loading Still Sands settings: ${error.message}`, LOG_TYPE.ERROR);
+        // Initialize with empty settings if load fails
+        timeSlots = [];
+        renderTimeSlots();
+    }
+
+    // Function to validate time format (HH:MM)
+    function isValidTime(timeString) {
+        const timeRegex = /^([01]?[0-9]|2[0-3]):[0-5][0-9]$/;
+        return timeRegex.test(timeString);
+    }
+
+    // Function to create a new time slot element
+    function createTimeSlotElement(slot) {
+        const slotDiv = document.createElement('div');
+        slotDiv.className = 'time-slot-item';
+        slotDiv.dataset.slotId = slot.id;
+
+        slotDiv.innerHTML = `
+            <div class="flex items-center gap-3">
+                <div class="flex-1 grid grid-cols-1 md:grid-cols-2 gap-3">
+                    <div class="flex flex-col gap-1">
+                        <label class="text-slate-700 dark:text-slate-300 text-xs font-medium">Start Time</label>
+                        <input
+                            type="time"
+                            class="start-time form-input resize-none overflow-hidden rounded-lg text-slate-900 focus:outline-0 focus:ring-2 focus:ring-sky-500 border border-slate-300 bg-white focus:border-sky-500 h-9 px-3 text-sm font-normal leading-normal transition-colors"
+                            value="${slot.start_time || ''}"
+                            required
+                        />
+                    </div>
+                    <div class="flex flex-col gap-1">
+                        <label class="text-slate-700 dark:text-slate-300 text-xs font-medium">End Time</label>
+                        <input
+                            type="time"
+                            class="end-time form-input resize-none overflow-hidden rounded-lg text-slate-900 focus:outline-0 focus:ring-2 focus:ring-sky-500 border border-slate-300 bg-white focus:border-sky-500 h-9 px-3 text-sm font-normal leading-normal transition-colors"
+                            value="${slot.end_time || ''}"
+                            required
+                        />
+                    </div>
+                </div>
+                <div class="flex flex-col gap-1">
+                    <label class="text-slate-700 dark:text-slate-300 text-xs font-medium">Days</label>
+                    <select class="days-select form-select resize-none overflow-hidden rounded-lg text-slate-900 focus:outline-0 focus:ring-2 focus:ring-sky-500 border border-slate-300 bg-white focus:border-sky-500 h-9 px-3 text-sm font-normal transition-colors">
+                        <option value="daily" ${slot.days === 'daily' ? 'selected' : ''}>Daily</option>
+                        <option value="weekdays" ${slot.days === 'weekdays' ? 'selected' : ''}>Weekdays</option>
+                        <option value="weekends" ${slot.days === 'weekends' ? 'selected' : ''}>Weekends</option>
+                        <option value="custom" ${slot.days === 'custom' ? 'selected' : ''}>Custom</option>
+                    </select>
+                </div>
+                <button
+                    type="button"
+                    class="remove-slot-btn flex items-center justify-center w-9 h-9 text-red-600 hover:text-red-700 hover:bg-red-50 dark:hover:bg-red-900/20 rounded-lg transition-colors"
+                    title="Remove time slot"
+                >
+                    <span class="material-icons text-base">delete</span>
+                </button>
+            </div>
+            <div class="custom-days-container mt-2" style="display: ${slot.days === 'custom' ? 'block' : 'none'};">
+                <label class="text-slate-700 dark:text-slate-300 text-xs font-medium mb-1 block">Select Days</label>
+                <div class="flex flex-wrap gap-2">
+                    ${['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'].map(day => `
+                        <label class="flex items-center gap-1 text-xs">
+                            <input
+                                type="checkbox"
+                                name="custom-days-${slot.id}"
+                                value="${day}"
+                                ${slot.custom_days && slot.custom_days.includes(day) ? 'checked' : ''}
+                                class="rounded border-slate-300 text-sky-600 focus:ring-sky-500"
+                            />
+                            <span class="text-slate-700 dark:text-slate-300 capitalize">${day.substring(0, 3)}</span>
+                        </label>
+                    `).join('')}
+                </div>
+            </div>
+        `;
+
+        // Add event listeners for this slot
+        const startTimeInput = slotDiv.querySelector('.start-time');
+        const endTimeInput = slotDiv.querySelector('.end-time');
+        const daysSelect = slotDiv.querySelector('.days-select');
+        const customDaysContainer = slotDiv.querySelector('.custom-days-container');
+        const removeButton = slotDiv.querySelector('.remove-slot-btn');
+
+        // Show/hide custom days based on selection
+        daysSelect.addEventListener('change', () => {
+            customDaysContainer.style.display = daysSelect.value === 'custom' ? 'block' : 'none';
+            updateTimeSlot(slot.id);
+        });
+
+        // Update slot data when inputs change
+        startTimeInput.addEventListener('change', () => updateTimeSlot(slot.id));
+        endTimeInput.addEventListener('change', () => updateTimeSlot(slot.id));
+
+        // Handle custom day checkboxes
+        customDaysContainer.addEventListener('change', () => updateTimeSlot(slot.id));
+
+        // Remove slot button
+        removeButton.addEventListener('click', () => {
+            removeTimeSlot(slot.id);
+        });
+
+        return slotDiv;
+    }
+
+    // Function to render all time slots
+    function renderTimeSlots() {
+        timeSlotsContainer.innerHTML = '';
+
+        if (timeSlots.length === 0) {
+            timeSlotsContainer.innerHTML = `
+                <div class="text-center py-8 text-slate-500 dark:text-slate-400">
+                    <span class="material-icons text-4xl mb-2 block">schedule</span>
+                    <p>No time slots configured</p>
+                    <p class="text-xs mt-1">Click "Add Still Period" to create a pause schedule</p>
+                </div>
+            `;
+            return;
+        }
+
+        timeSlots.forEach(slot => {
+            const slotElement = createTimeSlotElement(slot);
+            timeSlotsContainer.appendChild(slotElement);
+        });
+    }
+
+    // Function to add a new time slot
+    function addTimeSlot() {
+        const newSlot = {
+            id: ++slotIdCounter,
+            start_time: '22:00',
+            end_time: '08:00',
+            days: 'daily',
+            custom_days: []
+        };
+
+        timeSlots.push(newSlot);
+        renderTimeSlots();
+    }
+
+    // Function to remove a time slot
+    function removeTimeSlot(slotId) {
+        timeSlots = timeSlots.filter(slot => slot.id !== slotId);
+        renderTimeSlots();
+    }
+
+    // Function to update a time slot's data
+    function updateTimeSlot(slotId) {
+        const slotElement = timeSlotsContainer.querySelector(`[data-slot-id="${slotId}"]`);
+        if (!slotElement) return;
+
+        const slot = timeSlots.find(s => s.id === slotId);
+        if (!slot) return;
+
+        // Update slot data from inputs
+        slot.start_time = slotElement.querySelector('.start-time').value;
+        slot.end_time = slotElement.querySelector('.end-time').value;
+        slot.days = slotElement.querySelector('.days-select').value;
+
+        // Update custom days if applicable
+        if (slot.days === 'custom') {
+            const checkedDays = Array.from(slotElement.querySelectorAll(`input[name="custom-days-${slotId}"]:checked`))
+                .map(cb => cb.value);
+            slot.custom_days = checkedDays;
+        } else {
+            slot.custom_days = [];
+        }
+    }
+
+    // Function to validate all time slots
+    function validateTimeSlots() {
+        const errors = [];
+
+        timeSlots.forEach((slot, index) => {
+            if (!slot.start_time || !isValidTime(slot.start_time)) {
+                errors.push(`Time slot ${index + 1}: Invalid start time`);
+            }
+            if (!slot.end_time || !isValidTime(slot.end_time)) {
+                errors.push(`Time slot ${index + 1}: Invalid end time`);
+            }
+            if (slot.days === 'custom' && (!slot.custom_days || slot.custom_days.length === 0)) {
+                errors.push(`Time slot ${index + 1}: Please select at least one day for custom schedule`);
+            }
+        });
+
+        return errors;
+    }
+
+    // Function to save settings
+    async function saveStillSandsSettings() {
+        // Update all slots from current form values
+        timeSlots.forEach(slot => updateTimeSlot(slot.id));
+
+        // Validate time slots
+        const validationErrors = validateTimeSlots();
+        if (validationErrors.length > 0) {
+            showStatusMessage(`Validation errors: ${validationErrors.join(', ')}`, 'error');
+            return;
+        }
+
+        try {
+            const response = await fetch('/api/scheduled-pause', {
+                method: 'POST',
+                headers: { 'Content-Type': 'application/json' },
+                body: JSON.stringify({
+                    enabled: stillSandsToggle.checked,
+                    time_slots: timeSlots.map(slot => ({
+                        start_time: slot.start_time,
+                        end_time: slot.end_time,
+                        days: slot.days,
+                        custom_days: slot.custom_days
+                    }))
+                })
+            });
+
+            if (!response.ok) {
+                const errorData = await response.json();
+                throw new Error(errorData.detail || 'Failed to save Still Sands settings');
+            }
+
+            showStatusMessage('Still Sands settings saved successfully', 'success');
+        } catch (error) {
+            logMessage(`Error saving Still Sands settings: ${error.message}`, LOG_TYPE.ERROR);
+            showStatusMessage(`Failed to save settings: ${error.message}`, 'error');
+        }
+    }
+
+    // Initialize slot ID counter
+    if (timeSlots.length > 0) {
+        slotIdCounter = Math.max(...timeSlots.map(slot => slot.id || 0));
+    }
+
+    // Assign IDs to existing slots if they don't have them
+    timeSlots.forEach(slot => {
+        if (!slot.id) {
+            slot.id = ++slotIdCounter;
+        }
+    });
+
+    // Event listeners
+    stillSandsToggle.addEventListener('change', async () => {
+        logMessage(`Still Sands toggle changed: ${stillSandsToggle.checked}`, LOG_TYPE.INFO);
+        stillSandsSettings.style.display = stillSandsToggle.checked ? 'block' : 'none';
+        logMessage(`Settings display set to: ${stillSandsSettings.style.display}`, LOG_TYPE.INFO);
+
+        // Auto-save when toggle changes
+        try {
+            await saveStillSandsSettings();
+            const statusText = stillSandsToggle.checked ? 'enabled' : 'disabled';
+            showStatusMessage(`Still Sands ${statusText} successfully`, 'success');
+        } catch (error) {
+            logMessage(`Error saving Still Sands toggle: ${error.message}`, LOG_TYPE.ERROR);
+            showStatusMessage(`Failed to save Still Sands setting: ${error.message}`, 'error');
+        }
+    });
+
+    addTimeSlotButton.addEventListener('click', addTimeSlot);
+    saveStillSandsButton.addEventListener('click', saveStillSandsSettings);
+}
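
The settings page reads and writes its schedule through GET and POST /api/scheduled-pause (the fetches above), persisting into the two state fields added in modules/core/state.py. A minimal sketch of what those handlers can look like; the route paths and payload shape follow the frontend code, while the app instance and the validation shown here are illustrative:

from fastapi import FastAPI, HTTPException

# `state` is assumed to be the shared AppState instance used throughout main.py.
app = FastAPI()  # illustrative; the real application instance is created in main.py

@app.get("/api/scheduled-pause")
async def get_scheduled_pause():
    # Shape matches what initializeStillSandsMode() expects: { enabled, time_slots }
    return {
        "enabled": state.scheduled_pause_enabled,
        "time_slots": state.scheduled_pause_time_slots,
    }

@app.post("/api/scheduled-pause")
async def set_scheduled_pause(payload: dict):
    time_slots = payload.get("time_slots", [])
    for slot in time_slots:
        if not slot.get("start_time") or not slot.get("end_time"):
            raise HTTPException(status_code=400, detail="Each still period needs a start and end time")
    state.scheduled_pause_enabled = bool(payload.get("enabled", False))
    state.scheduled_pause_time_slots = time_slots
    state.save()  # persist via the AppState JSON file
    return {"success": True}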

+ 176 - 0
templates/settings.html

@@ -112,6 +112,114 @@ endblock %}
   background-color: #92400e;
   background-color: #92400e;
   color: #fef3c7;
   color: #fef3c7;
 }
 }
+
+/* Toggle switch styles */
+.switch {
+  position: relative;
+  display: inline-block;
+  width: 60px;
+  height: 34px;
+}
+
+.switch input {
+  opacity: 0;
+  width: 0;
+  height: 0;
+}
+
+.slider {
+  position: absolute;
+  cursor: pointer;
+  top: 0;
+  left: 0;
+  right: 0;
+  bottom: 0;
+  background-color: #ccc;
+  transition: .4s;
+}
+
+.slider:before {
+  position: absolute;
+  content: "";
+  height: 26px;
+  width: 26px;
+  left: 4px;
+  bottom: 4px;
+  background-color: white;
+  transition: .4s;
+}
+
+input:checked + .slider {
+  background-color: #0c7ff2;
+}
+
+input:focus + .slider {
+  box-shadow: 0 0 1px #0c7ff2;
+}
+
+input:checked + .slider:before {
+  transform: translateX(26px);
+}
+
+.slider.round {
+  border-radius: 34px;
+}
+
+.slider.round:before {
+  border-radius: 50%;
+}
+
+/* Dark mode for switches */
+.dark .slider {
+  background-color: #404040;
+}
+
+.dark input:checked + .slider {
+  background-color: #0c7ff2;
+}
+
+/* Time slot specific styles */
+.time-slot-item {
+  background-color: #f8fafc;
+  border: 1px solid #e2e8f0;
+  border-radius: 8px;
+  padding: 16px;
+  transition: all 0.15s;
+}
+
+.dark .time-slot-item {
+  background-color: #1e293b;
+  border-color: #475569;
+}
+
+.time-slot-item:hover {
+  border-color: #cbd5e1;
+}
+
+.dark .time-slot-item:hover {
+  border-color: #64748b;
+}
+
+/* Info box dark mode */
+.dark .bg-blue-50 {
+  background-color: #1e3a8a;
+}
+
+.dark .border-blue-200 {
+  border-color: #1e40af;
+}
+
+.dark .text-blue-600 {
+  color: #60a5fa;
+}
+
+.dark .text-blue-800 {
+  color: #dbeafe;
+}
+
+.dark .text-blue-700 {
+  color: #bfdbfe;
+}
 {% endblock %}
 {% endblock %}
 
 
 {% block content %}
 {% block content %}
@@ -488,6 +596,74 @@ endblock %}
       </div>
       </div>
     </div>
     </div>
   </section>
   </section>
+  <section class="bg-white rounded-xl shadow-sm overflow-hidden">
+    <h2
+      class="text-slate-800 text-xl sm:text-2xl font-semibold leading-tight tracking-[-0.01em] px-6 py-4 border-b border-slate-200"
+    >
+      Still Sands
+    </h2>
+    <div class="px-6 py-5 space-y-6">
+      <div class="flex items-center justify-between">
+        <div class="flex-1">
+          <h3 class="text-slate-700 text-base font-medium leading-normal">Enable Still Sands</h3>
+          <p class="text-xs text-slate-500 mt-1">
+            Automatically bring the sands to rest during specified time periods.
+          </p>
+        </div>
+        <label class="switch">
+          <input type="checkbox" id="scheduledPauseToggle">
+          <span class="slider round"></span>
+        </label>
+      </div>
+
+      <div id="scheduledPauseSettings" class="space-y-4" style="display: none;">
+        <div class="bg-slate-50 rounded-lg p-4 space-y-4">
+          <div class="flex items-center justify-between">
+            <h4 class="text-slate-800 text-base font-semibold">Still Periods</h4>
+            <button
+              id="addTimeSlotButton"
+              class="flex items-center justify-center gap-2 cursor-pointer rounded-lg h-9 px-3 bg-sky-600 hover:bg-sky-700 text-white text-xs font-medium leading-normal tracking-[0.015em] transition-colors"
+            >
+              <span class="material-icons text-base">add</span>
+              <span>Add Still Period</span>
+            </button>
+          </div>
+          <p class="text-sm text-slate-600">
+            Define time periods when the sands should rest in stillness. Patterns will resume automatically when still periods end.
+          </p>
+
+          <div id="timeSlotsContainer" class="space-y-3">
+            <!-- Time slots will be dynamically added here -->
+          </div>
+
+          <div class="text-xs text-slate-500 bg-blue-50 border border-blue-200 rounded-lg p-3">
+            <div class="flex items-start gap-2">
+              <span class="material-icons text-blue-600 text-base">info</span>
+              <div>
+                <p class="font-medium text-blue-800">Important Notes:</p>
+                <ul class="mt-1 space-y-1 text-blue-700">
+                  <li>• Times are based on your system's local time zone</li>
+                  <li>• Currently running patterns will pause immediately when entering a still period</li>
+                  <li>• Patterns will resume automatically when exiting a still period</li>
+                  <li>• Still periods that span midnight (e.g., 22:00 to 06:00) are supported</li>
+                </ul>
+              </div>
+            </div>
+          </div>
+        </div>
+
+        <div class="flex justify-end">
+          <button
+            id="savePauseSettings"
+            class="flex items-center justify-center gap-2 min-w-[140px] cursor-pointer rounded-lg h-10 px-4 bg-sky-600 hover:bg-sky-700 text-white text-sm font-medium leading-normal tracking-[0.015em] transition-colors"
+          >
+            <span class="material-icons text-lg">save</span>
+            <span class="truncate">Save Still Sands</span>
+          </button>
+        </div>
+      </div>
+    </div>
+  </section>
   <section class="bg-white rounded-xl shadow-sm overflow-hidden">
   <section class="bg-white rounded-xl shadow-sm overflow-hidden">
     <h2
     <h2
       class="text-slate-800 text-xl sm:text-2xl font-semibold leading-tight tracking-[-0.01em] px-6 py-4 border-b border-slate-200"
       class="text-slate-800 text-xl sm:text-2xl font-semibold leading-tight tracking-[-0.01em] px-6 py-4 border-b border-slate-200"