Merge main into kiosk_mode_eglfs

Resolved conflicts in:
- VERSION: Updated to 3.3.3 from main
- main.py: Used improved async cache generation from main
- modules/connection/connection_manager.py: Used proper asyncio event loop handling from main

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
tuanchris · 3 months ago
parent commit 219c3da6b3

+ 1 - 1
VERSION

@@ -1 +1 @@
-3.2.1
+3.3.3

+ 3 - 1
docker-compose.yml

@@ -1,12 +1,14 @@
 services:
   dune-weaver:
-    build: . # Uncomment this if you need to build 
+    build: . # Uncomment this if you need to build
     image: ghcr.io/tuanchris/dune-weaver:main # Use latest production image
     restart: always
     ports:
       - "8080:8080" # Map port 8080 of the container to 8080 of the host (access via http://localhost:8080)
     volumes:
       - .:/app
+      # Mount timezone file from host for Still Sands scheduling
+      - /etc/timezone:/etc/host-timezone:ro
     devices:
       - "/dev/ttyACM0:/dev/ttyACM0"
       - "/dev/ttyUSB0:/dev/ttyUSB0"

+ 270 - 67
main.py

@@ -27,11 +27,20 @@ import json
 import base64
 import time
 import argparse
+from concurrent.futures import ProcessPoolExecutor
+import multiprocessing
 
 # Get log level from environment variable, default to INFO
 log_level_str = os.getenv('LOG_LEVEL', 'INFO').upper()
 log_level = getattr(logging, log_level_str, logging.INFO)
 
+# Create a process pool for CPU-intensive tasks
+# Limit to reasonable number of workers for embedded systems
+cpu_count = multiprocessing.cpu_count()
+# Maximum 3 workers (leaving 1 for motion), minimum 1
+process_pool_size = min(3, max(1, cpu_count - 1))
+process_pool = None  # Will be initialized in lifespan
+
 logging.basicConfig(
     level=log_level,
     format='%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s',
@@ -65,6 +74,11 @@ async def lifespan(app: FastAPI):
     # Register signal handlers
     signal.signal(signal.SIGINT, signal_handler)
     signal.signal(signal.SIGTERM, signal_handler)
+
+    # Initialize process pool for CPU-intensive tasks
+    global process_pool
+    process_pool = ProcessPoolExecutor(max_workers=process_pool_size)
+    logger.info(f"Initialized process pool with {process_pool_size} workers (detected {cpu_count} cores total)")
     
     try:
         connection_manager.connect_device()
@@ -95,19 +109,34 @@ async def lifespan(app: FastAPI):
     except Exception as e:
         logger.warning(f"Failed to initialize MQTT: {str(e)}")
     
-    # Start cache generation in background if needed
-    try:
-        from modules.core.cache_manager import is_cache_generation_needed, generate_cache_background
-        if is_cache_generation_needed():
-            logger.info("Cache generation needed, starting background task...")
-            asyncio.create_task(generate_cache_background())
-        else:
-            logger.info("Cache is up to date, skipping generation")
-    except Exception as e:
-        logger.warning(f"Failed to start cache generation: {str(e)}")
+    # Schedule cache generation check for later (non-blocking startup)
+    async def delayed_cache_check():
+        """Check and generate cache in background."""
+        try:
+            logger.info("Starting cache check...")
+
+            from modules.core.cache_manager import is_cache_generation_needed_async, generate_cache_background
+
+            if await is_cache_generation_needed_async():
+                logger.info("Cache generation needed, starting background task...")
+                asyncio.create_task(generate_cache_background())  # Don't await - run in background
+            else:
+                logger.info("Cache is up to date, skipping generation")
+        except Exception as e:
+            logger.warning(f"Failed during cache generation: {str(e)}")
+
+    # Start cache check in background immediately
+    asyncio.create_task(delayed_cache_check())
 
     yield  # This separates startup from shutdown code
 
+    # Shutdown
+    logger.info("Shutting down Dune Weaver application...")
+
+    # Shutdown process pool
+    if process_pool:
+        process_pool.shutdown(wait=True)
+        logger.info("Process pool shutdown complete")
 
 app = FastAPI(lifespan=lifespan)
 templates = Jinja2Templates(directory="templates")
@@ -125,6 +154,17 @@ class auto_playModeRequest(BaseModel):
     clear_pattern: Optional[str] = "adaptive"
     shuffle: Optional[bool] = False
 
+class TimeSlot(BaseModel):
+    start_time: str  # HH:MM format
+    end_time: str    # HH:MM format
+    days: str        # "daily", "weekdays", "weekends", or "custom"
+    custom_days: Optional[List[str]] = []  # ["monday", "tuesday", etc.]
+
+class ScheduledPauseRequest(BaseModel):
+    enabled: bool
+    control_wled: Optional[bool] = False
+    time_slots: List[TimeSlot] = []
+
 class CoordinateRequest(BaseModel):
     theta: float
     rho: float
@@ -210,11 +250,12 @@ async def broadcast_status_update(status: dict):
 
 @app.websocket("/ws/cache-progress")
 async def websocket_cache_progress_endpoint(websocket: WebSocket):
+    from modules.core.cache_manager import get_cache_progress
+
     await websocket.accept()
     active_cache_progress_connections.add(websocket)
     try:
         while True:
-            from modules.core.cache_manager import get_cache_progress
             progress = get_cache_progress()
             try:
                 await websocket.send_json({
@@ -225,7 +266,7 @@ async def websocket_cache_progress_endpoint(websocket: WebSocket):
                 if "close message has been sent" in str(e):
                     break
                 raise
-            await asyncio.sleep(0.5)  # Update every 500ms
+            await asyncio.sleep(1.0)  # Update every 1 second (reduced frequency for better performance)
     except WebSocketDisconnect:
         pass
     finally:
@@ -275,10 +316,74 @@ async def set_auto_play_mode(request: auto_playModeRequest):
     logger.info(f"auto_play mode {'enabled' if request.enabled else 'disabled'}, playlist: {request.playlist}")
     return {"success": True, "message": "auto_play mode settings updated"}
 
+@app.get("/api/scheduled-pause")
+async def get_scheduled_pause():
+    """Get current Still Sands settings."""
+    return {
+        "enabled": state.scheduled_pause_enabled,
+        "control_wled": state.scheduled_pause_control_wled,
+        "time_slots": state.scheduled_pause_time_slots
+    }
+
+@app.post("/api/scheduled-pause")
+async def set_scheduled_pause(request: ScheduledPauseRequest):
+    """Update Still Sands settings."""
+    try:
+        # Validate time slots
+        for i, slot in enumerate(request.time_slots):
+            # Validate time format (HH:MM)
+            try:
+                start_time = datetime.strptime(slot.start_time, "%H:%M").time()
+                end_time = datetime.strptime(slot.end_time, "%H:%M").time()
+            except ValueError:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid time format in slot {i+1}. Use HH:MM format."
+                )
+
+            # Validate days setting
+            if slot.days not in ["daily", "weekdays", "weekends", "custom"]:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid days setting in slot {i+1}. Must be 'daily', 'weekdays', 'weekends', or 'custom'."
+                )
+
+            # Validate custom days if applicable
+            if slot.days == "custom":
+                if not slot.custom_days or len(slot.custom_days) == 0:
+                    raise HTTPException(
+                        status_code=400,
+                        detail=f"Custom days must be specified for slot {i+1} when days is set to 'custom'."
+                    )
+
+                valid_days = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
+                for day in slot.custom_days:
+                    if day not in valid_days:
+                        raise HTTPException(
+                            status_code=400,
+                            detail=f"Invalid day '{day}' in slot {i+1}. Valid days are: {', '.join(valid_days)}"
+                        )
+
+        # Update state
+        state.scheduled_pause_enabled = request.enabled
+        state.scheduled_pause_control_wled = request.control_wled
+        state.scheduled_pause_time_slots = [slot.model_dump() for slot in request.time_slots]
+        state.save()
+
+        wled_msg = " (with WLED control)" if request.control_wled else ""
+        logger.info(f"Still Sands {'enabled' if request.enabled else 'disabled'} with {len(request.time_slots)} time slots{wled_msg}")
+        return {"success": True, "message": "Still Sands settings updated"}
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Error updating Still Sands settings: {str(e)}")
+        raise HTTPException(status_code=500, detail=f"Failed to update Still Sands settings: {str(e)}")
+
 @app.get("/list_serial_ports")
 async def list_ports():
     logger.debug("Listing available serial ports")
-    return connection_manager.list_serial_ports()
+    return await asyncio.to_thread(connection_manager.list_serial_ports)
 
 @app.post("/connect")
 async def connect(request: ConnectRequest):
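The new `/api/scheduled-pause` endpoint accepts a body shaped by the `ScheduledPauseRequest` and `TimeSlot` models defined earlier in this diff. A hedged client example follows; the field names and allowed values come from the models and validation above, while the host/port (taken from the compose port mapping) and the use of `requests` are assumptions.

```python
import requests  # assumed client library; any HTTP client works

payload = {
    "enabled": True,
    "control_wled": True,  # also switch WLED off during pause windows
    "time_slots": [
        {
            # Pause every night from 22:30 to 07:00 (window spans midnight)
            "start_time": "22:30",
            "end_time": "07:00",
            "days": "daily",
        },
        {
            # Additional quiet hours on selected days only
            "start_time": "13:00",
            "end_time": "15:00",
            "days": "custom",
            "custom_days": ["saturday", "sunday"],
        },
    ],
}

resp = requests.post("http://localhost:8080/api/scheduled-pause", json=payload)
resp.raise_for_status()
print(resp.json())  # e.g. {"success": True, "message": "Still Sands settings updated"}
```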
@@ -324,7 +429,8 @@ async def restart(request: ConnectRequest):
 @app.get("/list_theta_rho_files")
 async def list_theta_rho_files():
     logger.debug("Listing theta-rho files")
-    files = pattern_manager.list_theta_rho_files()
+    # Run the blocking file system operation in a thread pool
+    files = await asyncio.to_thread(pattern_manager.list_theta_rho_files)
     return sorted(files)
 
 @app.get("/list_theta_rho_files_with_metadata")
@@ -337,9 +443,10 @@ async def list_theta_rho_files_with_metadata():
     import asyncio
     from concurrent.futures import ThreadPoolExecutor
     
-    files = pattern_manager.list_theta_rho_files()
+    # Run the blocking file listing in a thread
+    files = await asyncio.to_thread(pattern_manager.list_theta_rho_files)
     files_with_metadata = []
-    
+
     # Use ThreadPoolExecutor for I/O-bound operations
     executor = ThreadPoolExecutor(max_workers=4)
     
@@ -392,18 +499,74 @@ async def list_theta_rho_files_with_metadata():
                 'coordinates_count': 0
             }
     
-    # Process files in parallel using asyncio
-    loop = asyncio.get_event_loop()
-    tasks = [loop.run_in_executor(executor, process_file, file_path) for file_path in files]
-    
-    # Process results as they complete
-    for task in asyncio.as_completed(tasks):
-        try:
-            result = await task
-            files_with_metadata.append(result)
-        except Exception as e:
-            logger.error(f"Error processing file: {str(e)}")
-    
+    # Load the entire metadata cache at once (async)
+    # This is much faster than 1000+ individual metadata lookups
+    try:
+        import json
+        metadata_cache_path = "metadata_cache.json"
+        # Use async file reading to avoid blocking the event loop
+        cache_data = await asyncio.to_thread(lambda: json.load(open(metadata_cache_path, 'r')))
+        cache_dict = cache_data.get('data', {})
+        logger.debug(f"Loaded metadata cache with {len(cache_dict)} entries")
+
+        # Process all files using cached data only
+        for file_path in files:
+            try:
+                # Extract category from path
+                path_parts = file_path.split('/')
+                category = '/'.join(path_parts[:-1]) if len(path_parts) > 1 else 'root'
+
+                # Get file name without extension
+                file_name = os.path.splitext(os.path.basename(file_path))[0]
+
+                # Get metadata from cache
+                cached_entry = cache_dict.get(file_path, {})
+                if isinstance(cached_entry, dict) and 'metadata' in cached_entry:
+                    metadata = cached_entry['metadata']
+                    coords_count = metadata.get('total_coordinates', 0)
+                    date_modified = cached_entry.get('mtime', 0)
+                else:
+                    coords_count = 0
+                    date_modified = 0
+
+                files_with_metadata.append({
+                    'path': file_path,
+                    'name': file_name,
+                    'category': category,
+                    'date_modified': date_modified,
+                    'coordinates_count': coords_count
+                })
+
+            except Exception as e:
+                logger.warning(f"Error processing {file_path}: {e}")
+                # Include file with minimal info if processing fails
+                path_parts = file_path.split('/')
+                category = '/'.join(path_parts[:-1]) if len(path_parts) > 1 else 'root'
+                files_with_metadata.append({
+                    'path': file_path,
+                    'name': os.path.splitext(os.path.basename(file_path))[0],
+                    'category': category,
+                    'date_modified': 0,
+                    'coordinates_count': 0
+                })
+
+    except Exception as e:
+        logger.error(f"Failed to load metadata cache, falling back to slow method: {e}")
+        # Fallback to original method if cache loading fails
+        # Create tasks only when needed
+        loop = asyncio.get_event_loop()
+        tasks = [loop.run_in_executor(executor, process_file, file_path) for file_path in files]
+
+        for task in asyncio.as_completed(tasks):
+            try:
+                result = await task
+                files_with_metadata.append(result)
+            except Exception as task_error:
+                logger.error(f"Error processing file: {str(task_error)}")
+
+    # Clean up executor
+    executor.shutdown(wait=False)
+
     return files_with_metadata
 
 @app.post("/upload_theta_rho")
@@ -464,11 +627,15 @@ async def get_theta_rho_coordinates(request: GetCoordinatesRequest):
         file_name = normalize_file_path(request.file_name)
         file_path = os.path.join(THETA_RHO_DIR, file_name)
         
-        if not os.path.exists(file_path):
+        # Check file existence asynchronously
+        exists = await asyncio.to_thread(os.path.exists, file_path)
+        if not exists:
             raise HTTPException(status_code=404, detail=f"File {file_name} not found")
-        
-        # Parse the theta-rho file
-        coordinates = parse_theta_rho_file(file_path)
+
+        # Parse the theta-rho file in a separate process for CPU-intensive work
+        # This prevents blocking the motion control thread
+        loop = asyncio.get_event_loop()
+        coordinates = await loop.run_in_executor(process_pool, parse_theta_rho_file, file_path)
         
         if not coordinates:
             raise HTTPException(status_code=400, detail="No valid coordinates found in file")
@@ -538,7 +705,7 @@ async def stop_execution():
     if not (state.conn.is_connected() if state.conn else False):
         logger.warning("Attempted to stop without a connection")
         raise HTTPException(status_code=400, detail="Connection not established")
-    pattern_manager.stop_actions()
+    await pattern_manager.stop_actions()
     return {"success": True}
 
 @app.post("/send_home")
@@ -586,18 +753,21 @@ async def delete_theta_rho_file(request: DeleteFileRequest):
     # Normalize file path for cross-platform compatibility
     normalized_file_name = normalize_file_path(request.file_name)
     file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
-    if not os.path.exists(file_path):
+
+    # Check file existence asynchronously
+    exists = await asyncio.to_thread(os.path.exists, file_path)
+    if not exists:
         logger.error(f"Attempted to delete non-existent file: {file_path}")
         raise HTTPException(status_code=404, detail="File not found")
 
     try:
-        # Delete the pattern file
-        os.remove(file_path)
+        # Delete the pattern file asynchronously
+        await asyncio.to_thread(os.remove, file_path)
         logger.info(f"Successfully deleted theta-rho file: {request.file_name}")
         
-        # Clean up cached preview image and metadata
+        # Clean up cached preview image and metadata asynchronously
         from modules.core.cache_manager import delete_pattern_cache
-        cache_cleanup_success = delete_pattern_cache(normalized_file_name)
+        cache_cleanup_success = await asyncio.to_thread(delete_pattern_cache, normalized_file_name)
         if cache_cleanup_success:
             logger.info(f"Successfully cleaned up cache for {request.file_name}")
         else:
@@ -616,8 +786,8 @@ async def move_to_center():
             raise HTTPException(status_code=400, detail="Connection not established")
 
         logger.info("Moving device to center position")
-        pattern_manager.reset_theta()
-        pattern_manager.move_polar(0, 0)
+        await pattern_manager.reset_theta()
+        await pattern_manager.move_polar(0, 0)
         return {"success": True}
     except Exception as e:
         logger.error(f"Failed to move to center: {str(e)}")
@@ -629,8 +799,8 @@ async def move_to_perimeter():
         if not (state.conn.is_connected() if state.conn else False):
             logger.warning("Attempted to move to perimeter without a connection")
             raise HTTPException(status_code=400, detail="Connection not established")
-        pattern_manager.reset_theta()
-        pattern_manager.move_polar(0, 1)
+        await pattern_manager.reset_theta()
+        await pattern_manager.move_polar(0, 1)
         return {"success": True}
     except Exception as e:
         logger.error(f"Failed to move to perimeter: {str(e)}")
@@ -646,18 +816,24 @@ async def preview_thr(request: DeleteFileRequest):
     normalized_file_name = normalize_file_path(request.file_name)
     # Construct the full path to the pattern file to check existence
     pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
-    if not os.path.exists(pattern_file_path):
+
+    # Check file existence asynchronously
+    exists = await asyncio.to_thread(os.path.exists, pattern_file_path)
+    if not exists:
         logger.error(f"Attempted to preview non-existent pattern file: {pattern_file_path}")
         raise HTTPException(status_code=404, detail="Pattern file not found")
 
     try:
         cache_path = get_cache_path(normalized_file_name)
-        
-        if not os.path.exists(cache_path):
+
+        # Check cache existence asynchronously
+        cache_exists = await asyncio.to_thread(os.path.exists, cache_path)
+        if not cache_exists:
             logger.info(f"Cache miss for {request.file_name}. Generating preview...")
             # Attempt to generate the preview if it's missing
             success = await generate_image_preview(normalized_file_name)
-            if not success or not os.path.exists(cache_path):
+            cache_exists_after = await asyncio.to_thread(os.path.exists, cache_path)
+            if not success or not cache_exists_after:
                 logger.error(f"Failed to generate or find preview for {request.file_name} after attempting generation.")
                 raise HTTPException(status_code=500, detail="Failed to generate preview image.")
 
@@ -739,7 +915,7 @@ async def send_coordinate(request: CoordinateRequest):
 
     try:
         logger.debug(f"Sending coordinate: theta={request.theta}, rho={request.rho}")
-        pattern_manager.move_polar(request.theta, request.rho)
+        await pattern_manager.move_polar(request.theta, request.rho)
         return {"success": True}
     except Exception as e:
         logger.error(f"Failed to send coordinate: {str(e)}")
@@ -1035,27 +1211,31 @@ async def preview_thr_batch(request: dict):
         "Content-Type": "application/json"
     }
 
-    results = {}
-    for file_name in file_names:
+    async def process_single_file(file_name):
+        """Process a single file and return its preview data."""
         t1 = time.time()
         try:
             # Normalize file path for cross-platform compatibility
             normalized_file_name = normalize_file_path(file_name)
             pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
-            if not os.path.exists(pattern_file_path):
+
+            # Check file existence asynchronously
+            exists = await asyncio.to_thread(os.path.exists, pattern_file_path)
+            if not exists:
                 logger.warning(f"Pattern file not found: {pattern_file_path}")
-                results[file_name] = {"error": "Pattern file not found"}
-                continue
+                return file_name, {"error": "Pattern file not found"}
 
             cache_path = get_cache_path(normalized_file_name)
-            
-            if not os.path.exists(cache_path):
+
+            # Check cache existence asynchronously
+            cache_exists = await asyncio.to_thread(os.path.exists, cache_path)
+            if not cache_exists:
                 logger.info(f"Cache miss for {file_name}. Generating preview...")
                 success = await generate_image_preview(normalized_file_name)
-                if not success or not os.path.exists(cache_path):
+                cache_exists_after = await asyncio.to_thread(os.path.exists, cache_path)
+                if not success or not cache_exists_after:
                     logger.error(f"Failed to generate or find preview for {file_name}")
-                    results[file_name] = {"error": "Failed to generate preview"}
-                    continue
+                    return file_name, {"error": "Failed to generate preview"}
 
             metadata = get_pattern_metadata(normalized_file_name)
             if metadata:
@@ -1063,25 +1243,34 @@ async def preview_thr_batch(request: dict):
                 last_coord_obj = metadata.get('last_coordinate')
             else:
                 logger.debug(f"Metadata cache miss for {file_name}, parsing file")
-                coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_file_path)
+                # Use process pool for CPU-intensive parsing
+                loop = asyncio.get_event_loop()
+                coordinates = await loop.run_in_executor(process_pool, parse_theta_rho_file, pattern_file_path)
                 first_coord = coordinates[0] if coordinates else None
                 last_coord = coordinates[-1] if coordinates else None
                 first_coord_obj = {"x": first_coord[0], "y": first_coord[1]} if first_coord else None
                 last_coord_obj = {"x": last_coord[0], "y": last_coord[1]} if last_coord else None
 
-            with open(cache_path, 'rb') as f:
-                image_data = f.read()
+            # Read image file asynchronously
+            image_data = await asyncio.to_thread(lambda: open(cache_path, 'rb').read())
             image_b64 = base64.b64encode(image_data).decode('utf-8')
-            results[file_name] = {
+            result = {
                 "image_data": f"data:image/webp;base64,{image_b64}",
                 "first_coordinate": first_coord_obj,
                 "last_coordinate": last_coord_obj
             }
+            logger.debug(f"Processed {file_name} in {time.time() - t1:.2f}s")
+            return file_name, result
         except Exception as e:
             logger.error(f"Error processing {file_name}: {str(e)}")
-            results[file_name] = {"error": str(e)}
-        finally:
-            logger.debug(f"Processed {file_name} in {time.time() - t1:.2f}s")
+            return file_name, {"error": str(e)}
+
+    # Process all files concurrently
+    tasks = [process_single_file(file_name) for file_name in file_names]
+    file_results = await asyncio.gather(*tasks)
+
+    # Convert results to dictionary
+    results = dict(file_results)
 
     logger.info(f"Total batch processing time: {time.time() - start:.2f}s for {len(file_names)} files")
     return JSONResponse(content=results, headers=headers)
@@ -1126,10 +1315,24 @@ def signal_handler(signum, frame):
     try:
         if state.led_controller:
             state.led_controller.set_power(0)
-        # Run cleanup operations synchronously to ensure completion
-        pattern_manager.stop_actions()
+        # Run cleanup operations - need to handle async in sync context
+        try:
+            # Try to run in existing loop if available
+            import asyncio
+            loop = asyncio.get_running_loop()
+            # If we're in an event loop, schedule the coroutine
+            import concurrent.futures
+            with concurrent.futures.ThreadPoolExecutor() as executor:
+                future = executor.submit(asyncio.run, pattern_manager.stop_actions())
+                future.result(timeout=5.0)  # Wait up to 5 seconds
+        except RuntimeError:
+            # No running loop, create a new one
+            import asyncio
+            asyncio.run(pattern_manager.stop_actions())
+        except Exception as cleanup_err:
+            logger.error(f"Error in async cleanup: {cleanup_err}")
+
         state.save()
-        
         logger.info("Cleanup completed")
     except Exception as e:
         logger.error(f"Error during cleanup: {str(e)}")

+ 82 - 30
modules/connection/connection_manager.py

@@ -4,12 +4,13 @@ import logging
 import serial
 import serial.tools.list_ports
 import websocket
+import asyncio
 
 from modules.core.state import state
 from modules.led.led_controller import effect_loading, effect_idle, effect_connected, LEDController
 logger = logging.getLogger(__name__)
 
-IGNORE_PORTS = ['/dev/cu.debug-console', '/dev/cu.Bluetooth-Incoming-Port', '/dev/ttyS0']
+IGNORE_PORTS = ['/dev/cu.debug-console', '/dev/cu.Bluetooth-Incoming-Port']
 
 ###############################################################################
 # Connection Abstraction
@@ -71,7 +72,14 @@ class SerialConnection(BaseConnection):
         return self.ser is not None and self.ser.is_open
 
     def close(self) -> None:
-        update_machine_position()
+        # Run async update_machine_position in sync context
+        try:
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+            loop.run_until_complete(update_machine_position())
+            loop.close()
+        except Exception as e:
+            logger.error(f"Error updating machine position on close: {e}")
         with self.lock:
             if self.ser.is_open:
                 self.ser.close()
@@ -119,7 +127,14 @@ class WebSocketConnection(BaseConnection):
         return self.ws is not None
 
     def close(self) -> None:
-        update_machine_position()
+        # Run async update_machine_position in sync context
+        try:
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+            loop.run_until_complete(update_machine_position())
+            loop.close()
+        except Exception as e:
+            logger.error(f"Error updating machine position on close: {e}")
         with self.lock:
             if self.ws:
                 self.ws.close()
@@ -220,24 +235,26 @@ def parse_machine_position(response: str):
     return None
 
 
-def send_grbl_coordinates(x, y, speed=600, timeout=2, home=False):
+async def send_grbl_coordinates(x, y, speed=600, timeout=2, home=False):
     """
     Send a G-code command to FluidNC and wait for an 'ok' response.
     If no response after set timeout, sets state to stop and disconnects.
     """
     logger.debug(f"Sending G-code: X{x} Y{y} at F{speed}")
-    
+
     # Track overall attempt time
     overall_start_time = time.time()
-    
+
     while True:
         try:
             gcode = f"$J=G91 G21 Y{y} F{speed}" if home else f"G1 X{x} Y{y} F{speed}"
-            state.conn.send(gcode + "\n")
+            # Use asyncio.to_thread for both send and receive operations to avoid blocking
+            await asyncio.to_thread(state.conn.send, gcode + "\n")
             logger.debug(f"Sent command: {gcode}")
             start_time = time.time()
             while True:
-                response = state.conn.readline()
+                # Use asyncio.to_thread for blocking I/O operations
+                response = await asyncio.to_thread(state.conn.readline)
                 logger.debug(f"Response: {response}")
                 if response.lower() == "ok":
                     logger.debug("Command execution confirmed.")
@@ -246,7 +263,7 @@ def send_grbl_coordinates(x, y, speed=600, timeout=2, home=False):
             # Store the error string inside the exception block
             error_str = str(e)
             logger.warning(f"Error sending command: {error_str}")
-            
+
             # Immediately return for device not configured errors
             if "Device not configured" in error_str or "Errno 6" in error_str:
                 logger.error(f"Device configuration error detected: {error_str}")
@@ -256,9 +273,9 @@ def send_grbl_coordinates(x, y, speed=600, timeout=2, home=False):
                 logger.info("Connection marked as disconnected due to device error")
                 return False
 
-            
+
         logger.warning(f"No 'ok' received for X{x} Y{y}, speed {speed}. Retrying...")
-        time.sleep(0.1)
+        await asyncio.sleep(0.1)
     
     # If we reach here, the timeout has occurred
     logger.error(f"Failed to receive 'ok' response after {max_total_attempt_time} seconds. Stopping and disconnecting.")
@@ -406,20 +423,27 @@ def home(timeout=15):
                     homing_speed = 120
                 logger.info("Sensorless homing not supported. Using crash homing")
                 logger.info(f"Homing with speed {homing_speed}")
-                if state.gear_ratio == 6.25 and state.x_steps_per_mm == 256:
-                    result = send_grbl_coordinates(0, - 30, homing_speed, home=True)
-                    if result == False:
-                        logger.error("Homing failed - send_grbl_coordinates returned False")
-                        homing_complete.set()
-                        return
-                    state.machine_y -= 30
-                else:
-                    result = send_grbl_coordinates(0, -22, homing_speed, home=True)
-                    if result == False:
-                        logger.error("Homing failed - send_grbl_coordinates returned False")
-                        homing_complete.set()
-                        return
-                    state.machine_y -= 22
+
+                # Run async function in new event loop
+                loop = asyncio.new_event_loop()
+                asyncio.set_event_loop(loop)
+                try:
+                    if state.gear_ratio == 6.25:
+                        result = loop.run_until_complete(send_grbl_coordinates(0, - 30, homing_speed, home=True))
+                        if result == False:
+                            logger.error("Homing failed - send_grbl_coordinates returned False")
+                            homing_complete.set()
+                            return
+                        state.machine_y -= 30
+                    else:
+                        result = loop.run_until_complete(send_grbl_coordinates(0, -22, homing_speed, home=True))
+                        if result == False:
+                            logger.error("Homing failed - send_grbl_coordinates returned False")
+                            homing_complete.set()
+                            return
+                        state.machine_y -= 22
+                finally:
+                    loop.close()
 
             state.current_theta = state.current_rho = 0
             homing_success = True
@@ -455,16 +479,44 @@ def home(timeout=15):
 
 def check_idle():
     """
-    Continuously check if the device is idle.
+    Continuously check if the device is idle (synchronous version).
     """
     logger.info("Checking idle")
     while True:
         response = get_status_response()
         if response and "Idle" in response:
             logger.info("Device is idle")
-            update_machine_position()
+            # Schedule async update_machine_position in the existing event loop
+            try:
+                # Try to schedule in existing event loop if available
+                try:
+                    loop = asyncio.get_running_loop()
+                    # Create a task but don't await it (fire and forget)
+                    asyncio.create_task(update_machine_position())
+                    logger.debug("Scheduled machine position update task")
+                except RuntimeError:
+                    # No event loop running, skip machine position update
+                    logger.debug("No event loop running, skipping machine position update")
+            except Exception as e:
+                logger.error(f"Error scheduling machine position update: {e}")
             return True
         time.sleep(1)
+
+async def check_idle_async():
+    """
+    Continuously check if the device is idle (async version).
+    """
+    logger.info("Checking idle (async)")
+    while True:
+        response = await asyncio.to_thread(get_status_response)
+        if response and "Idle" in response:
+            logger.info("Device is idle")
+            try:
+                await update_machine_position()
+            except Exception as e:
+                logger.error(f"Error updating machine position: {e}")
+            return True
+        await asyncio.sleep(1)
         
 
 def get_machine_position(timeout=5):
@@ -490,12 +542,12 @@ def get_machine_position(timeout=5):
     logger.warning("Timeout reached waiting for machine position")
     return None, None
 
-def update_machine_position():
+async def update_machine_position():
     if (state.conn.is_connected() if state.conn else False):
         try:
             logger.info('Saving machine position')
-            state.machine_x, state.machine_y = get_machine_position()
-            state.save()
+            state.machine_x, state.machine_y = await asyncio.to_thread(get_machine_position)
+            await asyncio.to_thread(state.save)
             logger.info(f'Machine position saved: {state.machine_x}, {state.machine_y}')
         except Exception as e:
             logger.error(f"Error updating machine position: {e}")

+ 6 - 2
modules/core/cache_manager.py

@@ -731,9 +731,13 @@ async def generate_cache_background():
         raise
 
 def get_cache_progress():
-    """Get the current cache generation progress."""
+    """Get the current cache generation progress.
+
+    Returns a reference to the cache_progress dict for read-only access.
+    The WebSocket handler should not modify this dict.
+    """
     global cache_progress
-    return cache_progress.copy()
+    return cache_progress  # Return reference instead of copy for better performance
 
 def is_cache_generation_needed():
     """Check if cache generation is needed."""

+ 477 - 119
modules/core/pattern_manager.py

@@ -1,9 +1,10 @@
 import os
+from zoneinfo import ZoneInfo
 import threading
 import time
 import random
 import logging
-from datetime import datetime
+from datetime import datetime, time as datetime_time
 from tqdm import tqdm
 from modules.connection import connection_manager
 from modules.core.state import state
@@ -11,6 +12,9 @@ from math import pi
 import asyncio
 import json
 from modules.led.led_controller import effect_playing, effect_idle
+import queue
+from dataclasses import dataclass
+from typing import Optional, Callable
 
 # Configure logging
 logger = logging.getLogger(__name__)
@@ -29,11 +33,304 @@ pattern_lock = asyncio.Lock()
 # Progress update task
 progress_update_task = None
 
+# Cache timezone at module level - read once per session
+_cached_timezone = None
+_cached_zoneinfo = None
+
+def _get_system_timezone():
+    """Get and cache the system timezone. Called once per session."""
+    global _cached_timezone, _cached_zoneinfo
+
+    if _cached_timezone is not None:
+        return _cached_zoneinfo
+
+    user_tz = 'UTC'  # Default fallback
+
+    # Try to read timezone from /etc/host-timezone (mounted from host)
+    try:
+        if os.path.exists('/etc/host-timezone'):
+            with open('/etc/host-timezone', 'r') as f:
+                user_tz = f.read().strip()
+                logger.info(f"Still Sands using timezone: {user_tz} (from host system)")
+        # Fallback to /etc/timezone if host-timezone doesn't exist
+        elif os.path.exists('/etc/timezone'):
+            with open('/etc/timezone', 'r') as f:
+                user_tz = f.read().strip()
+                logger.info(f"Still Sands using timezone: {user_tz} (from container)")
+        # Fallback to TZ environment variable
+        elif os.environ.get('TZ'):
+            user_tz = os.environ.get('TZ')
+            logger.info(f"Still Sands using timezone: {user_tz} (from environment)")
+        else:
+            logger.info("Still Sands using timezone: UTC (default)")
+    except Exception as e:
+        logger.debug(f"Could not read timezone: {e}")
+
+    # Cache the timezone
+    _cached_timezone = user_tz
+    try:
+        _cached_zoneinfo = ZoneInfo(user_tz)
+    except Exception as e:
+        logger.warning(f"Invalid timezone '{user_tz}', falling back to system time: {e}")
+        _cached_zoneinfo = None
+
+    return _cached_zoneinfo
+
+def is_in_scheduled_pause_period():
+    """Check if current time falls within any scheduled pause period."""
+    if not state.scheduled_pause_enabled or not state.scheduled_pause_time_slots:
+        return False
+
+    # Get cached timezone
+    tz_info = _get_system_timezone()
+
+    try:
+        # Get current time in user's timezone
+        if tz_info:
+            now = datetime.now(tz_info)
+        else:
+            now = datetime.now()
+    except Exception as e:
+        logger.warning(f"Error getting current time: {e}")
+        now = datetime.now()
+
+    current_time = now.time()
+    current_weekday = now.strftime("%A").lower()  # monday, tuesday, etc.
+
+    for slot in state.scheduled_pause_time_slots:
+        # Parse start and end times
+        try:
+            start_time = datetime_time.fromisoformat(slot['start_time'])
+            end_time = datetime_time.fromisoformat(slot['end_time'])
+        except (ValueError, KeyError):
+            logger.warning(f"Invalid time format in scheduled pause slot: {slot}")
+            continue
+
+        # Check if this slot applies to today
+        slot_applies_today = False
+        days_setting = slot.get('days', 'daily')
+
+        if days_setting == 'daily':
+            slot_applies_today = True
+        elif days_setting == 'weekdays':
+            slot_applies_today = current_weekday in ['monday', 'tuesday', 'wednesday', 'thursday', 'friday']
+        elif days_setting == 'weekends':
+            slot_applies_today = current_weekday in ['saturday', 'sunday']
+        elif days_setting == 'custom':
+            custom_days = slot.get('custom_days', [])
+            slot_applies_today = current_weekday in custom_days
+
+        if not slot_applies_today:
+            continue
+
+        # Check if current time is within the pause period
+        if start_time <= end_time:
+            # Normal case: start and end are on the same day
+            if start_time <= current_time <= end_time:
+                return True
+        else:
+            # Time spans midnight: start is before midnight, end is after midnight
+            if current_time >= start_time or current_time <= end_time:
+                return True
+
+    return False
+
+# Motion Control Thread Infrastructure
+@dataclass
+class MotionCommand:
+    """Represents a motion command for the motion control thread."""
+    command_type: str  # 'move', 'stop', 'pause', 'resume', 'shutdown'
+    theta: Optional[float] = None
+    rho: Optional[float] = None
+    speed: Optional[float] = None
+    callback: Optional[Callable] = None
+    future: Optional[asyncio.Future] = None
+
+class MotionControlThread:
+    """Dedicated thread for hardware motion control operations."""
+
+    def __init__(self):
+        self.command_queue = queue.Queue()
+        self.thread = None
+        self.running = False
+        self.paused = False
+
+    def start(self):
+        """Start the motion control thread."""
+        if self.thread and self.thread.is_alive():
+            return
+
+        self.running = True
+        self.thread = threading.Thread(target=self._motion_loop, daemon=True)
+        self.thread.start()
+        logger.info("Motion control thread started")
+
+    def stop(self):
+        """Stop the motion control thread."""
+        if not self.running:
+            return
+
+        self.running = False
+        # Send shutdown command
+        self.command_queue.put(MotionCommand('shutdown'))
+
+        if self.thread and self.thread.is_alive():
+            self.thread.join(timeout=5.0)
+        logger.info("Motion control thread stopped")
+
+    def _motion_loop(self):
+        """Main loop for the motion control thread."""
+        logger.info("Motion control thread loop started")
+
+        while self.running:
+            try:
+                # Get command with timeout to allow periodic checks
+                command = self.command_queue.get(timeout=1.0)
+
+                if command.command_type == 'shutdown':
+                    break
+
+                elif command.command_type == 'move':
+                    self._execute_move(command)
+
+                elif command.command_type == 'pause':
+                    self.paused = True
+
+                elif command.command_type == 'resume':
+                    self.paused = False
+
+                elif command.command_type == 'stop':
+                    # Clear any pending commands
+                    while not self.command_queue.empty():
+                        try:
+                            self.command_queue.get_nowait()
+                        except queue.Empty:
+                            break
+
+                self.command_queue.task_done()
+
+            except queue.Empty:
+                # Timeout - continue loop for shutdown check
+                continue
+            except Exception as e:
+                logger.error(f"Error in motion control thread: {e}")
+
+        logger.info("Motion control thread loop ended")
+
+    def _execute_move(self, command: MotionCommand):
+        """Execute a move command in the motion thread."""
+        try:
+            # Wait if paused
+            while self.paused and self.running:
+                time.sleep(0.1)
+
+            if not self.running:
+                return
+
+            # Execute the actual motion using sync version
+            self._move_polar_sync(command.theta, command.rho, command.speed)
+
+            # Signal completion if future provided
+            if command.future and not command.future.done():
+                command.future.get_loop().call_soon_threadsafe(
+                    command.future.set_result, None
+                )
+
+        except Exception as e:
+            logger.error(f"Error executing move command: {e}")
+            if command.future and not command.future.done():
+                command.future.get_loop().call_soon_threadsafe(
+                    command.future.set_exception, e
+                )
+
+    def _move_polar_sync(self, theta: float, rho: float, speed: Optional[float] = None):
+        """Synchronous version of move_polar for use in motion thread."""
+        # This is the original sync logic but running in dedicated thread
+        if state.table_type == 'dune_weaver_mini':
+            x_scaling_factor = 2
+            y_scaling_factor = 3.7
+        else:
+            x_scaling_factor = 2
+            y_scaling_factor = 5
+
+        delta_theta = theta - state.current_theta
+        delta_rho = rho - state.current_rho
+        x_increment = delta_theta * 100 / (2 * pi * x_scaling_factor)
+        y_increment = delta_rho * 100 / y_scaling_factor
+
+        x_total_steps = state.x_steps_per_mm * (100/x_scaling_factor)
+        y_total_steps = state.y_steps_per_mm * (100/y_scaling_factor)
+
+        offset = x_increment * (x_total_steps * x_scaling_factor / (state.gear_ratio * y_total_steps * y_scaling_factor))
+
+        if state.table_type == 'dune_weaver_mini' or state.y_steps_per_mm == 546:
+            y_increment -= offset
+        else:
+            y_increment += offset
+
+        new_x_abs = state.machine_x + x_increment
+        new_y_abs = state.machine_y + y_increment
+
+        # Use provided speed or fall back to state.speed
+        actual_speed = speed if speed is not None else state.speed
+
+        # Call sync version of send_grbl_coordinates in this thread
+        self._send_grbl_coordinates_sync(round(new_x_abs, 3), round(new_y_abs, 3), actual_speed)
+
+        # Update state
+        state.current_theta = theta
+        state.current_rho = rho
+        state.machine_x = new_x_abs
+        state.machine_y = new_y_abs
+
+    def _send_grbl_coordinates_sync(self, x: float, y: float, speed: int = 600, timeout: int = 2, home: bool = False):
+        """Synchronous version of send_grbl_coordinates for motion thread."""
+        logger.debug(f"Motion thread sending G-code: X{x} Y{y} at F{speed}")
+
+        # Track overall attempt time
+        overall_start_time = time.time()
+
+        while True:
+            try:
+                gcode = f"$J=G91 G21 Y{y} F{speed}" if home else f"G1 X{x} Y{y} F{speed}"
+                state.conn.send(gcode + "\n")
+                logger.debug(f"Motion thread sent command: {gcode}")
+
+                start_time = time.time()
+                while True:
+                    response = state.conn.readline()
+                    logger.debug(f"Motion thread response: {response}")
+                    if response.lower() == "ok":
+                        logger.debug("Motion thread: Command execution confirmed.")
+                        return
+
+            except Exception as e:
+                error_str = str(e)
+                logger.warning(f"Motion thread error sending command: {error_str}")
+
+                # Immediately return for device not configured errors
+                if "Device not configured" in error_str or "Errno 6" in error_str:
+                    logger.error(f"Motion thread: Device configuration error detected: {error_str}")
+                    state.stop_requested = True
+                    state.conn = None
+                    state.is_connected = False
+                    logger.info("Connection marked as disconnected due to device error")
+                    return False
+
+            logger.warning(f"Motion thread: No 'ok' received for X{x} Y{y}, speed {speed}. Retrying...")
+            time.sleep(0.1)
+
+# Global motion control thread instance
+motion_controller = MotionControlThread()
+
 async def cleanup_pattern_manager():
     """Clean up pattern manager resources"""
     global progress_update_task, pattern_lock, pause_event
-    
+
     try:
+        # Stop motion control thread
+        motion_controller.stop()
+
         # Cancel progress update task if running
         if progress_update_task and not progress_update_task.done():
             try:
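The window check in `is_in_scheduled_pause_period` treats `start_time > end_time` as a window that wraps past midnight. A short worked example of that comparison; `in_window` is an illustrative helper, not a function in this module.

```python
from datetime import time

def in_window(now: time, start: time, end: time) -> bool:
    """True if `now` is inside [start, end]; start > end means the window wraps midnight."""
    if start <= end:
        return start <= now <= end          # same-day window, e.g. 13:00-15:00
    return now >= start or now <= end       # wraps midnight, e.g. 22:30-07:00

assert in_window(time(23, 15), time(22, 30), time(7, 0))      # late evening: paused
assert in_window(time(6, 45), time(22, 30), time(7, 0))       # early morning: paused
assert not in_window(time(12, 0), time(22, 30), time(7, 0))   # midday: running
```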
@@ -45,7 +342,7 @@ async def cleanup_pattern_manager():
                     pass
             except Exception as e:
                 logger.error(f"Error cancelling progress update task: {e}")
-        
+
         # Clean up pattern lock
         if pattern_lock:
             try:
@@ -54,7 +351,7 @@ async def cleanup_pattern_manager():
                 pattern_lock = None
             except Exception as e:
                 logger.error(f"Error cleaning up pattern lock: {e}")
-        
+
         # Clean up pause event
         if pause_event:
             try:
@@ -62,7 +359,7 @@ async def cleanup_pattern_manager():
                 pause_event = None
             except Exception as e:
                 logger.error(f"Error cleaning up pause event: {e}")
-        
+
         # Clean up pause condition from state
         if state.pause_condition:
             try:
@@ -79,12 +376,12 @@ async def cleanup_pattern_manager():
         state.pause_requested = False
         state.stop_requested = True
         state.is_clearing = False
-        
+
         # Reset machine position
         await connection_manager.update_machine_position()
-        
+
         logger.info("Pattern manager resources cleaned up")
-        
+
     except Exception as e:
         logger.error(f"Error during pattern manager cleanup: {e}")
     finally:
@@ -95,14 +392,22 @@ async def cleanup_pattern_manager():
 
 def list_theta_rho_files():
     files = []
-    for root, _, filenames in os.walk(THETA_RHO_DIR):
-        for file in filenames:
+    for root, dirs, filenames in os.walk(THETA_RHO_DIR):
+        # Skip cached_images directories to avoid scanning thousands of WebP files
+        if 'cached_images' in dirs:
+            dirs.remove('cached_images')
+
+        # Filter .thr files during traversal for better performance
+        thr_files = [f for f in filenames if f.endswith('.thr')]
+
+        for file in thr_files:
             relative_path = os.path.relpath(os.path.join(root, file), THETA_RHO_DIR)
             # Normalize path separators to always use forward slashes for consistency across platforms
             relative_path = relative_path.replace(os.sep, '/')
             files.append(relative_path)
+
     logger.debug(f"Found {len(files)} theta-rho files")
-    return [file for file in files if file.endswith('.thr')]
+    return files
 
 def parse_theta_rho_file(file_path):
     """Parse a theta-rho file and return a list of (theta, rho) pairs."""
@@ -127,36 +432,62 @@ def parse_theta_rho_file(file_path):
         logger.debug(f"Parsed {len(coordinates)} coordinates from {file_path}")
     return coordinates
 
-def get_first_rho_from_cache(file_path):
-    """Get the first rho value from cached metadata, falling back to file parsing if needed."""
+def get_first_rho_from_cache(file_path, cache_data=None):
+    """Get the first rho value from cached metadata, falling back to file parsing if needed.
+
+    Args:
+        file_path: Path to the pattern file
+        cache_data: Optional pre-loaded cache data dict to avoid repeated disk I/O
+    """
     try:
         # Import cache_manager locally to avoid circular import
         from modules.core import cache_manager
-        
+
         # Try to get from metadata cache first
-        file_name = os.path.basename(file_path)
-        metadata = cache_manager.get_pattern_metadata(file_name)
-        
-        if metadata and 'first_coordinate' in metadata:
-            # In the cache, 'x' is theta and 'y' is rho
-            return metadata['first_coordinate']['y']
-        
+        # Use relative path from THETA_RHO_DIR to match cache keys (which include subdirectories)
+        file_name = os.path.relpath(file_path, THETA_RHO_DIR)
+
+        # Use provided cache_data if available, otherwise load from disk
+        if cache_data is not None:
+            # Extract metadata directly from provided cache
+            data_section = cache_data.get('data', {})
+            if file_name in data_section:
+                cached_entry = data_section[file_name]
+                metadata = cached_entry.get('metadata')
+                # When cache_data is provided, trust it without checking mtime
+                # This significantly speeds up bulk operations (playlists with 1000+ patterns)
+                # by avoiding 1000+ os.path.getmtime() calls on slow storage (e.g., Pi SD cards)
+                if metadata and 'first_coordinate' in metadata:
+                    return metadata['first_coordinate']['y']
+        else:
+            # Fall back to loading cache from disk (original behavior)
+            metadata = cache_manager.get_pattern_metadata(file_name)
+            if metadata and 'first_coordinate' in metadata:
+                # In the cache, 'x' is theta and 'y' is rho
+                return metadata['first_coordinate']['y']
+
         # Fallback to parsing the file if not in cache
         logger.debug(f"Metadata not cached for {file_name}, parsing file")
         coordinates = parse_theta_rho_file(file_path)
         if coordinates:
             return coordinates[0][1]  # Return rho value
-        
+
         return None
     except Exception as e:
         logger.warning(f"Error getting first rho from cache for {file_path}: {str(e)}")
         return None
 
-def get_clear_pattern_file(clear_pattern_mode, path=None):
-    """Return a .thr file path based on pattern_name and table type."""
+def get_clear_pattern_file(clear_pattern_mode, path=None, cache_data=None):
+    """Return a .thr file path based on pattern_name and table type.
+
+    Args:
+        clear_pattern_mode: The clear pattern mode to use
+        path: Optional path to the pattern file for adaptive mode
+        cache_data: Optional pre-loaded cache data dict to avoid repeated disk I/O
+    """
     if not clear_pattern_mode or clear_pattern_mode == 'none':
         return
-    
+
     # Define patterns for each table type
     clear_patterns = {
         'dune_weaver': {
@@ -177,16 +508,16 @@ def get_clear_pattern_file(clear_pattern_mode, path=None):
             'clear_sideway': './patterns/clear_sideway_pro.thr'
         }
     }
-    
+
     # Get patterns for current table type, fallback to standard patterns if type not found
     table_patterns = clear_patterns.get(state.table_type, clear_patterns['dune_weaver'])
-    
+
     # Check for custom patterns first
     if state.custom_clear_from_out and clear_pattern_mode in ['clear_from_out', 'adaptive']:
         if clear_pattern_mode == 'adaptive':
             # For adaptive mode, use cached metadata to check first rho
             if path:
-                first_rho = get_first_rho_from_cache(path)
+                first_rho = get_first_rho_from_cache(path, cache_data)
                 if first_rho is not None and first_rho < 0.5:
                     # Use custom clear_from_out if set
                     custom_path = os.path.join('./patterns', state.custom_clear_from_out)
@@ -198,12 +529,12 @@ def get_clear_pattern_file(clear_pattern_mode, path=None):
             if os.path.exists(custom_path):
                 logger.debug(f"Using custom clear_from_out: {custom_path}")
                 return custom_path
-    
+
     if state.custom_clear_from_in and clear_pattern_mode in ['clear_from_in', 'adaptive']:
         if clear_pattern_mode == 'adaptive':
             # For adaptive mode, use cached metadata to check first rho
             if path:
-                first_rho = get_first_rho_from_cache(path)
+                first_rho = get_first_rho_from_cache(path, cache_data)
                 if first_rho is not None and first_rho >= 0.5:
                     # Use custom clear_from_in if set
                     custom_path = os.path.join('./patterns', state.custom_clear_from_in)
@@ -215,9 +546,9 @@ def get_clear_pattern_file(clear_pattern_mode, path=None):
             if os.path.exists(custom_path):
                 logger.debug(f"Using custom clear_from_in: {custom_path}")
                 return custom_path
-    
+
     logger.debug(f"Clear pattern mode: {clear_pattern_mode} for table type: {state.table_type}")
-    
+
     if clear_pattern_mode == "random":
         return random.choice(list(table_patterns.values()))
 
@@ -225,13 +556,13 @@ def get_clear_pattern_file(clear_pattern_mode, path=None):
         if not path:
             logger.warning("No path provided for adaptive clear pattern")
             return random.choice(list(table_patterns.values()))
-            
+
         # Use cached metadata to get first rho value
-        first_rho = get_first_rho_from_cache(path)
+        first_rho = get_first_rho_from_cache(path, cache_data)
         if first_rho is None:
             logger.warning("Could not determine first rho value for adaptive clear pattern")
             return random.choice(list(table_patterns.values()))
-            
+
         if first_rho < 0.5:
             return table_patterns['clear_from_out']
         else:
@@ -291,15 +622,15 @@ async def run_theta_rho_file(file_path, is_playlist=False):
             logger.info(f"Running normal pattern at initial speed {state.speed}")
 
         state.execution_progress = (0, total_coordinates, None, 0)
-        
-        # stop actions without resetting the playlist
-        stop_actions(clear_playlist=False)
+
+        # stop actions without resetting the playlist, and don't wait for lock (we already have it)
+        await stop_actions(clear_playlist=False, wait_for_lock=False)
 
         state.current_playing_file = file_path
         state.stop_requested = False
         logger.info(f"Starting pattern execution: {file_path}")
         logger.info(f"t: {state.current_theta}, r: {state.current_rho}")
-        reset_theta()
+        await reset_theta()
         
         start_time = time.time()
         if state.led_controller:
@@ -323,19 +654,48 @@ async def run_theta_rho_file(file_path, is_playlist=False):
                 
                 if state.skip_requested:
                     logger.info("Skipping pattern...")
-                    connection_manager.check_idle()
+                    await connection_manager.check_idle_async()
                     if state.led_controller:
                         effect_idle(state.led_controller)
                     break
 
-                # Wait for resume if paused
-                if state.pause_requested:
-                    logger.info("Execution paused...")
-                    if state.led_controller:
+                # Wait for resume if paused (manual or scheduled)
+                manual_pause = state.pause_requested
+                scheduled_pause = is_in_scheduled_pause_period()
+
+                if manual_pause or scheduled_pause:
+                    if manual_pause and scheduled_pause:
+                        logger.info("Execution paused (manual + scheduled pause active)...")
+                    elif manual_pause:
+                        logger.info("Execution paused (manual)...")
+                    else:
+                        logger.info("Execution paused (scheduled pause period)...")
+                        # Turn off WLED if scheduled pause and control_wled is enabled
+                        if state.scheduled_pause_control_wled and state.led_controller:
+                            logger.info("Turning off WLED lights during Still Sands period")
+                            state.led_controller.set_power(0)
+
+                    # Only show the idle effect if NOT in a scheduled pause with WLED control
+                    # (a plain manual pause still shows the idle effect)
+                    if state.led_controller and not (scheduled_pause and state.scheduled_pause_control_wled):
                         effect_idle(state.led_controller)
-                    await pause_event.wait()
+
+                    # Remember if we turned off WLED for scheduled pause
+                    wled_was_off_for_scheduled = scheduled_pause and state.scheduled_pause_control_wled and not manual_pause
+
+                    # Wait until both manual pause is released AND we're outside scheduled pause period
+                    while state.pause_requested or is_in_scheduled_pause_period():
+                        await asyncio.sleep(1)  # Check every second
+                        # Also wait for the pause event in case of manual pause
+                        if state.pause_requested:
+                            await pause_event.wait()
+
                     logger.info("Execution resumed...")
                     if state.led_controller:
+                        # Turn WLED back on if it was turned off for scheduled pause
+                        if wled_was_off_for_scheduled:
+                            logger.info("Turning WLED lights back on as Still Sands period ended")
+                            state.led_controller.set_power(1)
                         effect_playing(state.led_controller)
 
                 # Dynamically determine the speed for each movement
@@ -345,7 +705,7 @@ async def run_theta_rho_file(file_path, is_playlist=False):
                 else:
                     current_speed = state.speed
                     
-                move_polar(theta, rho, current_speed)
+                await move_polar(theta, rho, current_speed)
                 
                 # Update progress for all coordinates including the first one
                 pbar.update(1)
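
The pause loop above leans on is_in_scheduled_pause_period(), which is not shown in this diff. A plausible sketch of such a check, assuming it consults the enabled flag and the time-slot dictionaries the settings page submits (start_time, end_time, days, custom_days), including slots that span midnight; parameterised here so it runs standalone:

```python
# Hypothetical sketch of is_in_scheduled_pause_period(); the real
# implementation is not part of this diff and may differ.
from datetime import datetime

def in_scheduled_pause(enabled, time_slots, now=None):
    """Return True if 'now' falls inside any configured still period."""
    if not enabled:
        return False
    now = now or datetime.now()
    weekday = now.strftime('%A').lower()   # e.g. 'monday'
    current = now.strftime('%H:%M')        # zero-padded, so string compare works

    for slot in time_slots:
        days = slot.get('days', 'daily')
        if days == 'weekdays' and weekday in ('saturday', 'sunday'):
            continue
        if days == 'weekends' and weekday not in ('saturday', 'sunday'):
            continue
        if days == 'custom' and weekday not in slot.get('custom_days', []):
            continue

        start, end = slot['start_time'], slot['end_time']
        if start <= end:
            inside = start <= current < end             # same-day slot
        else:
            inside = current >= start or current < end  # spans midnight
        if inside:
            return True
    return False

slots = [{'start_time': '22:00', 'end_time': '08:00', 'days': 'daily', 'custom_days': []}]
print(in_scheduled_pause(True, slots, datetime(2025, 1, 1, 23, 30)))  # True
```
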
@@ -366,7 +726,7 @@ async def run_theta_rho_file(file_path, is_playlist=False):
             logger.error("Device is not connected. Stopping pattern execution.")
             return
             
-        connection_manager.check_idle()
+        await connection_manager.check_idle_async()
         
         # Set LED back to idle when pattern completes normally (not stopped early)
         if state.led_controller and not state.stop_requested:
@@ -404,26 +764,29 @@ async def run_theta_rho_files(file_paths, pause_time=0, clear_pattern=None, run_
         progress_update_task = asyncio.create_task(broadcast_progress())
     
     
-    if shuffle:
-        random.shuffle(file_paths)
-        logger.info("Playlist shuffled")
-
-
     if shuffle:
         random.shuffle(file_paths)
         logger.info("Playlist shuffled")
 
     try:
         while True:
+            # Load metadata cache once for all patterns (significant performance improvement)
+            # This avoids reading the cache file from disk for every pattern
+            cache_data = None
+            if clear_pattern and clear_pattern in ['adaptive', 'clear_from_in', 'clear_from_out']:
+                from modules.core import cache_manager
+                cache_data = cache_manager.load_metadata_cache()
+                logger.info(f"Loaded metadata cache for {len(cache_data.get('data', {}))} patterns")
+
             # Construct the complete pattern sequence
             pattern_sequence = []
             for path in file_paths:
                 # Add clear pattern if specified
                 if clear_pattern and clear_pattern != 'none':
-                    clear_file_path = get_clear_pattern_file(clear_pattern, path)
+                    clear_file_path = get_clear_pattern_file(clear_pattern, path, cache_data)
                     if clear_file_path:
                         pattern_sequence.append(clear_file_path)
-                
+
                 # Add main pattern
                 pattern_sequence.append(path)
 
@@ -509,8 +872,14 @@ async def run_theta_rho_files(file_paths, pause_time=0, clear_pattern=None, run_
         
         logger.info("All requested patterns completed (or stopped) and state cleared")
 
-def stop_actions(clear_playlist = True):
-    """Stop all current actions."""
+async def stop_actions(clear_playlist = True, wait_for_lock = True):
+    """Stop all current actions and wait for the running pattern to fully stop.
+
+    Args:
+        clear_playlist: Whether to clear playlist state
+        wait_for_lock: Whether to wait for pattern_lock to be released. Set to False when
+                      called from within pattern execution to avoid deadlock.
+    """
     try:
         with state.pause_condition:
             state.pause_requested = False
@@ -518,85 +887,71 @@ def stop_actions(clear_playlist = True):
             state.current_playing_file = None
             state.execution_progress = None
             state.is_clearing = False
-            
+
             if clear_playlist:
                 # Clear playlist state
                 state.current_playlist = None
                 state.current_playlist_index = None
                 state.playlist_mode = None
-                
+
                 # Cancel progress update task if we're clearing the playlist
                 global progress_update_task
                 if progress_update_task and not progress_update_task.done():
                     progress_update_task.cancel()
-                
+
             state.pause_condition.notify_all()
-            connection_manager.update_machine_position()
+
+        # Wait for the pattern lock to be released before continuing
+        # This ensures that when stop_actions completes, the pattern has fully stopped
+        # Skip this if called from within pattern execution to avoid deadlock
+        if wait_for_lock and pattern_lock.locked():
+            logger.info("Waiting for pattern to fully stop...")
+            # Acquire and immediately release the lock to ensure the pattern has exited
+            async with pattern_lock:
+                logger.info("Pattern lock acquired - pattern has fully stopped")
+
+        # Call async function directly since we're in async context
+        await connection_manager.update_machine_position()
     except Exception as e:
         logger.error(f"Error during stop_actions: {e}")
         # Ensure we still update machine position even if there's an error
-        connection_manager.update_machine_position()
+        try:
+            await connection_manager.update_machine_position()
+        except Exception as update_err:
+            logger.error(f"Error updating machine position on error: {update_err}")
 
-def move_polar(theta, rho, speed=None):
+async def move_polar(theta, rho, speed=None):
     """
-    This functions take in a pair of theta rho coordinate, compute the distance to travel based on current theta, rho,
-    and translate the motion to gcode jog command and sent to grbl. 
-    
-    Since having similar steps_per_mm will make x and y axis moves at around the same speed, we have to scale the 
-    x_steps_per_mm and y_steps_per_mm so that they are roughly the same. Here's the range of motion:
-    
-    X axis (angular): 50mm = 1 revolution
-    Y axis (radial): 0 => 20mm = theta 0 (center) => 1 (perimeter)
-    
+    Queue a motion command to be executed in the dedicated motion control thread.
+    This makes motion control non-blocking for API endpoints.
+
     Args:
-        theta (_type_): _description_
-        rho (_type_): _description_
+        theta (float): Target theta coordinate
+        rho (float): Target rho coordinate
         speed (int, optional): Speed override. If None, uses state.speed
     """
-    # Adding soft limit to reduce hardware sound
-    # soft_limit_inner = 0.01
-    # if rho < soft_limit_inner:
-    #     rho = soft_limit_inner
-    
-    # soft_limit_outter = 0.015
-    # if rho > (1-soft_limit_outter):
-    #     rho = (1-soft_limit_outter)
-    
-    if state.table_type == 'dune_weaver_mini':
-        x_scaling_factor = 2
-        y_scaling_factor = 3.7
-    else:
-        x_scaling_factor = 2
-        y_scaling_factor = 5
-    
-    delta_theta = theta - state.current_theta
-    delta_rho = rho - state.current_rho
-    x_increment = delta_theta * 100 / (2 * pi * x_scaling_factor)  # Added -1 to reverse direction
-    y_increment = delta_rho * 100 / y_scaling_factor
-    
-    x_total_steps = state.x_steps_per_mm * (100/x_scaling_factor)
-    y_total_steps = state.y_steps_per_mm * (100/y_scaling_factor)
-        
-    offset = x_increment * (x_total_steps * x_scaling_factor / (state.gear_ratio * y_total_steps * y_scaling_factor))
+    # Ensure motion control thread is running
+    if not motion_controller.running:
+        motion_controller.start()
 
-    if state.table_type == 'dune_weaver_mini':
-        y_increment -= offset
-    else:
-        y_increment += offset
-    
-    new_x_abs = state.machine_x + x_increment
-    new_y_abs = state.machine_y + y_increment
-    
-    # Use provided speed or fall back to state.speed
-    actual_speed = speed if speed is not None else state.speed
-    
-    # dynamic_speed = compute_dynamic_speed(rho, max_speed=actual_speed)
-    
-    connection_manager.send_grbl_coordinates(round(new_x_abs, 3), round(new_y_abs,3), actual_speed)
-    state.current_theta = theta
-    state.current_rho = rho
-    state.machine_x = new_x_abs
-    state.machine_y = new_y_abs
+    # Create future for async/await pattern
+    loop = asyncio.get_event_loop()
+    future = loop.create_future()
+
+    # Create and queue motion command
+    command = MotionCommand(
+        command_type='move',
+        theta=theta,
+        rho=rho,
+        speed=speed,
+        future=future
+    )
+
+    motion_controller.command_queue.put(command)
+    logger.debug(f"Queued motion command: theta={theta}, rho={rho}, speed={speed}")
+
+    # Wait for command completion
+    await future
     
 def pause_execution():
     """Pause pattern execution using asyncio Event."""
@@ -612,10 +967,11 @@ def resume_execution():
     pause_event.set()  # Set the event to resume execution
     return True
     
-def reset_theta():
+async def reset_theta():
     logger.info('Resetting Theta')
     state.current_theta = state.current_theta % (2 * pi)
-    connection_manager.update_machine_position()
+    # Call async function directly since we're in async context
+    await connection_manager.update_machine_position()
 
 def set_speed(new_speed):
     state.speed = new_speed
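
move_polar() above now only enqueues a MotionCommand and awaits its future; neither MotionCommand nor the motion_controller worker appears in this diff. A minimal sketch of how such a thread-backed queue can hand results back to asyncio (class and field names are illustrative, not the project's actual definitions):

```python
# Illustrative sketch of a motion queue serviced by a worker thread and
# resolved through asyncio futures; the project's real classes may differ.
import asyncio
import queue
import threading
from dataclasses import dataclass
from typing import Optional

@dataclass
class MotionCommand:
    command_type: str
    theta: float
    rho: float
    speed: Optional[int]
    future: asyncio.Future

class MotionController:
    def __init__(self):
        self.command_queue = queue.Queue()
        self.running = False

    def start(self):
        self.running = True
        threading.Thread(target=self._worker, daemon=True).start()

    def _worker(self):
        while self.running:
            cmd = self.command_queue.get()
            # ... translate (theta, rho) into a GRBL move and send it here ...
            # Resolve the awaiting coroutine safely from this worker thread.
            cmd.future.get_loop().call_soon_threadsafe(cmd.future.set_result, None)
```

The await side then mirrors the diff: create a future with loop.create_future(), put the command on the queue, and await the future until the worker signals completion.
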
@@ -625,7 +981,9 @@ def get_status():
     """Get the current status of pattern execution."""
     status = {
         "current_file": state.current_playing_file,
-        "is_paused": state.pause_requested,
+        "is_paused": state.pause_requested or is_in_scheduled_pause_period(),
+        "manual_pause": state.pause_requested,
+        "scheduled_pause": is_in_scheduled_pause_period(),
         "is_running": bool(state.current_playing_file and not state.stop_requested),
         "progress": None,
         "playlist": None,

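With the change above, get_status() distinguishes a user-initiated pause from a Still Sands pause while keeping is_paused as the combined flag. A small illustration of how a caller might branch on the new keys (the values here are made up):

```python
# Example: interpreting the new pause fields from the status dict.
status = {
    "is_paused": True,       # manual OR scheduled
    "manual_pause": False,
    "scheduled_pause": True,
    "is_running": True,
}

if status["is_paused"]:
    if status["scheduled_pause"] and not status["manual_pause"]:
        print("Paused by the Still Sands schedule; resumes automatically.")
    else:
        print("Paused by the user; waiting for a manual resume.")
```
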
+ 12 - 1
modules/core/state.py

@@ -56,10 +56,15 @@ class AppState:
         # auto_play mode settings
         self.auto_play_enabled = False
         self.auto_play_playlist = None  # Playlist to auto-play in auto_play mode
-        self.auto_play_run_mode = "loop"  # "single" or "loop" 
+        self.auto_play_run_mode = "loop"  # "single" or "loop"
         self.auto_play_pause_time = 5.0  # Pause between patterns in seconds
         self.auto_play_clear_pattern = "adaptive"  # Clear pattern option
         self.auto_play_shuffle = False  # Shuffle playlist
+
+        # Still Sands settings
+        self.scheduled_pause_enabled = False
+        self.scheduled_pause_time_slots = []  # List of time slot dictionaries
+        self.scheduled_pause_control_wled = False  # Turn off WLED during pause periods
         
         self.load()
 
@@ -192,6 +197,9 @@ class AppState:
             "auto_play_pause_time": self.auto_play_pause_time,
             "auto_play_clear_pattern": self.auto_play_clear_pattern,
             "auto_play_shuffle": self.auto_play_shuffle,
+            "scheduled_pause_enabled": self.scheduled_pause_enabled,
+            "scheduled_pause_time_slots": self.scheduled_pause_time_slots,
+            "scheduled_pause_control_wled": self.scheduled_pause_control_wled,
         }
 
     def from_dict(self, data):
@@ -228,6 +236,9 @@ class AppState:
         self.auto_play_pause_time = data.get("auto_play_pause_time", 5.0)
         self.auto_play_clear_pattern = data.get("auto_play_clear_pattern", "adaptive")
         self.auto_play_shuffle = data.get("auto_play_shuffle", False)
+        self.scheduled_pause_enabled = data.get("scheduled_pause_enabled", False)
+        self.scheduled_pause_time_slots = data.get("scheduled_pause_time_slots", [])
+        self.scheduled_pause_control_wled = data.get("scheduled_pause_control_wled", False)
 
     def save(self):
         """Save the current state to a JSON file."""

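Together these three fields round-trip through to_dict()/from_dict() like any other setting. Illustrative values showing what a saved configuration could contain (the slot fields match what the settings page submits; the concrete times and days are examples):

```python
# Example of the new keys as they would appear in the persisted state
# (values are illustrative).
example = {
    "scheduled_pause_enabled": True,
    "scheduled_pause_control_wled": True,
    "scheduled_pause_time_slots": [
        {
            "start_time": "22:00",
            "end_time": "08:00",     # spans midnight
            "days": "daily",
            "custom_days": [],
        },
        {
            "start_time": "09:00",
            "end_time": "17:00",
            "days": "custom",
            "custom_days": ["monday", "wednesday", "friday"],
        },
    ],
}
```
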
+ 6 - 5
modules/core/version_manager.py

@@ -20,20 +20,21 @@ class VersionManager:
         self.github_api_url = f"https://api.github.com/repos/{self.repo_owner}/{self.repo_name}"
         self._current_version = None
         
-    def get_current_version(self) -> str:
-        """Read current version from VERSION file"""
+    async def get_current_version(self) -> str:
+        """Read current version from VERSION file (async)"""
         if self._current_version is None:
             try:
                 version_file = Path(__file__).parent.parent.parent / "VERSION"
                 if version_file.exists():
-                    self._current_version = version_file.read_text().strip()
+                    self._current_version = await asyncio.to_thread(version_file.read_text)
+                    self._current_version = self._current_version.strip()
                 else:
                     logger.warning("VERSION file not found, using default version")
                     self._current_version = "1.0.0"
             except Exception as e:
                 logger.error(f"Error reading VERSION file: {e}")
                 self._current_version = "1.0.0"
-        
+
         return self._current_version
     
     async def get_latest_release(self) -> Dict[str, any]:
@@ -94,7 +95,7 @@ class VersionManager:
     
     async def get_version_info(self) -> Dict[str, any]:
         """Get complete version information"""
-        current = self.get_current_version()
+        current = await self.get_current_version()
         latest_release = await self.get_latest_release()
         
         if latest_release:

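Since get_current_version() is now a coroutine, every caller must await it, as get_version_info() does above. A minimal standalone usage sketch (constructing VersionManager with defaults is an assumption):

```python
# Illustrative: reading the current version now happens inside an event loop,
# so the VERSION file read no longer blocks other requests.
import asyncio
from modules.core.version_manager import VersionManager

async def main():
    vm = VersionManager()  # assumed default constructor
    print(await vm.get_current_version())

asyncio.run(main())
```
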
+ 55 - 0
static/css/material-icons.css

@@ -0,0 +1,55 @@
+/* Material Icons - Local Font Files */
+
+@font-face {
+  font-family: 'Material Icons';
+  font-style: normal;
+  font-weight: 400;
+  src: url('/static/fonts/material-icons/MaterialIcons-Regular.woff2') format('woff2'),
+       url('/static/fonts/material-icons/MaterialIcons-Regular.woff') format('woff');
+  font-display: swap;
+}
+
+@font-face {
+  font-family: 'Material Icons Outlined';
+  font-style: normal;
+  font-weight: 400;
+  src: url('/static/fonts/material-icons/MaterialIconsOutlined-Regular.woff2') format('woff2'),
+       url('/static/fonts/material-icons/MaterialIconsOutlined-Regular.woff') format('woff');
+  font-display: swap;
+}
+
+.material-icons {
+  font-family: 'Material Icons';
+  font-weight: normal;
+  font-style: normal;
+  font-size: 24px;
+  display: inline-block;
+  line-height: 1;
+  text-transform: none;
+  letter-spacing: normal;
+  word-wrap: normal;
+  white-space: nowrap;
+  direction: ltr;
+  -webkit-font-smoothing: antialiased;
+  text-rendering: optimizeLegibility;
+  -moz-osx-font-smoothing: grayscale;
+  font-feature-settings: "liga";
+}
+
+.material-icons-outlined {
+  font-family: 'Material Icons Outlined';
+  font-weight: normal;
+  font-style: normal;
+  font-size: 24px;
+  display: inline-block;
+  line-height: 1;
+  text-transform: none;
+  letter-spacing: normal;
+  word-wrap: normal;
+  white-space: nowrap;
+  direction: ltr;
+  -webkit-font-smoothing: antialiased;
+  text-rendering: optimizeLegibility;
+  -moz-osx-font-smoothing: grayscale;
+  font-feature-settings: "liga";
+}

+ 0 - 17
static/css/tailwind-input.css

@@ -3,23 +3,6 @@
 @tailwind utilities;
 
 /* Custom styles from base.html */
-.material-icons {
-  font-family: "Material Icons";
-  font-weight: normal;
-  font-style: normal;
-  font-size: 24px;
-  display: inline-block;
-  line-height: 1;
-  text-transform: none;
-  letter-spacing: normal;
-  word-wrap: normal;
-  white-space: nowrap;
-  direction: ltr;
-  -webkit-font-smoothing: antialiased;
-  text-rendering: optimizeLegibility;
-  -moz-osx-font-smoothing: grayscale;
-  font-feature-settings: "liga";
-}
 
 /* Dark mode styles */
 .dark {

+ 11 - 0
static/fonts/material-icons/MaterialIcons-Regular.woff

@@ -0,0 +1,11 @@
+<!DOCTYPE html>
+<html lang=en>
+  <meta charset=utf-8>
+  <meta name=viewport content="initial-scale=1, minimum-scale=1, width=device-width">
+  <title>Error 404 (Not Found)!!1</title>
+  <style>
+    *{margin:0;padding:0}html,code{font:15px/22px arial,sans-serif}html{background:#fff;color:#222;padding:15px}body{margin:7% auto 0;max-width:390px;min-height:180px;padding:30px 0 15px}* > body{background:url(//www.google.com/images/errors/robot.png) 100% 5px no-repeat;padding-right:205px}p{margin:11px 0 22px;overflow:hidden}ins{color:#777;text-decoration:none}a img{border:0}@media screen and (max-width:772px){body{background:none;margin-top:0;max-width:none;padding-right:0}}#logo{background:url(//www.google.com/images/branding/googlelogo/1x/googlelogo_color_150x54dp.png) no-repeat;margin-left:-5px}@media only screen and (min-resolution:192dpi){#logo{background:url(//www.google.com/images/branding/googlelogo/2x/googlelogo_color_150x54dp.png) no-repeat 0% 0%/100% 100%;-moz-border-image:url(//www.google.com/images/branding/googlelogo/2x/googlelogo_color_150x54dp.png) 0}}@media only screen and (-webkit-min-device-pixel-ratio:2){#logo{background:url(//www.google.com/images/branding/googlelogo/2x/googlelogo_color_150x54dp.png) no-repeat;-webkit-background-size:100% 100%}}#logo{display:inline-block;height:54px;width:150px}
+  </style>
+  <a href=//www.google.com/><span id=logo aria-label=Google></span></a>
+  <p><b>404.</b> <ins>That’s an error.</ins>
+  <p>The requested URL <code>/s/materialicons/v140/flUhRq6tzZclQEJ-Vdg-IuiaDsNc.woff</code> was not found on this server.  <ins>That’s all we know.</ins>

BIN
static/fonts/material-icons/MaterialIcons-Regular.woff2


+ 11 - 0
static/fonts/material-icons/MaterialIconsOutlined-Regular.woff

@@ -0,0 +1,11 @@
+<!DOCTYPE html>
+<html lang=en>
+  <meta charset=utf-8>
+  <meta name=viewport content="initial-scale=1, minimum-scale=1, width=device-width">
+  <title>Error 404 (Not Found)!!1</title>
+  <style>
+    *{margin:0;padding:0}html,code{font:15px/22px arial,sans-serif}html{background:#fff;color:#222;padding:15px}body{margin:7% auto 0;max-width:390px;min-height:180px;padding:30px 0 15px}* > body{background:url(//www.google.com/images/errors/robot.png) 100% 5px no-repeat;padding-right:205px}p{margin:11px 0 22px;overflow:hidden}ins{color:#777;text-decoration:none}a img{border:0}@media screen and (max-width:772px){body{background:none;margin-top:0;max-width:none;padding-right:0}}#logo{background:url(//www.google.com/images/branding/googlelogo/1x/googlelogo_color_150x54dp.png) no-repeat;margin-left:-5px}@media only screen and (min-resolution:192dpi){#logo{background:url(//www.google.com/images/branding/googlelogo/2x/googlelogo_color_150x54dp.png) no-repeat 0% 0%/100% 100%;-moz-border-image:url(//www.google.com/images/branding/googlelogo/2x/googlelogo_color_150x54dp.png) 0}}@media only screen and (-webkit-min-device-pixel-ratio:2){#logo{background:url(//www.google.com/images/branding/googlelogo/2x/googlelogo_color_150x54dp.png) no-repeat;-webkit-background-size:100% 100%}}#logo{display:inline-block;height:54px;width:150px}
+  </style>
+  <a href=//www.google.com/><span id=logo aria-label=Google></span></a>
+  <p><b>404.</b> <ins>That’s an error.</ins>
+  <p>The requested URL <code>/s/materialiconsoutlined/v108/gok-H7zzDkdnRel8-DQ6KAXJ69wP1tGnf4ZGhUcel5euIg.woff</code> was not found on this server.  <ins>That’s all we know.</ins>

BIN
static/fonts/material-icons/MaterialIconsOutlined-Regular.woff2


+ 75 - 3
static/js/base.js

@@ -1,5 +1,78 @@
 // Player status bar functionality - Updated to fix logMessage errors
 
+// Pattern files cache for improved performance with localStorage persistence
+const PATTERN_CACHE_KEY = 'dune_weaver_pattern_files_cache';
+const PATTERN_CACHE_EXPIRY = 30 * 60 * 1000; // 30 minutes cache (longer since it persists)
+
+// Function to get cached pattern files or fetch fresh data
+async function getCachedPatternFiles(forceRefresh = false) {
+    const now = Date.now();
+
+    // Try to load from localStorage first
+    if (!forceRefresh) {
+        try {
+            const cachedData = localStorage.getItem(PATTERN_CACHE_KEY);
+            if (cachedData) {
+                const { files, timestamp } = JSON.parse(cachedData);
+                if (files && timestamp && (now - timestamp) < PATTERN_CACHE_EXPIRY) {
+                    console.log('Using cached pattern files from localStorage');
+                    return files;
+                }
+            }
+        } catch (error) {
+            console.warn('Error reading pattern files cache from localStorage:', error);
+        }
+    }
+
+    try {
+        console.log('Fetching fresh pattern files from server');
+        const response = await fetch('/list_theta_rho_files');
+        if (!response.ok) {
+            throw new Error(`Failed to fetch pattern files: ${response.status}`);
+        }
+
+        const files = await response.json();
+
+        // Store in localStorage
+        try {
+            const cacheData = { files, timestamp: now };
+            localStorage.setItem(PATTERN_CACHE_KEY, JSON.stringify(cacheData));
+        } catch (error) {
+            console.warn('Error storing pattern files cache in localStorage:', error);
+        }
+
+        return files;
+    } catch (error) {
+        console.error('Error fetching pattern files:', error);
+
+        // Try to return any cached data as fallback, even if expired
+        try {
+            const cachedData = localStorage.getItem(PATTERN_CACHE_KEY);
+            if (cachedData) {
+                const { files } = JSON.parse(cachedData);
+                if (files) {
+                    console.log('Using expired cached pattern files as fallback');
+                    return files;
+                }
+            }
+        } catch (fallbackError) {
+            console.warn('Error reading fallback cache:', fallbackError);
+        }
+
+        return [];
+    }
+}
+
+// Function to invalidate pattern files cache
+function invalidatePatternFilesCache() {
+    try {
+        localStorage.removeItem(PATTERN_CACHE_KEY);
+        console.log('Pattern files cache invalidated');
+    } catch (error) {
+        console.warn('Error invalidating pattern files cache:', error);
+    }
+}
+
 // Helper function to normalize file paths for cross-platform compatibility
 function normalizeFilePath(filePath) {
     if (!filePath) return '';
@@ -905,9 +978,8 @@ function initializeCacheAllPrompt() {
 
 async function startCacheAllProcess() {
     try {
-        // Get list of patterns
-        const response = await fetch('/list_theta_rho_files');
-        const patterns = await response.json();
+        // Get list of patterns using cached function
+        const patterns = await getCachedPatternFiles();
         
         if (!patterns || patterns.length === 0) {
             throw new Error('No patterns found');

+ 5 - 4
static/js/image2sand-init.js

@@ -166,7 +166,10 @@ async function saveConvertedPattern() {
             const fileInput = document.getElementById('upload_file');
             const finalFileName = 'custom_patterns/' + thrFileName;
             logMessage(`Image converted and saved as ${finalFileName}`, LOG_TYPE.SUCCESS);
-            
+
+            // Invalidate pattern files cache to include new file
+            invalidatePatternFilesCache();
+
             // Close the converter dialog
             closeImageConverter();
 
@@ -386,9 +389,7 @@ async function loadThetaRhoFiles() {
 
     try {
         // Fetch the file list from your backend
-        const response = await fetch('/list_theta_rho_files');
-        if (!response.ok) throw new Error('Failed to fetch file list');
-        const files = await response.json();
+        const files = await getCachedPatternFiles();
 
         // Populate the list
         files.forEach(filename => {

+ 53 - 16
static/js/index.js

@@ -647,8 +647,7 @@ async function loadPatterns(forceRefresh = false) {
         
         // First load basic patterns list for fast initial display
         logMessage('Fetching basic patterns list from server', LOG_TYPE.DEBUG);
-        const basicResponse = await fetch('/list_theta_rho_files');
-        const basicPatterns = await basicResponse.json();
+        const basicPatterns = await getCachedPatternFiles(forceRefresh);
         const thrPatterns = basicPatterns.filter(file => file.endsWith('.thr'));
         logMessage(`Received ${thrPatterns.length} basic patterns from server`, LOG_TYPE.INFO);
         
@@ -674,13 +673,25 @@ async function loadPatterns(forceRefresh = false) {
                     metadataAbortController.abort();
                 }
                 
-                // Create new AbortController for this request
+                // Create new AbortController for this request with timeout
                 metadataAbortController = new AbortController();
-                
+
+                // Set a timeout to prevent hanging on slow Pi systems
+                const timeoutId = setTimeout(() => {
+                    metadataAbortController.abort();
+                    logMessage('Metadata loading timed out after 30 seconds', LOG_TYPE.WARNING);
+                }, 30000); // 30 second timeout
+
                 logMessage('Loading enhanced metadata...', LOG_TYPE.DEBUG);
                 const metadataResponse = await fetch('/list_theta_rho_files_with_metadata', {
-                    signal: metadataAbortController.signal
+                    signal: metadataAbortController.signal,
+                    headers: {
+                        'Cache-Control': 'no-cache'
+                    }
                 });
+
+                // Clear timeout if request succeeds
+                clearTimeout(timeoutId);
                 const patternsWithMetadata = await metadataResponse.json();
                 
                 // Store enhanced patterns data
@@ -698,11 +709,31 @@ async function loadPatterns(forceRefresh = false) {
                 metadataAbortController = null;
             } catch (metadataError) {
                 if (metadataError.name === 'AbortError') {
-                    logMessage('Metadata loading cancelled (navigating away)', LOG_TYPE.DEBUG);
+                    logMessage('Metadata loading cancelled or timed out', LOG_TYPE.WARNING);
                 } else {
                     logMessage(`Failed to load enhanced metadata: ${metadataError.message}`, LOG_TYPE.WARNING);
                 }
-                // No fallback needed - basic patterns already displayed
+
+                // Create basic metadata from file list to populate categories
+                if (allPatterns && allPatterns.length > 0) {
+                    allPatternsWithMetadata = allPatterns.map(pattern => {
+                        const pathParts = pattern.split('/');
+                        const category = pathParts.length > 1 ? pathParts.slice(0, -1).join('/') : 'root';
+                        const fileName = pathParts[pathParts.length - 1].replace('.thr', '');
+                        return {
+                            path: pattern,
+                            name: fileName,
+                            category: category,
+                            date_modified: 0,
+                            coordinates_count: 0
+                        };
+                    });
+
+                    // Update category filter with basic data
+                    updateBrowseCategoryFilter();
+                    logMessage('Using basic category data (metadata unavailable)', LOG_TYPE.INFO);
+                }
+
                 metadataAbortController = null;
             }
         }, 100); // Small delay to let initial render complete
@@ -1107,7 +1138,10 @@ function setupPreviewPanelEvents(pattern) {
                 if (result.success) {
                     logMessage(`Pattern deleted successfully: ${pattern}`, LOG_TYPE.SUCCESS);
                     showStatusMessage(`Pattern "${pattern.split('/').pop()}" deleted successfully`);
-                    
+
+                    // Invalidate pattern files cache
+                    invalidatePatternFilesCache();
+
                     // Clear from in-memory caches
                     previewCache.delete(pattern);
                     imageCache.delete(pattern);
@@ -1143,8 +1177,8 @@ function setupPreviewPanelEvents(pattern) {
                     document.getElementById('patternPreviewTitle').textContent = 'Pattern Details';
                     document.getElementById('firstCoordinate').textContent = '(0, 0)';
                     document.getElementById('lastCoordinate').textContent = '(0, 0)';
-                    // Refresh the pattern list (force refresh since pattern was deleted)
-                    await loadPatterns(true);
+                    // Refresh the pattern list (cache already invalidated above)
+                    await loadPatterns();
                 } else {
                     throw new Error(result.error || 'Unknown error');
                 }
@@ -1642,11 +1676,14 @@ function setupUploadEventHandlers() {
                     const result = await response.json();
                     if (result.success) {
                         successCount++;
-                        
+
+                        // Invalidate pattern files cache to include new file
+                        invalidatePatternFilesCache();
+
                         // Clear any existing cache for this pattern to ensure fresh loading
                         const newPatternPath = `custom_patterns/${file.name}`;
                         previewCache.delete(newPatternPath);
-                        
+
                         logMessage(`Successfully uploaded: ${file.name}`, LOG_TYPE.SUCCESS);
                     } else {
                         failCount++;
@@ -1672,8 +1709,8 @@ function setupUploadEventHandlers() {
                 // Add a small delay to allow backend preview generation to complete
                 await new Promise(resolve => setTimeout(resolve, 1000));
                 
-                // Refresh the pattern list (force refresh since new patterns were uploaded)
-                await loadPatterns(true);
+                // Refresh the pattern list (cache already invalidated above)
+                await loadPatterns();
                 
                 // Trigger preview loading for newly uploaded patterns
                 setTimeout(() => {
@@ -1715,8 +1752,8 @@ function setupUploadEventHandlers() {
                 const patternToDelete = confirmBtn.dataset.pattern;
                 if (patternToDelete) {
                     await deletePattern(patternToDelete);
-                    // Force refresh after deletion
-                    await loadPatterns(true);
+                    // Refresh after deletion (cache invalidated in deletePattern)
+                    await loadPatterns();
                 }
                 deleteModal.classList.add('hidden');
             });

+ 1 - 6
static/js/playlists.js

@@ -1041,12 +1041,7 @@ async function loadAvailablePatterns(forceRefresh = false) {
     try {
         // First load basic patterns list for fast initial display
         logMessage('Fetching basic patterns list from server', LOG_TYPE.DEBUG);
-        const basicResponse = await fetch('/list_theta_rho_files');
-        if (!basicResponse.ok) {
-            throw new Error('Failed to load available patterns');
-        }
-        
-        const patterns = await basicResponse.json();
+        const patterns = await getCachedPatternFiles(forceRefresh);
         const thrPatterns = patterns.filter(file => file.endsWith('.thr'));
         availablePatterns = [...thrPatterns];
         filteredPatterns = [...availablePatterns];

+ 347 - 3
static/js/settings.js

@@ -171,7 +171,7 @@ document.addEventListener('DOMContentLoaded', async () => {
         fetch('/list_serial_ports').then(response => response.json()).catch(() => []),
         
         // Load available pattern files for clear pattern selection
-        fetch('/list_theta_rho_files').then(response => response.json()).catch(() => []),
+        getCachedPatternFiles().catch(() => []),
         
         // Load current custom clear patterns
         fetch('/api/custom_clear_patterns').then(response => response.json()).catch(() => ({ custom_clear_from_in: null, custom_clear_from_out: null })),
@@ -180,8 +180,11 @@ document.addEventListener('DOMContentLoaded', async () => {
         fetch('/api/clear_pattern_speed').then(response => response.json()).catch(() => ({ clear_pattern_speed: 200 })),
         
         // Load current app name
-        fetch('/api/app-name').then(response => response.json()).catch(() => ({ app_name: 'Dune Weaver' }))
-    ]).then(([statusData, wledData, updateData, ports, patterns, clearPatterns, clearSpeedData, appNameData]) => {
+        fetch('/api/app-name').then(response => response.json()).catch(() => ({ app_name: 'Dune Weaver' })),
+
+        // Load Still Sands settings
+        fetch('/api/scheduled-pause').then(response => response.json()).catch(() => ({ enabled: false, time_slots: [] }))
+    ]).then(([statusData, wledData, updateData, ports, patterns, clearPatterns, clearSpeedData, appNameData, scheduledPauseData]) => {
         // Update connection status
         setCachedConnectionStatus(statusData);
         updateConnectionUI(statusData);
@@ -299,6 +302,9 @@ document.addEventListener('DOMContentLoaded', async () => {
         if (appNameInput && appNameData.app_name) {
             appNameInput.value = appNameData.app_name;
         }
+
+        // Store Still Sands data for later initialization
+        window.initialStillSandsData = scheduledPauseData;
     }).catch(error => {
         logMessage(`Error initializing settings page: ${error.message}`, LOG_TYPE.ERROR);
     });
@@ -1020,4 +1026,342 @@ async function initializeauto_playMode() {
 // Initialize auto_play mode when DOM is ready
 document.addEventListener('DOMContentLoaded', function() {
     initializeauto_playMode();
+    initializeStillSandsMode();
 });
+
+// Still Sands Mode Functions
+async function initializeStillSandsMode() {
+    logMessage('Initializing Still Sands mode', LOG_TYPE.INFO);
+
+    const stillSandsToggle = document.getElementById('scheduledPauseToggle');
+    const stillSandsSettings = document.getElementById('scheduledPauseSettings');
+    const addTimeSlotButton = document.getElementById('addTimeSlotButton');
+    const saveStillSandsButton = document.getElementById('savePauseSettings');
+    const timeSlotsContainer = document.getElementById('timeSlotsContainer');
+    const wledControlToggle = document.getElementById('stillSandsWledControl');
+
+    // Check if elements exist
+    if (!stillSandsToggle || !stillSandsSettings || !addTimeSlotButton || !saveStillSandsButton || !timeSlotsContainer) {
+        logMessage('Still Sands elements not found, skipping initialization', LOG_TYPE.WARNING);
+        logMessage(`Found elements: toggle=${!!stillSandsToggle}, settings=${!!stillSandsSettings}, addBtn=${!!addTimeSlotButton}, saveBtn=${!!saveStillSandsButton}, container=${!!timeSlotsContainer}`, LOG_TYPE.WARNING);
+        return;
+    }
+
+    logMessage('All Still Sands elements found successfully', LOG_TYPE.INFO);
+
+    // Track time slots
+    let timeSlots = [];
+    let slotIdCounter = 0;
+
+    // Load current Still Sands settings from initial data
+    try {
+        // Use the data loaded during page initialization, fallback to API if not available
+        let data;
+        if (window.initialStillSandsData) {
+            data = window.initialStillSandsData;
+            // Clear the global variable after use
+            delete window.initialStillSandsData;
+        } else {
+            // Fallback to API call if initial data not available
+            const response = await fetch('/api/scheduled-pause');
+            data = await response.json();
+        }
+
+        stillSandsToggle.checked = data.enabled || false;
+        if (data.enabled) {
+            stillSandsSettings.style.display = 'block';
+        }
+
+        // Load WLED control setting
+        if (wledControlToggle) {
+            wledControlToggle.checked = data.control_wled || false;
+        }
+
+        // Load existing time slots
+        timeSlots = data.time_slots || [];
+
+        // Assign IDs to loaded slots BEFORE rendering
+        if (timeSlots.length > 0) {
+            slotIdCounter = 0;
+            timeSlots.forEach(slot => {
+                slot.id = ++slotIdCounter;
+            });
+        }
+
+        renderTimeSlots();
+    } catch (error) {
+        logMessage(`Error loading Still Sands settings: ${error.message}`, LOG_TYPE.ERROR);
+        // Initialize with empty settings if load fails
+        timeSlots = [];
+        renderTimeSlots();
+    }
+
+    // Function to validate time format (HH:MM)
+    function isValidTime(timeString) {
+        const timeRegex = /^([01]?[0-9]|2[0-3]):[0-5][0-9]$/;
+        return timeRegex.test(timeString);
+    }
+
+    // Function to create a new time slot element
+    function createTimeSlotElement(slot) {
+        const slotDiv = document.createElement('div');
+        slotDiv.className = 'time-slot-item';
+        slotDiv.dataset.slotId = slot.id;
+
+        slotDiv.innerHTML = `
+            <div class="flex items-center gap-3">
+                <div class="flex-1 grid grid-cols-1 md:grid-cols-2 gap-3">
+                    <div class="flex flex-col gap-1">
+                        <label class="text-slate-700 dark:text-slate-300 text-xs font-medium">Start Time</label>
+                        <input
+                            type="time"
+                            class="start-time form-input resize-none overflow-hidden rounded-lg text-slate-900 focus:outline-0 focus:ring-2 focus:ring-sky-500 border border-slate-300 bg-white focus:border-sky-500 h-9 px-3 text-sm font-normal leading-normal transition-colors"
+                            value="${slot.start_time || ''}"
+                            required
+                        />
+                    </div>
+                    <div class="flex flex-col gap-1">
+                        <label class="text-slate-700 dark:text-slate-300 text-xs font-medium">End Time</label>
+                        <input
+                            type="time"
+                            class="end-time form-input resize-none overflow-hidden rounded-lg text-slate-900 focus:outline-0 focus:ring-2 focus:ring-sky-500 border border-slate-300 bg-white focus:border-sky-500 h-9 px-3 text-sm font-normal leading-normal transition-colors"
+                            value="${slot.end_time || ''}"
+                            required
+                        />
+                    </div>
+                </div>
+                <div class="flex flex-col gap-1">
+                    <label class="text-slate-700 dark:text-slate-300 text-xs font-medium">Days</label>
+                    <select class="days-select form-select resize-none overflow-hidden rounded-lg text-slate-900 focus:outline-0 focus:ring-2 focus:ring-sky-500 border border-slate-300 bg-white focus:border-sky-500 h-9 px-3 text-sm font-normal transition-colors">
+                        <option value="daily" ${slot.days === 'daily' ? 'selected' : ''}>Daily</option>
+                        <option value="weekdays" ${slot.days === 'weekdays' ? 'selected' : ''}>Weekdays</option>
+                        <option value="weekends" ${slot.days === 'weekends' ? 'selected' : ''}>Weekends</option>
+                        <option value="custom" ${slot.days === 'custom' ? 'selected' : ''}>Custom</option>
+                    </select>
+                </div>
+                <button
+                    type="button"
+                    class="remove-slot-btn flex items-center justify-center w-9 h-9 text-red-600 hover:text-red-700 hover:bg-red-50 dark:hover:bg-red-900/20 rounded-lg transition-colors"
+                    title="Remove time slot"
+                >
+                    <span class="material-icons text-base">delete</span>
+                </button>
+            </div>
+            <div class="custom-days-container mt-2" style="display: ${slot.days === 'custom' ? 'block' : 'none'};">
+                <label class="text-slate-700 dark:text-slate-300 text-xs font-medium mb-1 block">Select Days</label>
+                <div class="flex flex-wrap gap-2">
+                    ${['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'].map(day => `
+                        <label class="flex items-center gap-1 text-xs">
+                            <input
+                                type="checkbox"
+                                name="custom-days-${slot.id}"
+                                value="${day}"
+                                ${slot.custom_days && slot.custom_days.includes(day) ? 'checked' : ''}
+                                class="rounded border-slate-300 text-sky-600 focus:ring-sky-500"
+                            />
+                            <span class="text-slate-700 dark:text-slate-300 capitalize">${day.substring(0, 3)}</span>
+                        </label>
+                    `).join('')}
+                </div>
+            </div>
+        `;
+
+        // Add event listeners for this slot
+        const startTimeInput = slotDiv.querySelector('.start-time');
+        const endTimeInput = slotDiv.querySelector('.end-time');
+        const daysSelect = slotDiv.querySelector('.days-select');
+        const customDaysContainer = slotDiv.querySelector('.custom-days-container');
+        const removeButton = slotDiv.querySelector('.remove-slot-btn');
+
+        // Show/hide custom days based on selection
+        daysSelect.addEventListener('change', () => {
+            customDaysContainer.style.display = daysSelect.value === 'custom' ? 'block' : 'none';
+            updateTimeSlot(slot.id);
+        });
+
+        // Update slot data when inputs change
+        startTimeInput.addEventListener('change', () => updateTimeSlot(slot.id));
+        endTimeInput.addEventListener('change', () => updateTimeSlot(slot.id));
+
+        // Handle custom day checkboxes
+        customDaysContainer.addEventListener('change', () => updateTimeSlot(slot.id));
+
+        // Remove slot button
+        removeButton.addEventListener('click', () => {
+            removeTimeSlot(slot.id);
+        });
+
+        return slotDiv;
+    }
+
+    // Function to render all time slots
+    function renderTimeSlots() {
+        timeSlotsContainer.innerHTML = '';
+
+        if (timeSlots.length === 0) {
+            timeSlotsContainer.innerHTML = `
+                <div class="text-center py-8 text-slate-500 dark:text-slate-400">
+                    <span class="material-icons text-4xl mb-2 block">schedule</span>
+                    <p>No time slots configured</p>
+                    <p class="text-xs mt-1">Click "Add Time Slot" to create a pause schedule</p>
+                </div>
+            `;
+            return;
+        }
+
+        timeSlots.forEach(slot => {
+            const slotElement = createTimeSlotElement(slot);
+            timeSlotsContainer.appendChild(slotElement);
+        });
+    }
+
+    // Function to add a new time slot
+    function addTimeSlot() {
+        const newSlot = {
+            id: ++slotIdCounter,
+            start_time: '22:00',
+            end_time: '08:00',
+            days: 'daily',
+            custom_days: []
+        };
+
+        timeSlots.push(newSlot);
+        renderTimeSlots();
+    }
+
+    // Function to remove a time slot
+    function removeTimeSlot(slotId) {
+        timeSlots = timeSlots.filter(slot => slot.id !== slotId);
+        renderTimeSlots();
+    }
+
+    // Function to update a time slot's data
+    function updateTimeSlot(slotId) {
+        const slotElement = timeSlotsContainer.querySelector(`[data-slot-id="${slotId}"]`);
+        if (!slotElement) return;
+
+        const slot = timeSlots.find(s => s.id === slotId);
+        if (!slot) return;
+
+        // Update slot data from inputs
+        slot.start_time = slotElement.querySelector('.start-time').value;
+        slot.end_time = slotElement.querySelector('.end-time').value;
+        slot.days = slotElement.querySelector('.days-select').value;
+
+        // Update custom days if applicable
+        if (slot.days === 'custom') {
+            const checkedDays = Array.from(slotElement.querySelectorAll(`input[name="custom-days-${slotId}"]:checked`))
+                .map(cb => cb.value);
+            slot.custom_days = checkedDays;
+        } else {
+            slot.custom_days = [];
+        }
+    }
+
+    // Function to validate all time slots
+    function validateTimeSlots() {
+        const errors = [];
+
+        timeSlots.forEach((slot, index) => {
+            if (!slot.start_time || !isValidTime(slot.start_time)) {
+                errors.push(`Time slot ${index + 1}: Invalid start time`);
+            }
+            if (!slot.end_time || !isValidTime(slot.end_time)) {
+                errors.push(`Time slot ${index + 1}: Invalid end time`);
+            }
+            if (slot.days === 'custom' && (!slot.custom_days || slot.custom_days.length === 0)) {
+                errors.push(`Time slot ${index + 1}: Please select at least one day for custom schedule`);
+            }
+        });
+
+        return errors;
+    }
+
+    // Function to save settings
+    async function saveStillSandsSettings() {
+        // Update all slots from current form values
+        timeSlots.forEach(slot => updateTimeSlot(slot.id));
+
+        // Validate time slots
+        const validationErrors = validateTimeSlots();
+        if (validationErrors.length > 0) {
+            showStatusMessage(`Validation errors: ${validationErrors.join(', ')}`, 'error');
+            return;
+        }
+
+        // Update button UI to show loading state
+        const originalButtonHTML = saveStillSandsButton.innerHTML;
+        saveStillSandsButton.disabled = true;
+        saveStillSandsButton.innerHTML = '<span class="material-icons text-lg animate-spin">refresh</span><span class="truncate">Saving...</span>';
+
+        try {
+            const response = await fetch('/api/scheduled-pause', {
+                method: 'POST',
+                headers: { 'Content-Type': 'application/json' },
+                body: JSON.stringify({
+                    enabled: stillSandsToggle.checked,
+                    control_wled: wledControlToggle ? wledControlToggle.checked : false,
+                    time_slots: timeSlots.map(slot => ({
+                        start_time: slot.start_time,
+                        end_time: slot.end_time,
+                        days: slot.days,
+                        custom_days: slot.custom_days
+                    }))
+                })
+            });
+
+            if (!response.ok) {
+                const errorData = await response.json();
+                throw new Error(errorData.detail || 'Failed to save Still Sands settings');
+            }
+
+            // Show success state temporarily
+            saveStillSandsButton.innerHTML = '<span class="material-icons text-lg">check</span><span class="truncate">Saved!</span>';
+            showStatusMessage('Still Sands settings saved successfully', 'success');
+
+            // Restore button after 2 seconds
+            setTimeout(() => {
+                saveStillSandsButton.innerHTML = originalButtonHTML;
+                saveStillSandsButton.disabled = false;
+            }, 2000);
+        } catch (error) {
+            logMessage(`Error saving Still Sands settings: ${error.message}`, LOG_TYPE.ERROR);
+            showStatusMessage(`Failed to save settings: ${error.message}`, 'error');
+
+            // Restore button immediately on error
+            saveStillSandsButton.innerHTML = originalButtonHTML;
+            saveStillSandsButton.disabled = false;
+        }
+    }
+
+    // Note: Slot IDs are now assigned during initialization above, before first render
+
+    // Event listeners
+    stillSandsToggle.addEventListener('change', async () => {
+        logMessage(`Still Sands toggle changed: ${stillSandsToggle.checked}`, LOG_TYPE.INFO);
+        stillSandsSettings.style.display = stillSandsToggle.checked ? 'block' : 'none';
+        logMessage(`Settings display set to: ${stillSandsSettings.style.display}`, LOG_TYPE.INFO);
+
+        // Auto-save when toggle changes
+        try {
+            await saveStillSandsSettings();
+            const statusText = stillSandsToggle.checked ? 'enabled' : 'disabled';
+            showStatusMessage(`Still Sands ${statusText} successfully`, 'success');
+        } catch (error) {
+            logMessage(`Error saving Still Sands toggle: ${error.message}`, LOG_TYPE.ERROR);
+            showStatusMessage(`Failed to save Still Sands setting: ${error.message}`, 'error');
+        }
+    });
+
+    addTimeSlotButton.addEventListener('click', addTimeSlot);
+    saveStillSandsButton.addEventListener('click', saveStillSandsSettings);
+
+    // Add listener for WLED control toggle
+    if (wledControlToggle) {
+        wledControlToggle.addEventListener('change', async () => {
+            logMessage(`WLED control toggle changed: ${wledControlToggle.checked}`, LOG_TYPE.INFO);
+            // Auto-save when WLED control changes
+            await saveStillSandsSettings();
+        });
+    }
+}
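
Both the toggle auto-save and the Save button above post to /api/scheduled-pause. For completeness, the same endpoint can be exercised outside the browser; a hedged Python example (host, port, and the requests dependency are assumptions — only the endpoint path and payload shape come from the code above):

```python
# Example: enable Still Sands with a single daily 22:00-08:00 still period.
# Host/port are assumptions; the payload mirrors saveStillSandsSettings().
import requests

payload = {
    "enabled": True,
    "control_wled": False,
    "time_slots": [
        {"start_time": "22:00", "end_time": "08:00", "days": "daily", "custom_days": []},
    ],
}

resp = requests.post("http://localhost:8080/api/scheduled-pause", json=payload, timeout=10)
resp.raise_for_status()

# The settings page reads the configuration back with a GET on the same path.
print(requests.get("http://localhost:8080/api/scheduled-pause", timeout=10).json())
```
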

+ 1 - 25
templates/base.html

@@ -25,32 +25,8 @@
     <link rel="icon" type="image/x-icon" href="/static/favicon.ico">
     <link rel="manifest" href="/static/site.webmanifest?v=2">
     <link rel="stylesheet" href="/static/css/tailwind.css">
-    <link
-      href="https://fonts.googleapis.com/icon?family=Material+Icons"
-      rel="stylesheet"
-    />
-    <link
-      href="https://fonts.googleapis.com/icon?family=Material+Icons+Outlined"
-      rel="stylesheet"
-    />
+    <link rel="stylesheet" href="/static/css/material-icons.css">
     <style>
-      .material-icons {
-        font-family: "Material Icons";
-        font-weight: normal;
-        font-style: normal;
-        font-size: 24px;
-        display: inline-block;
-        line-height: 1;
-        text-transform: none;
-        letter-spacing: normal;
-        word-wrap: normal;
-        white-space: nowrap;
-        direction: ltr;
-        -webkit-font-smoothing: antialiased;
-        text-rendering: optimizeLegibility;
-        -moz-osx-font-smoothing: grayscale;
-        font-feature-settings: "liga";
-      }
 
       /* Mobile navigation styles */
       @media (max-width: 640px) {

+ 209 - 0
templates/settings.html

@@ -112,6 +112,128 @@ endblock %}
   background-color: #92400e;
   color: #fef3c7;
 }
+
+/* Toggle switch styles */
+.switch {
+  position: relative;
+  display: inline-block;
+  width: 60px;
+  height: 34px;
+}
+
+.switch input {
+  opacity: 0;
+  width: 0;
+  height: 0;
+}
+
+.slider {
+  position: absolute;
+  cursor: pointer;
+  top: 0;
+  left: 0;
+  right: 0;
+  bottom: 0;
+  background-color: #ccc;
+  transition: .4s;
+}
+
+.slider:before {
+  position: absolute;
+  content: "";
+  height: 26px;
+  width: 26px;
+  left: 4px;
+  bottom: 4px;
+  background-color: white;
+  transition: .4s;
+}
+
+input:checked + .slider {
+  background-color: #0c7ff2;
+}
+
+input:focus + .slider {
+  box-shadow: 0 0 1px #0c7ff2;
+}
+
+input:checked + .slider:before {
+  transform: translateX(26px);
+}
+
+.slider.round {
+  border-radius: 34px;
+}
+
+.slider.round:before {
+  border-radius: 50%;
+}
+
+/* Dark mode for switches */
+.dark .slider {
+  background-color: #404040;
+}
+
+.dark input:checked + .slider {
+  background-color: #0c7ff2;
+}
+
+/* Spin animation for loading states */
+@keyframes spin {
+  from {
+    transform: rotate(0deg);
+  }
+  to {
+    transform: rotate(360deg);
+  }
+}
+
+.animate-spin {
+  animation: spin 1s linear infinite;
+}
+
+/* Time slot specific styles */
+.time-slot-item {
+  background-color: #f8fafc;
+  border: 1px solid #e2e8f0;
+  border-radius: 8px;
+  padding: 16px;
+  transition: all 0.15s;
+}
+
+.dark .time-slot-item {
+  background-color: #1e293b;
+  border-color: #475569;
+}
+
+.time-slot-item:hover {
+  border-color: #cbd5e1;
+}
+
+.dark .time-slot-item:hover {
+  border-color: #64748b;
+}
+
+/* Info box dark mode */
+.dark .bg-blue-50 {
+  background-color: #1e3a8a;
+}
+
+.dark .border-blue-200 {
+  border-color: #1e40af;
+}
+
+.dark .text-blue-600 {
+  color: #60a5fa;
+}
+
+.dark .text-blue-800 {
+  color: #dbeafe;
+}
+
+.dark .text-blue-700 {
+  color: #bfdbfe;
+}
 {% endblock %}
 
 {% block content %}
@@ -488,6 +610,93 @@ endblock %}
       </div>
     </div>
   </section>
+  <section class="bg-white rounded-xl shadow-sm overflow-hidden">
+    <h2
+      class="text-slate-800 text-xl sm:text-2xl font-semibold leading-tight tracking-[-0.01em] px-6 py-4 border-b border-slate-200"
+    >
+      Still Sands
+    </h2>
+    <div class="px-6 py-5 space-y-6">
+      <div class="flex items-center justify-between">
+        <div class="flex-1">
+          <h3 class="text-slate-700 text-base font-medium leading-normal">Enable Still Sands</h3>
+          <p class="text-xs text-slate-500 mt-1">
+            Automatically bring the sands to rest during specified time periods.
+          </p>
+        </div>
+        <label class="switch">
+          <input type="checkbox" id="scheduledPauseToggle">
+          <span class="slider round"></span>
+        </label>
+      </div>
+
+      <div id="scheduledPauseSettings" class="space-y-4" style="display: none;">
+        <!-- WLED Control Option -->
+        <div class="bg-amber-50 rounded-lg p-4 border border-amber-200">
+          <div class="flex items-center justify-between">
+            <div class="flex-1">
+              <h4 class="text-slate-800 text-sm font-medium flex items-center gap-2">
+                <span class="material-icons text-amber-600 text-base">lightbulb</span>
+                Control WLED Lights
+              </h4>
+              <p class="text-xs text-slate-600 mt-1">
+                Turn off WLED lights during still periods for complete rest
+              </p>
+            </div>
+            <label class="switch">
+              <input type="checkbox" id="stillSandsWledControl">
+              <span class="slider round"></span>
+            </label>
+          </div>
+        </div>
+
+        <div class="bg-slate-50 rounded-lg p-4 space-y-4">
+          <div class="flex items-center justify-between">
+            <h4 class="text-slate-800 text-base font-semibold">Still Periods</h4>
+            <button
+              id="addTimeSlotButton"
+              class="flex items-center justify-center gap-2 cursor-pointer rounded-lg h-9 px-3 bg-sky-600 hover:bg-sky-700 text-white text-xs font-medium leading-normal tracking-[0.015em] transition-colors"
+            >
+              <span class="material-icons text-base">add</span>
+              <span>Add Still Period</span>
+            </button>
+          </div>
+          <p class="text-sm text-slate-600">
+            Define time periods when the sands should rest in stillness. Patterns will resume automatically when still periods end.
+          </p>
+
+          <div id="timeSlotsContainer" class="space-y-3">
+            <!-- Time slots will be dynamically added here -->
+          </div>
+
+          <div class="text-xs text-slate-500 bg-blue-50 border border-blue-200 rounded-lg p-3">
+            <div class="flex items-start gap-2">
+              <span class="material-icons text-blue-600 text-base">info</span>
+              <div>
+                <p class="font-medium text-blue-800">Important Notes:</p>
+                <ul class="mt-1 space-y-1 text-blue-700">
+                  <li>• Times are based on your system's local time zone</li>
+                  <li>• Currently running patterns will pause immediately when entering a still period</li>
+                  <li>• Patterns will resume automatically when exiting a still period</li>
+                  <li>• Still periods that span midnight (e.g., 22:00 to 06:00) are supported</li>
+                </ul>
+              </div>
+            </div>
+          </div>
+        </div>
+
+        <div class="flex justify-end">
+          <button
+            id="savePauseSettings"
+            class="flex items-center justify-center gap-2 min-w-[140px] cursor-pointer rounded-lg h-10 px-4 bg-sky-600 hover:bg-sky-700 text-white text-sm font-medium leading-normal tracking-[0.015em] transition-colors"
+          >
+            <span class="material-icons text-lg">save</span>
+            <span class="truncate">Save Still Sands</span>
+          </button>
+        </div>
+      </div>
+    </div>
+  </section>
   <section class="bg-white rounded-xl shadow-sm overflow-hidden">
     <h2
       class="text-slate-800 text-xl sm:text-2xl font-semibold leading-tight tracking-[-0.01em] px-6 py-4 border-b border-slate-200"