
add cache progress to fe

tuanchris 5 months ago
parent
commit
1606057398
100 changed files with 1141 additions and 897 deletions
  1. CLAUDE.md (+92 -0)
  2. Dockerfile (+1 -1)
  3. README.md (+2 -2)
  4. main.py (+849 -814)
  5. modules/core/cache_manager.py (+197 -80)
  6. patterns/cached_images_bk/0-0-rotating-hearts.thr.webp (BIN)
  7. patterns/cached_images_bk/0-1-hubcap.thr.webp (BIN)
  8. patterns/cached_images_bk/03 pnuttrellis (E) (N N).thr.webp (BIN)
  9. patterns/cached_images_bk/1-0-open-your-heart-and-mind.thr.webp (BIN)
  10. patterns/cached_images_bk/1-0-spirograph-medallion.thr.webp (BIN)
  11. patterns/cached_images_bk/1-1-alien-sludge.thr.webp (BIN)
  12. patterns/cached_images_bk/1-1-ibex.thr.webp (BIN)
  13. patterns/cached_images_bk/1-1-pizza-slice-swirl.thr.webp (BIN)
  14. patterns/cached_images_bk/10_sided_polygon.thr.webp (BIN)
  15. patterns/cached_images_bk/13b Battlesbury (C C).thr.webp (BIN)
  16. patterns/cached_images_bk/19 Itsyourmove (E) (C NW).thr.webp (BIN)
  17. patterns/cached_images_bk/33 Labyrinth1 (S S)_preview.thr.webp (BIN)
  18. patterns/cached_images_bk/6_sided_polygon.thr.webp (BIN)
  19. patterns/cached_images_bk/AcklingDykeDorset 6-4-2018.thr.webp (BIN)
  20. patterns/cached_images_bk/BattlesburyCampWilts 7-5-2017.thr.webp (BIN)
  21. patterns/cached_images_bk/BucklandDownDorset 5-26-2018.thr.webp (BIN)
  22. patterns/cached_images_bk/Butterfly.thr.webp (BIN)
  23. patterns/cached_images_bk/ChartresLabyrinthe.thr.webp (BIN)
  24. patterns/cached_images_bk/Cheesefoot Head 8-9-2012.thr.webp (BIN)
  25. patterns/cached_images_bk/CleyHill 7-18-2017.thr.webp (BIN)
  26. patterns/cached_images_bk/Fractal.thr.webp (BIN)
  27. patterns/cached_images_bk/HackpenHill 6-9-2018.thr.webp (BIN)
  28. patterns/cached_images_bk/Hampton-on-Lucy 8-8-2015.thr.webp (BIN)
  29. patterns/cached_images_bk/HamptonHillsCross 2-18-2015.thr.webp (BIN)
  30. patterns/cached_images_bk/HexagonAlley.thr.webp (BIN)
  31. patterns/cached_images_bk/Hosta.thr.webp (BIN)
  32. patterns/cached_images_bk/KeysleyDown 6-10-2018.thr.webp (BIN)
  33. patterns/cached_images_bk/LiddingtonCastle 6-24-2001.thr.webp (BIN)
  34. patterns/cached_images_bk/LongwoodWarrenHants 7-10-2018.thr.webp (BIN)
  35. patterns/cached_images_bk/MilkHill 6-2-2009.thr.webp (BIN)
  36. patterns/cached_images_bk/MilkHill 7-8-2011.thr.webp (BIN)
  37. patterns/cached_images_bk/Muncombe Hill 7-14-2018.thr.webp (BIN)
  38. patterns/cached_images_bk/OareWiltshire 6-21-2010.thr.webp (BIN)
  39. patterns/cached_images_bk/Petalar.thr.webp (BIN)
  40. patterns/cached_images_bk/SierpinskiTriangle (1).thr.webp (BIN)
  41. patterns/cached_images_bk/SimpleRadiance.thr.webp (BIN)
  42. patterns/cached_images_bk/SineVsBezier2.thr.webp (BIN)
  43. patterns/cached_images_bk/SixPennyHandley 4-6-2009.thr.webp (BIN)
  44. patterns/cached_images_bk/Spiral6.thr.webp (BIN)
  45. patterns/cached_images_bk/SpiralBezier (1).thr.webp (BIN)
  46. patterns/cached_images_bk/SpiralGyrations-2.thr.webp (BIN)
  47. patterns/cached_images_bk/Sponge.thr.webp (BIN)
  48. patterns/cached_images_bk/StarryNight.thr.webp (BIN)
  49. patterns/cached_images_bk/Swirly1.thr.webp (BIN)
  50. patterns/cached_images_bk/SwoopyRadiance.thr.webp (BIN)
  51. patterns/cached_images_bk/TriangleSwoop.thr.webp (BIN)
  52. patterns/cached_images_bk/WiltonWindmill 5-22-2010.thr.webp (BIN)
  53. patterns/cached_images_bk/WinterbourneBassett 6-1-1997.thr.webp (BIN)
  54. patterns/cached_images_bk/WinterbourneStokeDown 7-18-2018.thr.webp (BIN)
  55. patterns/cached_images_bk/atwood-quote.thr.webp (BIN)
  56. patterns/cached_images_bk/beach.thr.webp (BIN)
  57. patterns/cached_images_bk/bear.thr.webp (BIN)
  58. patterns/cached_images_bk/chartres_labyrinthe.thr.webp (BIN)
  59. patterns/cached_images_bk/circle-packer-rings.thr.webp (BIN)
  60. patterns/cached_images_bk/circle_normalized.thr.webp (BIN)
  61. patterns/cached_images_bk/circle_packer.thr.webp (BIN)
  62. patterns/cached_images_bk/clear_from_in.thr.webp (BIN)
  63. patterns/cached_images_bk/clear_from_in_Ultra.thr.webp (BIN)
  64. patterns/cached_images_bk/clear_from_in_mini.thr.webp (BIN)
  65. patterns/cached_images_bk/clear_from_in_pro.thr.webp (BIN)
  66. patterns/cached_images_bk/clear_from_out.thr.webp (BIN)
  67. patterns/cached_images_bk/clear_from_out_Ultra.thr.webp (BIN)
  68. patterns/cached_images_bk/clear_from_out_mini.thr.webp (BIN)
  69. patterns/cached_images_bk/clear_from_out_pro.thr.webp (BIN)
  70. patterns/cached_images_bk/clear_sideway.thr.webp (BIN)
  71. patterns/cached_images_bk/clear_sideway_mini.thr.webp (BIN)
  72. patterns/cached_images_bk/clear_sideway_pro.thr.webp (BIN)
  73. patterns/cached_images_bk/custom_patterns/AcklingDykeDorset 6-4-2018.thr.webp (BIN)
  74. patterns/cached_images_bk/custom_patterns/BucklandDownDorset 5-26-2018.thr.webp (BIN)
  75. patterns/cached_images_bk/custom_patterns/Butterfly.thr.webp (BIN)
  76. patterns/cached_images_bk/custom_patterns/Can+Bryant.thr.webp (BIN)
  77. patterns/cached_images_bk/custom_patterns/ChartresLabyrinthe.thr.webp (BIN)
  78. patterns/cached_images_bk/custom_patterns/Cheesefoot Head 8-9-2012.thr.webp (BIN)
  79. patterns/cached_images_bk/custom_patterns/CleyHill 7-18-2017.thr.webp (BIN)
  80. patterns/cached_images_bk/custom_patterns/Fractal.thr.webp (BIN)
  81. patterns/cached_images_bk/custom_patterns/Fractal2 (1).thr.webp (BIN)
  82. patterns/cached_images_bk/custom_patterns/HackpenHill 6-9-2018.thr.webp (BIN)
  83. patterns/cached_images_bk/custom_patterns/Hampton-on-Lucy 8-8-2015.thr.webp (BIN)
  84. patterns/cached_images_bk/custom_patterns/HamptonHillsCross 2-18-2015.thr.webp (BIN)
  85. patterns/cached_images_bk/custom_patterns/Hosta.thr.webp (BIN)
  86. patterns/cached_images_bk/custom_patterns/KeysleyDown 6-10-2018.thr.webp (BIN)
  87. patterns/cached_images_bk/custom_patterns/LiddingtonCastle 6-24-2001.thr.webp (BIN)
  88. patterns/cached_images_bk/custom_patterns/LongwoodWarrenHants 7-10-2018.thr.webp (BIN)
  89. patterns/cached_images_bk/custom_patterns/MilkHill 6-2-2009.thr.webp (BIN)
  90. patterns/cached_images_bk/custom_patterns/MilkHill 7-8-2011.thr.webp (BIN)
  91. patterns/cached_images_bk/custom_patterns/Muncombe Hill 7-14-2018.thr.webp (BIN)
  92. patterns/cached_images_bk/custom_patterns/OareWiltshire 6-21-2010.thr.webp (BIN)
  93. patterns/cached_images_bk/custom_patterns/Shubis_Christmas.thr.webp (BIN)
  94. patterns/cached_images_bk/custom_patterns/SimpleRadiance.thr.webp (BIN)
  95. patterns/cached_images_bk/custom_patterns/SineVsBezier2.thr.webp (BIN)
  96. patterns/cached_images_bk/custom_patterns/SixPennyHandley 4-6-2009.thr.webp (BIN)
  97. patterns/cached_images_bk/custom_patterns/Spiral5.thr.webp (BIN)
  98. patterns/cached_images_bk/custom_patterns/Sponge.thr.webp (BIN)
  99. patterns/cached_images_bk/custom_patterns/StarryNight.thr.webp (BIN)
  100. patterns/cached_images_bk/custom_patterns/Swirly1.thr.webp (BIN)

+ 92 - 0
CLAUDE.md

@@ -0,0 +1,92 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Development Commands
+
+### CSS/Frontend Development
+- `npm run dev` or `npm run watch-css` - Watch mode for Tailwind CSS development 
+- `npm run build-css` - Build and minify Tailwind CSS for production
+
+### Python Application
+- `python main.py` - Start the FastAPI server on port 8080
+- The application uses uvicorn internally and runs on 0.0.0.0:8080
+
+## Architecture Overview
+
+Dune Weaver is a web-controlled kinetic sand table system with both hardware and software components:
+
+### Core Application Structure
+- **FastAPI backend** (`main.py`) - Main web server with REST APIs and WebSocket support
+- **Modular design** with organized modules:
+  - `modules/connection/` - Serial and WebSocket connection management for hardware
+  - `modules/core/` - Core business logic (patterns, playlists, state management, caching)
+  - `modules/led/` - WLED integration for lighting effects  
+  - `modules/mqtt/` - MQTT integration capabilities
+  - `modules/update/` - Software update management
+
+### Coordinate System
+The sand table uses **polar coordinates (θ, ρ)** instead of traditional Cartesian:
+- **Theta (θ)**: Angular position in degrees (0-360°)
+- **Rho (ρ)**: Radial distance from center (0.0 at center, 1.0 at perimeter)
+
+### Pattern System
+- **Pattern files**: `.thr` files in `patterns/` directory containing theta-rho coordinate pairs
+- **Pattern format**: Each line contains `theta rho` values separated by a space; comments start with `#`
+- **Cached previews**: WebP images generated in `patterns/cached_images/` for UI display
+- **Custom patterns**: User uploads stored in `patterns/custom_patterns/`
+
+### Hardware Communication
+- Supports both **Serial** and **WebSocket** connections to hardware controllers
+- **ESP32** or **Arduino** boards control stepper motors
+- **Homing system**: Crash-homing method without limit switches
+- **Hardware coupling**: Angular and radial axes are mechanically coupled, requiring software compensation
+
+### State Management
+- Global state managed in `modules/core/state.py`
+- Persistent state saved to `state.json`
+- Real-time status updates via WebSocket (`/ws/status`)
+
+### Key Features
+- **Playlist system**: Sequential pattern execution with timing control
+- **WLED integration**: Synchronized lighting effects during pattern execution
+- **Image caching**: Automatic preview generation for all patterns
+- **Pattern execution control**: Play, pause, resume, stop, skip functionality
+- **MQTT support**: External system integration
+- **Software updates**: Git-based update system
+
+## Important Implementation Notes
+
+### Cursor Rules Integration
+The project follows FastAPI best practices from `.cursorrules`:
+- Use functional programming patterns where possible
+- Implement proper error handling with early returns
+- Use Pydantic models for request/response validation
+- Prefer async operations for I/O-bound tasks
+- Follow proper dependency injection patterns
+
+### Hardware Constraints
+- Angular axis movement affects radial position due to mechanical coupling
+- Software compensates for this coupling automatically
+- No physical limit switches - relies on crash-homing for position reference
+
+### Threading and Concurrency
+- Uses asyncio for concurrent operations
+- Pattern execution runs in background tasks
+- Thread-safe connection management with locks
+- WebSocket connections for real-time status updates
+
+## Testing and Development
+
+### Running the Application
+1. Install Python dependencies: `pip install -r requirements.txt`
+2. Install Node dependencies: `npm install`
+3. Build CSS: `npm run build-css`
+4. Start server: `python main.py`
+
+### File Structure Conventions
+- Pattern files in `patterns/` (can have subdirectories)
+- Static assets in `static/` (CSS, JS, images)
+- HTML templates in `templates/`
+- Configuration files in root directory
+- Firmware configurations in `firmware/` subdirectories for different hardware versions

+ 1 - 1
Dockerfile

@@ -19,4 +19,4 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
 COPY . .
 
 EXPOSE 8080
-CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"]
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8080"]

+ 2 - 2
README.md

@@ -4,7 +4,7 @@
 
 ![Dune Weaver Gif](./static/IMG_7404.gif)
 
-Dune Weaver is a project for a mesmerizing, motorized sand table that draws intricate patterns in sand using a steel ball moved by a magnet. This project combines hardware and software, leveraging an Arduino for hardware control and a Python/Flask-based web interface for interaction. 
+Dune Weaver is a project for a mesmerizing, motorized sand table that draws intricate patterns in sand using a steel ball moved by a magnet. This project combines hardware and software, leveraging an Arduino for hardware control and a Python/FastAPI-based web interface for interaction. 
 
 ### **Check out the wiki [here](https://github.com/tuanchris/dune-weaver/wiki/Wiring) for more details.**
 
@@ -94,7 +94,7 @@ The project exposes RESTful APIs for various actions. Here are some key endpoint
 
 ```
 dune-weaver/
-├── app.py              # Flask app and core logic
+├── main.py             # FastAPI app and core logic
 ├── patterns/           # Directory for theta-rho files
 ├── static/             # Static files (CSS, JS)
 ├── templates/          # HTML templates for the web interface

+ 849 - 814
app.py → main.py

@@ -1,814 +1,849 @@
-from fastapi import FastAPI, UploadFile, File, HTTPException, BackgroundTasks, WebSocket, WebSocketDisconnect, Request
-from fastapi.responses import JSONResponse, FileResponse, Response
-from fastapi.staticfiles import StaticFiles
-from fastapi.templating import Jinja2Templates
-from pydantic import BaseModel
-from typing import List, Optional, Tuple, Dict, Any, Union
-import atexit
-import os
-import logging
-from datetime import datetime, time
-from modules.connection import connection_manager
-from modules.core import pattern_manager
-from modules.core.pattern_manager import parse_theta_rho_file, THETA_RHO_DIR
-from modules.core import playlist_manager
-from modules.update import update_manager
-from modules.core.state import state
-from modules import mqtt
-import signal
-import sys
-import asyncio
-from contextlib import asynccontextmanager
-from modules.led.led_controller import LEDController, effect_idle
-import math
-from modules.core.cache_manager import generate_all_image_previews, get_cache_path, generate_image_preview, get_pattern_metadata
-import json
-import base64
-import time
-import argparse
-
-# Get log level from environment variable, default to INFO
-log_level_str = os.getenv('LOG_LEVEL', 'INFO').upper()
-log_level = getattr(logging, log_level_str, logging.INFO)
-
-logging.basicConfig(
-    level=log_level,
-    format='%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s',
-    handlers=[
-        logging.StreamHandler(),
-    ]
-)
-
-logger = logging.getLogger(__name__)
-
-@asynccontextmanager
-async def lifespan(app: FastAPI):
-    # Startup
-    logger.info("Starting Dune Weaver application...")
-    # Register signal handlers
-    signal.signal(signal.SIGINT, signal_handler)
-    signal.signal(signal.SIGTERM, signal_handler)
-    
-    try:
-        connection_manager.connect_device()
-    except Exception as e:
-        logger.warning(f"Failed to auto-connect to serial port: {str(e)}")
-        
-    try:
-        mqtt_handler = mqtt.init_mqtt()
-    except Exception as e:
-        logger.warning(f"Failed to initialize MQTT: {str(e)}")
-    
-    # Generate metadata cache and image previews for all patterns
-    try:
-        logger.info("Starting cache generation...")
-        from modules.core.cache_manager import generate_metadata_cache, generate_all_image_previews
-        await generate_metadata_cache()
-        await generate_all_image_previews()
-        logger.info("Cache generation completed successfully")
-    except Exception as e:
-        logger.warning(f"Failed to generate cache: {str(e)}")
-
-    yield  # This separates startup from shutdown code
-
-
-app = FastAPI(lifespan=lifespan)
-templates = Jinja2Templates(directory="templates")
-app.mount("/static", StaticFiles(directory="static"), name="static")
-
-# Pydantic models for request/response validation
-class ConnectRequest(BaseModel):
-    port: Optional[str] = None
-
-class CoordinateRequest(BaseModel):
-    theta: float
-    rho: float
-
-class PlaylistRequest(BaseModel):
-    playlist_name: str
-    files: List[str] = []
-    pause_time: float = 0
-    clear_pattern: Optional[str] = None
-    run_mode: str = "single"
-    shuffle: bool = False
-
-class PlaylistRunRequest(BaseModel):
-    playlist_name: str
-    pause_time: Optional[float] = 0
-    clear_pattern: Optional[str] = None
-    run_mode: Optional[str] = "single"
-    shuffle: Optional[bool] = False
-    start_time: Optional[str] = None
-    end_time: Optional[str] = None
-
-class SpeedRequest(BaseModel):
-    speed: float
-
-class WLEDRequest(BaseModel):
-    wled_ip: Optional[str] = None
-
-class DeletePlaylistRequest(BaseModel):
-    playlist_name: str
-
-class ThetaRhoRequest(BaseModel):
-    file_name: str
-    pre_execution: Optional[str] = "none"
-
-class GetCoordinatesRequest(BaseModel):
-    file_name: str
-
-# Store active WebSocket connections
-active_status_connections = set()
-
-@app.websocket("/ws/status")
-async def websocket_status_endpoint(websocket: WebSocket):
-    await websocket.accept()
-    active_status_connections.add(websocket)
-    try:
-        while True:
-            status = pattern_manager.get_status()
-            try:
-                await websocket.send_json({
-                    "type": "status_update",
-                    "data": status
-                })
-            except RuntimeError as e:
-                if "close message has been sent" in str(e):
-                    break
-                raise
-            await asyncio.sleep(1)
-    except WebSocketDisconnect:
-        pass
-    finally:
-        active_status_connections.discard(websocket)
-        try:
-            await websocket.close()
-        except RuntimeError:
-            pass
-
-async def broadcast_status_update(status: dict):
-    """Broadcast status update to all connected clients."""
-    disconnected = set()
-    for websocket in active_status_connections:
-        try:
-            await websocket.send_json({
-                "type": "status_update",
-                "data": status
-            })
-        except WebSocketDisconnect:
-            disconnected.add(websocket)
-        except RuntimeError:
-            disconnected.add(websocket)
-    
-    active_status_connections.difference_update(disconnected)
-
-# FastAPI routes
-@app.get("/")
-async def index(request: Request):
-    return templates.TemplateResponse("index.html", {"request": request})
-
-@app.get("/settings")
-async def settings(request: Request):
-    return templates.TemplateResponse("settings.html", {"request": request})
-
-@app.get("/list_serial_ports")
-async def list_ports():
-    logger.debug("Listing available serial ports")
-    return connection_manager.list_serial_ports()
-
-@app.post("/connect")
-async def connect(request: ConnectRequest):
-    if not request.port:
-        state.conn = connection_manager.WebSocketConnection('ws://fluidnc.local:81')
-        connection_manager.device_init()
-        logger.info('Successfully connected to websocket ws://fluidnc.local:81')
-        return {"success": True}
-
-    try:
-        state.conn = connection_manager.SerialConnection(request.port)
-        connection_manager.device_init()
-        logger.info(f'Successfully connected to serial port {request.port}')
-        return {"success": True}
-    except Exception as e:
-        logger.error(f'Failed to connect to serial port {request.port}: {str(e)}')
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/disconnect")
-async def disconnect():
-    try:
-        state.conn.close()
-        logger.info('Successfully disconnected from serial port')
-        return {"success": True}
-    except Exception as e:
-        logger.error(f'Failed to disconnect serial: {str(e)}')
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/restart_connection")
-async def restart(request: ConnectRequest):
-    if not request.port:
-        logger.warning("Restart serial request received without port")
-        raise HTTPException(status_code=400, detail="No port provided")
-
-    try:
-        logger.info(f"Restarting connection on port {request.port}")
-        connection_manager.restart_connection()
-        return {"success": True}
-    except Exception as e:
-        logger.error(f"Failed to restart serial on port {request.port}: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.get("/list_theta_rho_files")
-async def list_theta_rho_files():
-    logger.debug("Listing theta-rho files")
-    files = pattern_manager.list_theta_rho_files()
-    return sorted(files)
-
-@app.post("/upload_theta_rho")
-async def upload_theta_rho(file: UploadFile = File(...)):
-    """Upload a theta-rho file."""
-    try:
-        # Save the file
-        # Ensure custom_patterns directory exists
-        custom_patterns_dir = os.path.join(pattern_manager.THETA_RHO_DIR, "custom_patterns")
-        os.makedirs(custom_patterns_dir, exist_ok=True)
-        
-        file_path_in_patterns_dir = os.path.join("custom_patterns", file.filename)
-        full_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_path_in_patterns_dir)
-        
-        # Save the uploaded file
-        with open(full_file_path, "wb") as f:
-            f.write(await file.read())
-        
-        logger.info(f"File {file.filename} saved successfully")
-        
-        # Generate image preview for the new file with retry logic
-        max_retries = 3
-        for attempt in range(max_retries):
-            try:
-                logger.info(f"Generating preview for {file_path_in_patterns_dir} (attempt {attempt + 1}/{max_retries})")
-                success = await generate_image_preview(file_path_in_patterns_dir)
-                if success:
-                    logger.info(f"Preview generated successfully for {file_path_in_patterns_dir}")
-                    break
-                else:
-                    logger.warning(f"Preview generation failed for {file_path_in_patterns_dir} (attempt {attempt + 1})")
-                    if attempt < max_retries - 1:
-                        await asyncio.sleep(0.5)  # Small delay before retry
-            except Exception as e:
-                logger.error(f"Error generating preview for {file_path_in_patterns_dir} (attempt {attempt + 1}): {str(e)}")
-                if attempt < max_retries - 1:
-                    await asyncio.sleep(0.5)  # Small delay before retry
-        
-        return {"success": True, "message": f"File {file.filename} uploaded successfully"}
-    except Exception as e:
-        logger.error(f"Error uploading file: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/get_theta_rho_coordinates")
-async def get_theta_rho_coordinates(request: GetCoordinatesRequest):
-    """Get theta-rho coordinates for animated preview."""
-    try:
-        # Handle file paths that may include the patterns directory prefix
-        file_name = request.file_name
-        if file_name.startswith('./patterns/'):
-            file_name = file_name[11:]  # Remove './patterns/' prefix
-        elif file_name.startswith('patterns/'):
-            file_name = file_name[9:]   # Remove 'patterns/' prefix
-            
-        file_path = os.path.join(THETA_RHO_DIR, file_name)
-        
-        if not os.path.exists(file_path):
-            raise HTTPException(status_code=404, detail=f"File {file_name} not found")
-        
-        # Parse the theta-rho file
-        coordinates = parse_theta_rho_file(file_path)
-        
-        if not coordinates:
-            raise HTTPException(status_code=400, detail="No valid coordinates found in file")
-        
-        return {
-            "success": True,
-            "coordinates": coordinates,
-            "total_points": len(coordinates)
-        }
-        
-    except Exception as e:
-        logger.error(f"Error getting coordinates for {request.file_name}: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/run_theta_rho")
-async def run_theta_rho(request: ThetaRhoRequest, background_tasks: BackgroundTasks):
-    if not request.file_name:
-        logger.warning('Run theta-rho request received without file name')
-        raise HTTPException(status_code=400, detail="No file name provided")
-    
-    file_path = None
-    if 'clear' in request.file_name:
-        logger.info(f'Clear pattern file: {request.file_name.split(".")[0]}')
-        file_path = pattern_manager.get_clear_pattern_file(request.file_name.split('.')[0])
-        logger.info(f'Clear pattern file: {file_path}')
-    if not file_path:
-        file_path = os.path.join(pattern_manager.THETA_RHO_DIR, request.file_name)
-    if not os.path.exists(file_path):
-        logger.error(f'Theta-rho file not found: {file_path}')
-        raise HTTPException(status_code=404, detail="File not found")
-
-    try:
-        if not (state.conn.is_connected() if state.conn else False):
-            logger.warning("Attempted to run a pattern without a connection")
-            raise HTTPException(status_code=400, detail="Connection not established")
-        
-        if pattern_manager.pattern_lock.locked():
-            logger.warning("Attempted to run a pattern while another is already running")
-            raise HTTPException(status_code=409, detail="Another pattern is already running")
-            
-        files_to_run = [file_path]
-        logger.info(f'Running theta-rho file: {request.file_name} with pre_execution={request.pre_execution}')
-        
-        # Only include clear_pattern if it's not "none"
-        kwargs = {}
-        if request.pre_execution != "none":
-            kwargs['clear_pattern'] = request.pre_execution
-        
-        # Pass arguments properly
-        background_tasks.add_task(
-            pattern_manager.run_theta_rho_files,
-            files_to_run,  # First positional argument
-            **kwargs  # Spread keyword arguments
-        )
-        return {"success": True}
-    except HTTPException as http_exc:
-        logger.error(f'Failed to run theta-rho file {request.file_name}: {http_exc.detail}')
-        raise http_exc
-    except Exception as e:
-        logger.error(f'Failed to run theta-rho file {request.file_name}: {str(e)}')
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/stop_execution")
-async def stop_execution():
-    if not (state.conn.is_connected() if state.conn else False):
-        logger.warning("Attempted to stop without a connection")
-        raise HTTPException(status_code=400, detail="Connection not established")
-    pattern_manager.stop_actions()
-    return {"success": True}
-
-@app.post("/send_home")
-async def send_home():
-    try:
-        if not (state.conn.is_connected() if state.conn else False):
-            logger.warning("Attempted to move to home without a connection")
-            raise HTTPException(status_code=400, detail="Connection not established")
-        connection_manager.home()
-        return {"success": True}
-    except Exception as e:
-        logger.error(f"Failed to send home command: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/run_theta_rho_file/{file_name}")
-async def run_specific_theta_rho_file(file_name: str):
-    file_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_name)
-    if not os.path.exists(file_path):
-        raise HTTPException(status_code=404, detail="File not found")
-        
-    if not (state.conn.is_connected() if state.conn else False):
-        logger.warning("Attempted to run a pattern without a connection")
-        raise HTTPException(status_code=400, detail="Connection not established")
-
-    pattern_manager.run_theta_rho_file(file_path)
-    return {"success": True}
-
-class DeleteFileRequest(BaseModel):
-    file_name: str
-
-@app.post("/delete_theta_rho_file")
-async def delete_theta_rho_file(request: DeleteFileRequest):
-    if not request.file_name:
-        logger.warning("Delete theta-rho file request received without filename")
-        raise HTTPException(status_code=400, detail="No file name provided")
-
-    file_path = os.path.join(pattern_manager.THETA_RHO_DIR, request.file_name)
-    if not os.path.exists(file_path):
-        logger.error(f"Attempted to delete non-existent file: {file_path}")
-        raise HTTPException(status_code=404, detail="File not found")
-
-    try:
-        os.remove(file_path)
-        logger.info(f"Successfully deleted theta-rho file: {request.file_name}")
-        return {"success": True}
-    except Exception as e:
-        logger.error(f"Failed to delete theta-rho file {request.file_name}: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/move_to_center")
-async def move_to_center():
-    try:
-        if not (state.conn.is_connected() if state.conn else False):
-            logger.warning("Attempted to move to center without a connection")
-            raise HTTPException(status_code=400, detail="Connection not established")
-
-        logger.info("Moving device to center position")
-        pattern_manager.reset_theta()
-        pattern_manager.move_polar(0, 0)
-        return {"success": True}
-    except Exception as e:
-        logger.error(f"Failed to move to center: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/move_to_perimeter")
-async def move_to_perimeter():
-    try:
-        if not (state.conn.is_connected() if state.conn else False):
-            logger.warning("Attempted to move to perimeter without a connection")
-            raise HTTPException(status_code=400, detail="Connection not established")
-        pattern_manager.reset_theta()
-        pattern_manager.move_polar(0, 1)
-        return {"success": True}
-    except Exception as e:
-        logger.error(f"Failed to move to perimeter: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/preview_thr")
-async def preview_thr(request: DeleteFileRequest):
-    if not request.file_name:
-        logger.warning("Preview theta-rho request received without filename")
-        raise HTTPException(status_code=400, detail="No file name provided")
-
-    # Construct the full path to the pattern file to check existence
-    pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, request.file_name)
-    if not os.path.exists(pattern_file_path):
-        logger.error(f"Attempted to preview non-existent pattern file: {pattern_file_path}")
-        raise HTTPException(status_code=404, detail="Pattern file not found")
-
-    try:
-        cache_path = get_cache_path(request.file_name)
-        
-        if not os.path.exists(cache_path):
-            logger.info(f"Cache miss for {request.file_name}. Generating preview...")
-            # Attempt to generate the preview if it's missing
-            success = await generate_image_preview(request.file_name)
-            if not success or not os.path.exists(cache_path):
-                logger.error(f"Failed to generate or find preview for {request.file_name} after attempting generation.")
-                raise HTTPException(status_code=500, detail="Failed to generate preview image.")
-
-        # Try to get coordinates from metadata cache first
-        metadata = get_pattern_metadata(request.file_name)
-        if metadata:
-            first_coord_obj = metadata.get('first_coordinate')
-            last_coord_obj = metadata.get('last_coordinate')
-        else:
-            # Fallback to parsing file if metadata not cached (shouldn't happen after initial cache)
-            logger.debug(f"Metadata cache miss for {request.file_name}, parsing file")
-            coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_file_path)
-            first_coord = coordinates[0] if coordinates else None
-            last_coord = coordinates[-1] if coordinates else None
-            
-            # Format coordinates as objects with x and y properties
-            first_coord_obj = {"x": first_coord[0], "y": first_coord[1]} if first_coord else None
-            last_coord_obj = {"x": last_coord[0], "y": last_coord[1]} if last_coord else None
-
-        # Return JSON with preview URL and coordinates
-        # URL encode the file_name for the preview URL
-        encoded_filename = request.file_name.replace('/', '--')
-        return {
-            "preview_url": f"/preview/{encoded_filename}",
-            "first_coordinate": first_coord_obj,
-            "last_coordinate": last_coord_obj
-        }
-
-    except HTTPException:
-        raise
-    except Exception as e:
-        logger.error(f"Failed to generate or serve preview for {request.file_name}: {str(e)}")
-        raise HTTPException(status_code=500, detail=f"Failed to serve preview image: {str(e)}")
-
-@app.get("/preview/{encoded_filename}")
-async def serve_preview(encoded_filename: str):
-    """Serve a preview image for a pattern file."""
-    # Decode the filename by replacing -- with /
-    file_name = encoded_filename.replace('--', '/')
-    cache_path = get_cache_path(file_name)
-    
-    if not os.path.exists(cache_path):
-        logger.error(f"Preview image not found for {file_name}")
-        raise HTTPException(status_code=404, detail="Preview image not found")
-    
-    # Add caching headers
-    headers = {
-        "Cache-Control": "public, max-age=31536000",  # Cache for 1 year
-        "Content-Type": "image/webp",
-        "Accept-Ranges": "bytes"
-    }
-    
-    return FileResponse(
-        cache_path,
-        media_type="image/webp",
-        headers=headers
-    )
-
-@app.post("/send_coordinate")
-async def send_coordinate(request: CoordinateRequest):
-    if not (state.conn.is_connected() if state.conn else False):
-        logger.warning("Attempted to send coordinate without a connection")
-        raise HTTPException(status_code=400, detail="Connection not established")
-
-    try:
-        logger.debug(f"Sending coordinate: theta={request.theta}, rho={request.rho}")
-        pattern_manager.move_polar(request.theta, request.rho)
-        return {"success": True}
-    except Exception as e:
-        logger.error(f"Failed to send coordinate: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.get("/download/{filename}")
-async def download_file(filename: str):
-    return FileResponse(
-        os.path.join(pattern_manager.THETA_RHO_DIR, filename),
-        filename=filename
-    )
-
-@app.get("/serial_status")
-async def serial_status():
-    connected = state.conn.is_connected() if state.conn else False
-    port = state.port
-    logger.debug(f"Serial status check - connected: {connected}, port: {port}")
-    return {
-        "connected": connected,
-        "port": port
-    }
-
-@app.post("/pause_execution")
-async def pause_execution():
-    if pattern_manager.pause_execution():
-        return {"success": True, "message": "Execution paused"}
-    raise HTTPException(status_code=500, detail="Failed to pause execution")
-
-@app.post("/resume_execution")
-async def resume_execution():
-    if pattern_manager.resume_execution():
-        return {"success": True, "message": "Execution resumed"}
-    raise HTTPException(status_code=500, detail="Failed to resume execution")
-
-# Playlist endpoints
-@app.get("/list_all_playlists")
-async def list_all_playlists():
-    playlist_names = playlist_manager.list_all_playlists()
-    return playlist_names
-
-@app.get("/get_playlist")
-async def get_playlist(name: str):
-    if not name:
-        raise HTTPException(status_code=400, detail="Missing playlist name parameter")
-
-    playlist = playlist_manager.get_playlist(name)
-    if not playlist:
-        raise HTTPException(status_code=404, detail=f"Playlist '{name}' not found")
-
-    return playlist
-
-@app.post("/create_playlist")
-async def create_playlist(request: PlaylistRequest):
-    success = playlist_manager.create_playlist(request.playlist_name, request.files)
-    return {
-        "success": success,
-        "message": f"Playlist '{request.playlist_name}' created/updated"
-    }
-
-@app.post("/modify_playlist")
-async def modify_playlist(request: PlaylistRequest):
-    success = playlist_manager.modify_playlist(request.playlist_name, request.files)
-    return {
-        "success": success,
-        "message": f"Playlist '{request.playlist_name}' updated"
-    }
-
-@app.delete("/delete_playlist")
-async def delete_playlist(request: DeletePlaylistRequest):
-    success = playlist_manager.delete_playlist(request.playlist_name)
-    if not success:
-        raise HTTPException(
-            status_code=404,
-            detail=f"Playlist '{request.playlist_name}' not found"
-        )
-
-    return {
-        "success": True,
-        "message": f"Playlist '{request.playlist_name}' deleted"
-    }
-
-class AddToPlaylistRequest(BaseModel):
-    playlist_name: str
-    pattern: str
-
-@app.post("/add_to_playlist")
-async def add_to_playlist(request: AddToPlaylistRequest):
-    success = playlist_manager.add_to_playlist(request.playlist_name, request.pattern)
-    if not success:
-        raise HTTPException(status_code=404, detail="Playlist not found")
-    return {"success": True}
-
-@app.post("/run_playlist")
-async def run_playlist_endpoint(request: PlaylistRequest):
-    """Run a playlist with specified parameters."""
-    try:
-        if not (state.conn.is_connected() if state.conn else False):
-            logger.warning("Attempted to run a playlist without a connection")
-            raise HTTPException(status_code=400, detail="Connection not established")
-        
-        if not os.path.exists(playlist_manager.PLAYLISTS_FILE):
-            raise HTTPException(status_code=404, detail=f"Playlist '{request.playlist_name}' not found")
-
-        # Start the playlist execution
-        success, message = await playlist_manager.run_playlist(
-            request.playlist_name,
-            pause_time=request.pause_time,
-            clear_pattern=request.clear_pattern,
-            run_mode=request.run_mode,
-            shuffle=request.shuffle
-        )
-        if not success:
-            raise HTTPException(status_code=409, detail=message)
-
-        return {"message": f"Started playlist: {request.playlist_name}"}
-    except Exception as e:
-        logger.error(f"Error running playlist: {e}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/set_speed")
-async def set_speed(request: SpeedRequest):
-    try:
-        if not (state.conn.is_connected() if state.conn else False):
-            logger.warning("Attempted to change speed without a connection")
-            raise HTTPException(status_code=400, detail="Connection not established")
-        
-        if request.speed <= 0:
-            logger.warning(f"Invalid speed value received: {request.speed}")
-            raise HTTPException(status_code=400, detail="Invalid speed value")
-        
-        state.speed = request.speed
-        return {"success": True, "speed": request.speed}
-    except Exception as e:
-        logger.error(f"Failed to set speed: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.get("/check_software_update")
-async def check_updates():
-    update_info = update_manager.check_git_updates()
-    return update_info
-
-@app.post("/update_software")
-async def update_software():
-    logger.info("Starting software update process")
-    success, error_message, error_log = update_manager.update_software()
-    
-    if success:
-        logger.info("Software update completed successfully")
-        return {"success": True}
-    else:
-        logger.error(f"Software update failed: {error_message}\nDetails: {error_log}")
-        raise HTTPException(
-            status_code=500,
-            detail={
-                "error": error_message,
-                "details": error_log
-            }
-        )
-
-@app.post("/set_wled_ip")
-async def set_wled_ip(request: WLEDRequest):
-    state.wled_ip = request.wled_ip
-    state.led_controller = LEDController(request.wled_ip)
-    effect_idle(state.led_controller)
-    state.save()
-    logger.info(f"WLED IP updated: {request.wled_ip}")
-    return {"success": True, "wled_ip": state.wled_ip}
-
-@app.get("/get_wled_ip")
-async def get_wled_ip():
-    if not state.wled_ip:
-        raise HTTPException(status_code=404, detail="No WLED IP set")
-    return {"success": True, "wled_ip": state.wled_ip}
-
-@app.post("/skip_pattern")
-async def skip_pattern():
-    if not state.current_playlist:
-        raise HTTPException(status_code=400, detail="No playlist is currently running")
-    state.skip_requested = True
-    return {"success": True}
-
-@app.post("/preview_thr_batch")
-async def preview_thr_batch(request: dict):
-    start = time.time()
-    if not request.get("file_names"):
-        logger.warning("Batch preview request received without filenames")
-        raise HTTPException(status_code=400, detail="No file names provided")
-
-    file_names = request["file_names"]
-    if not isinstance(file_names, list):
-        raise HTTPException(status_code=400, detail="file_names must be a list")
-
-    headers = {
-        "Cache-Control": "public, max-age=3600",  # Cache for 1 hour
-        "Content-Type": "application/json"
-    }
-
-    results = {}
-    for file_name in file_names:
-        t1 = time.time()
-        try:
-            pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_name)
-            if not os.path.exists(pattern_file_path):
-                logger.warning(f"Pattern file not found: {pattern_file_path}")
-                results[file_name] = {"error": "Pattern file not found"}
-                continue
-
-            cache_path = get_cache_path(file_name)
-            
-            if not os.path.exists(cache_path):
-                logger.info(f"Cache miss for {file_name}. Generating preview...")
-                success = await generate_image_preview(file_name)
-                if not success or not os.path.exists(cache_path):
-                    logger.error(f"Failed to generate or find preview for {file_name}")
-                    results[file_name] = {"error": "Failed to generate preview"}
-                    continue
-
-            metadata = get_pattern_metadata(file_name)
-            if metadata:
-                first_coord_obj = metadata.get('first_coordinate')
-                last_coord_obj = metadata.get('last_coordinate')
-            else:
-                logger.debug(f"Metadata cache miss for {file_name}, parsing file")
-                coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_file_path)
-                first_coord = coordinates[0] if coordinates else None
-                last_coord = coordinates[-1] if coordinates else None
-                first_coord_obj = {"x": first_coord[0], "y": first_coord[1]} if first_coord else None
-                last_coord_obj = {"x": last_coord[0], "y": last_coord[1]} if last_coord else None
-
-            with open(cache_path, 'rb') as f:
-                image_data = f.read()
-            image_b64 = base64.b64encode(image_data).decode('utf-8')
-            results[file_name] = {
-                "image_data": f"data:image/webp;base64,{image_b64}",
-                "first_coordinate": first_coord_obj,
-                "last_coordinate": last_coord_obj
-            }
-        except Exception as e:
-            logger.error(f"Error processing {file_name}: {str(e)}")
-            results[file_name] = {"error": str(e)}
-        finally:
-            logger.debug(f"Processed {file_name} in {time.time() - t1:.2f}s")
-
-    logger.info(f"Total batch processing time: {time.time() - start:.2f}s for {len(file_names)} files")
-    return JSONResponse(content=results, headers=headers)
-
-@app.get("/playlists")
-async def playlists(request: Request):
-    logger.debug("Rendering playlists page")
-    return templates.TemplateResponse("playlists.html", {"request": request})
-
-@app.get("/image2sand")
-async def image2sand(request: Request):
-    return templates.TemplateResponse("image2sand.html", {"request": request})
-
-@app.get("/wled")
-async def wled(request: Request):
-    return templates.TemplateResponse("wled.html", {"request": request})
-
-@app.get("/table_control")
-async def table_control(request: Request):
-    return templates.TemplateResponse("table_control.html", {"request": request})
-
-@app.post("/rebuild_cache")
-async def rebuild_cache_endpoint():
-    """Trigger a rebuild of the pattern cache."""
-    try:
-        from modules.core.cache_manager import rebuild_cache
-        await rebuild_cache()
-        return {"success": True, "message": "Cache rebuild completed successfully"}
-    except Exception as e:
-        logger.error(f"Failed to rebuild cache: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
-def signal_handler(signum, frame):
-    """Handle shutdown signals gracefully but forcefully."""
-    logger.info("Received shutdown signal, cleaning up...")
-    try:
-        if state.led_controller:
-            state.led_controller.set_power(0)
-        # Run cleanup operations synchronously to ensure completion
-        pattern_manager.stop_actions()
-        state.save()
-        
-        logger.info("Cleanup completed")
-    except Exception as e:
-        logger.error(f"Error during cleanup: {str(e)}")
-    finally:
-        logger.info("Exiting application...")
-        os._exit(0)  # Force exit regardless of other threads
-
-def entrypoint():
-    import uvicorn
-    logger.info("Starting FastAPI server on port 8080...")
-    uvicorn.run(app, host="0.0.0.0", port=8080, workers=1)  # Set workers to 1 to avoid multiple signal handlers
-
-if __name__ == "__main__":
-    entrypoint()
+from fastapi import FastAPI, UploadFile, File, HTTPException, BackgroundTasks, WebSocket, WebSocketDisconnect, Request
+from fastapi.responses import JSONResponse, FileResponse, Response
+from fastapi.staticfiles import StaticFiles
+from fastapi.templating import Jinja2Templates
+from pydantic import BaseModel
+from typing import List, Optional, Tuple, Dict, Any, Union
+import atexit
+import os
+import logging
+from datetime import datetime, time
+from modules.connection import connection_manager
+from modules.core import pattern_manager
+from modules.core.pattern_manager import parse_theta_rho_file, THETA_RHO_DIR
+from modules.core import playlist_manager
+from modules.update import update_manager
+from modules.core.state import state
+from modules import mqtt
+import signal
+import sys
+import asyncio
+from contextlib import asynccontextmanager
+from modules.led.led_controller import LEDController, effect_idle
+import math
+from modules.core.cache_manager import generate_all_image_previews, get_cache_path, generate_image_preview, get_pattern_metadata
+import json
+import base64
+import time
+import argparse
+
+# Get log level from environment variable, default to INFO
+log_level_str = os.getenv('LOG_LEVEL', 'INFO').upper()
+log_level = getattr(logging, log_level_str, logging.INFO)
+
+logging.basicConfig(
+    level=log_level,
+    format='%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s',
+    handlers=[
+        logging.StreamHandler(),
+    ]
+)
+
+logger = logging.getLogger(__name__)
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    # Startup
+    logger.info("Starting Dune Weaver application...")
+    # Register signal handlers
+    signal.signal(signal.SIGINT, signal_handler)
+    signal.signal(signal.SIGTERM, signal_handler)
+    
+    try:
+        connection_manager.connect_device()
+    except Exception as e:
+        logger.warning(f"Failed to auto-connect to serial port: {str(e)}")
+        
+    try:
+        mqtt_handler = mqtt.init_mqtt()
+    except Exception as e:
+        logger.warning(f"Failed to initialize MQTT: {str(e)}")
+    
+    # Start cache generation in background if needed
+    try:
+        from modules.core.cache_manager import is_cache_generation_needed, generate_cache_background
+        if is_cache_generation_needed():
+            logger.info("Cache generation needed, starting background task...")
+            asyncio.create_task(generate_cache_background())
+        else:
+            logger.info("Cache is up to date, skipping generation")
+    except Exception as e:
+        logger.warning(f"Failed to start cache generation: {str(e)}")
+
+    yield  # This separates startup from shutdown code
+
+
+app = FastAPI(lifespan=lifespan)
+templates = Jinja2Templates(directory="templates")
+app.mount("/static", StaticFiles(directory="static"), name="static")
+
+# Pydantic models for request/response validation
+class ConnectRequest(BaseModel):
+    port: Optional[str] = None
+
+class CoordinateRequest(BaseModel):
+    theta: float
+    rho: float
+
+class PlaylistRequest(BaseModel):
+    playlist_name: str
+    files: List[str] = []
+    pause_time: float = 0
+    clear_pattern: Optional[str] = None
+    run_mode: str = "single"
+    shuffle: bool = False
+
+class PlaylistRunRequest(BaseModel):
+    playlist_name: str
+    pause_time: Optional[float] = 0
+    clear_pattern: Optional[str] = None
+    run_mode: Optional[str] = "single"
+    shuffle: Optional[bool] = False
+    start_time: Optional[str] = None
+    end_time: Optional[str] = None
+
+class SpeedRequest(BaseModel):
+    speed: float
+
+class WLEDRequest(BaseModel):
+    wled_ip: Optional[str] = None
+
+class DeletePlaylistRequest(BaseModel):
+    playlist_name: str
+
+class ThetaRhoRequest(BaseModel):
+    file_name: str
+    pre_execution: Optional[str] = "none"
+
+class GetCoordinatesRequest(BaseModel):
+    file_name: str
+
+# Store active WebSocket connections
+active_status_connections = set()
+active_cache_progress_connections = set()
+
+@app.websocket("/ws/status")
+async def websocket_status_endpoint(websocket: WebSocket):
+    await websocket.accept()
+    active_status_connections.add(websocket)
+    try:
+        while True:
+            status = pattern_manager.get_status()
+            try:
+                await websocket.send_json({
+                    "type": "status_update",
+                    "data": status
+                })
+            except RuntimeError as e:
+                if "close message has been sent" in str(e):
+                    break
+                raise
+            await asyncio.sleep(1)
+    except WebSocketDisconnect:
+        pass
+    finally:
+        active_status_connections.discard(websocket)
+        try:
+            await websocket.close()
+        except RuntimeError:
+            pass
+
+async def broadcast_status_update(status: dict):
+    """Broadcast status update to all connected clients."""
+    disconnected = set()
+    for websocket in active_status_connections:
+        try:
+            await websocket.send_json({
+                "type": "status_update",
+                "data": status
+            })
+        except WebSocketDisconnect:
+            disconnected.add(websocket)
+        except RuntimeError:
+            disconnected.add(websocket)
+    
+    active_status_connections.difference_update(disconnected)
+
+@app.websocket("/ws/cache-progress")
+async def websocket_cache_progress_endpoint(websocket: WebSocket):
+    await websocket.accept()
+    active_cache_progress_connections.add(websocket)
+    try:
+        while True:
+            from modules.core.cache_manager import get_cache_progress
+            progress = get_cache_progress()
+            try:
+                await websocket.send_json({
+                    "type": "cache_progress",
+                    "data": progress
+                })
+            except RuntimeError as e:
+                if "close message has been sent" in str(e):
+                    break
+                raise
+            await asyncio.sleep(0.5)  # Update every 500ms
+    except WebSocketDisconnect:
+        pass
+    finally:
+        active_cache_progress_connections.discard(websocket)
+        try:
+            await websocket.close()
+        except RuntimeError:
+            pass
+
+# FastAPI routes
+@app.get("/")
+async def index(request: Request):
+    return templates.TemplateResponse("index.html", {"request": request})
+
+@app.get("/settings")
+async def settings(request: Request):
+    return templates.TemplateResponse("settings.html", {"request": request})
+
+@app.get("/list_serial_ports")
+async def list_ports():
+    logger.debug("Listing available serial ports")
+    return connection_manager.list_serial_ports()
+
+@app.post("/connect")
+async def connect(request: ConnectRequest):
+    if not request.port:
+        state.conn = connection_manager.WebSocketConnection('ws://fluidnc.local:81')
+        connection_manager.device_init()
+        logger.info('Successfully connected to websocket ws://fluidnc.local:81')
+        return {"success": True}
+
+    try:
+        state.conn = connection_manager.SerialConnection(request.port)
+        connection_manager.device_init()
+        logger.info(f'Successfully connected to serial port {request.port}')
+        return {"success": True}
+    except Exception as e:
+        logger.error(f'Failed to connect to serial port {request.port}: {str(e)}')
+        raise HTTPException(status_code=500, detail=str(e))
+
+@app.post("/disconnect")
+async def disconnect():
+    try:
+        state.conn.close()
+        logger.info('Successfully disconnected from serial port')
+        return {"success": True}
+    except Exception as e:
+        logger.error(f'Failed to disconnect serial: {str(e)}')
+        raise HTTPException(status_code=500, detail=str(e))
+
+@app.post("/restart_connection")
+async def restart(request: ConnectRequest):
+    if not request.port:
+        logger.warning("Restart serial request received without port")
+        raise HTTPException(status_code=400, detail="No port provided")
+
+    try:
+        logger.info(f"Restarting connection on port {request.port}")
+        connection_manager.restart_connection()
+        return {"success": True}
+    except Exception as e:
+        logger.error(f"Failed to restart serial on port {request.port}: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+@app.get("/list_theta_rho_files")
+async def list_theta_rho_files():
+    logger.debug("Listing theta-rho files")
+    files = pattern_manager.list_theta_rho_files()
+    return sorted(files)
+
+@app.post("/upload_theta_rho")
+async def upload_theta_rho(file: UploadFile = File(...)):
+    """Upload a theta-rho file."""
+    try:
+        # Save the file
+        # Ensure custom_patterns directory exists
+        custom_patterns_dir = os.path.join(pattern_manager.THETA_RHO_DIR, "custom_patterns")
+        os.makedirs(custom_patterns_dir, exist_ok=True)
+        
+        file_path_in_patterns_dir = os.path.join("custom_patterns", file.filename)
+        full_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_path_in_patterns_dir)
+        
+        # Save the uploaded file
+        with open(full_file_path, "wb") as f:
+            f.write(await file.read())
+        
+        logger.info(f"File {file.filename} saved successfully")
+        
+        # Generate image preview for the new file with retry logic
+        max_retries = 3
+        for attempt in range(max_retries):
+            try:
+                logger.info(f"Generating preview for {file_path_in_patterns_dir} (attempt {attempt + 1}/{max_retries})")
+                success = await generate_image_preview(file_path_in_patterns_dir)
+                if success:
+                    logger.info(f"Preview generated successfully for {file_path_in_patterns_dir}")
+                    break
+                else:
+                    logger.warning(f"Preview generation failed for {file_path_in_patterns_dir} (attempt {attempt + 1})")
+                    if attempt < max_retries - 1:
+                        await asyncio.sleep(0.5)  # Small delay before retry
+            except Exception as e:
+                logger.error(f"Error generating preview for {file_path_in_patterns_dir} (attempt {attempt + 1}): {str(e)}")
+                if attempt < max_retries - 1:
+                    await asyncio.sleep(0.5)  # Small delay before retry
+        
+        return {"success": True, "message": f"File {file.filename} uploaded successfully"}
+    except Exception as e:
+        logger.error(f"Error uploading file: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
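A sketch of a matching client upload, assuming a local server; the multipart field name "file" mirrors the UploadFile parameter above, and the file name is hypothetical:

import requests

# The file is stored under custom_patterns/ server-side
with open("my_pattern.thr", "rb") as f:
    r = requests.post("http://localhost:8080/upload_theta_rho",
                      files={"file": ("my_pattern.thr", f)})
print(r.json())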
+
+@app.post("/get_theta_rho_coordinates")
+async def get_theta_rho_coordinates(request: GetCoordinatesRequest):
+    """Get theta-rho coordinates for animated preview."""
+    try:
+        # Handle file paths that may include the patterns directory prefix
+        file_name = request.file_name
+        if file_name.startswith('./patterns/'):
+            file_name = file_name[11:]  # Remove './patterns/' prefix
+        elif file_name.startswith('patterns/'):
+            file_name = file_name[9:]   # Remove 'patterns/' prefix
+            
+        file_path = os.path.join(THETA_RHO_DIR, file_name)
+        
+        if not os.path.exists(file_path):
+            raise HTTPException(status_code=404, detail=f"File {file_name} not found")
+        
+        # Parse the theta-rho file
+        coordinates = parse_theta_rho_file(file_path)
+        
+        if not coordinates:
+            raise HTTPException(status_code=400, detail="No valid coordinates found in file")
+        
+        return {
+            "success": True,
+            "coordinates": coordinates,
+            "total_points": len(coordinates)
+        }
+        
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Error getting coordinates for {request.file_name}: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
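parse_theta_rho_file itself is defined elsewhere; as a rough sketch of the conventional .thr format it presumably consumes (one whitespace-separated theta/rho pair per line, '#' starting a comment — an assumption about the format, not this project's parser):

def parse_thr_lines(lines):
    # Sketch only: tolerant of blank and comment lines, strict about pairs.
    coords = []
    for line in lines:
        line = line.split('#', 1)[0].strip()
        if not line:
            continue
        theta, rho = map(float, line.split())
        coords.append((theta, rho))
    return coords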
+
+@app.post("/run_theta_rho")
+async def run_theta_rho(request: ThetaRhoRequest, background_tasks: BackgroundTasks):
+    if not request.file_name:
+        logger.warning('Run theta-rho request received without file name')
+        raise HTTPException(status_code=400, detail="No file name provided")
+    
+    file_path = None
+    if 'clear' in request.file_name:
+        clear_name = request.file_name.split('.')[0]
+        file_path = pattern_manager.get_clear_pattern_file(clear_name)
+        logger.info(f'Resolved clear pattern {clear_name} to file: {file_path}')
+    if not file_path:
+        file_path = os.path.join(pattern_manager.THETA_RHO_DIR, request.file_name)
+    if not os.path.exists(file_path):
+        logger.error(f'Theta-rho file not found: {file_path}')
+        raise HTTPException(status_code=404, detail="File not found")
+
+    try:
+        if not (state.conn.is_connected() if state.conn else False):
+            logger.warning("Attempted to run a pattern without a connection")
+            raise HTTPException(status_code=400, detail="Connection not established")
+        
+        if pattern_manager.pattern_lock.locked():
+            logger.warning("Attempted to run a pattern while another is already running")
+            raise HTTPException(status_code=409, detail="Another pattern is already running")
+            
+        files_to_run = [file_path]
+        logger.info(f'Running theta-rho file: {request.file_name} with pre_execution={request.pre_execution}')
+        
+        # Only include clear_pattern if it's not "none"
+        kwargs = {}
+        if request.pre_execution != "none":
+            kwargs['clear_pattern'] = request.pre_execution
+        
+        # Run the pattern in the background so the request returns immediately
+        background_tasks.add_task(
+            pattern_manager.run_theta_rho_files,
+            files_to_run,
+            **kwargs
+        )
+        return {"success": True}
+    except HTTPException as http_exc:
+        logger.error(f'Failed to run theta-rho file {request.file_name}: {http_exc.detail}')
+        raise http_exc
+    except Exception as e:
+        logger.error(f'Failed to run theta-rho file {request.file_name}: {str(e)}')
+        raise HTTPException(status_code=500, detail=str(e))
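A client-side sketch of queuing a pattern through this endpoint, assuming a local server; valid pre_execution values other than "none" depend on pattern_manager and are not shown in this diff:

import requests

requests.post("http://localhost:8080/run_theta_rho",
              json={"file_name": "Butterfly.thr", "pre_execution": "none"})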
+
+@app.post("/stop_execution")
+async def stop_execution():
+    if not (state.conn.is_connected() if state.conn else False):
+        logger.warning("Attempted to stop without a connection")
+        raise HTTPException(status_code=400, detail="Connection not established")
+    pattern_manager.stop_actions()
+    return {"success": True}
+
+@app.post("/send_home")
+async def send_home():
+    try:
+        if not (state.conn.is_connected() if state.conn else False):
+            logger.warning("Attempted to move to home without a connection")
+            raise HTTPException(status_code=400, detail="Connection not established")
+        connection_manager.home()
+        return {"success": True}
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Failed to send home command: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+@app.post("/run_theta_rho_file/{file_name}")
+async def run_specific_theta_rho_file(file_name: str):
+    file_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_name)
+    if not os.path.exists(file_path):
+        raise HTTPException(status_code=404, detail="File not found")
+        
+    if not (state.conn.is_connected() if state.conn else False):
+        logger.warning("Attempted to run a pattern without a connection")
+        raise HTTPException(status_code=400, detail="Connection not established")
+
+    pattern_manager.run_theta_rho_file(file_path)
+    return {"success": True}
+
+class DeleteFileRequest(BaseModel):
+    file_name: str
+
+@app.post("/delete_theta_rho_file")
+async def delete_theta_rho_file(request: DeleteFileRequest):
+    if not request.file_name:
+        logger.warning("Delete theta-rho file request received without filename")
+        raise HTTPException(status_code=400, detail="No file name provided")
+
+    file_path = os.path.join(pattern_manager.THETA_RHO_DIR, request.file_name)
+    if not os.path.exists(file_path):
+        logger.error(f"Attempted to delete non-existent file: {file_path}")
+        raise HTTPException(status_code=404, detail="File not found")
+
+    try:
+        os.remove(file_path)
+        logger.info(f"Successfully deleted theta-rho file: {request.file_name}")
+        return {"success": True}
+    except Exception as e:
+        logger.error(f"Failed to delete theta-rho file {request.file_name}: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+@app.post("/move_to_center")
+async def move_to_center():
+    try:
+        if not (state.conn.is_connected() if state.conn else False):
+            logger.warning("Attempted to move to center without a connection")
+            raise HTTPException(status_code=400, detail="Connection not established")
+
+        logger.info("Moving device to center position")
+        pattern_manager.reset_theta()
+        pattern_manager.move_polar(0, 0)
+        return {"success": True}
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Failed to move to center: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+@app.post("/move_to_perimeter")
+async def move_to_perimeter():
+    try:
+        if not (state.conn.is_connected() if state.conn else False):
+            logger.warning("Attempted to move to perimeter without a connection")
+            raise HTTPException(status_code=400, detail="Connection not established")
+        pattern_manager.reset_theta()
+        pattern_manager.move_polar(0, 1)
+        return {"success": True}
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Failed to move to perimeter: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+@app.post("/preview_thr")
+async def preview_thr(request: DeleteFileRequest):
+    if not request.file_name:
+        logger.warning("Preview theta-rho request received without filename")
+        raise HTTPException(status_code=400, detail="No file name provided")
+
+    # Construct the full path to the pattern file to check existence
+    pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, request.file_name)
+    if not os.path.exists(pattern_file_path):
+        logger.error(f"Attempted to preview non-existent pattern file: {pattern_file_path}")
+        raise HTTPException(status_code=404, detail="Pattern file not found")
+
+    try:
+        cache_path = get_cache_path(request.file_name)
+        
+        if not os.path.exists(cache_path):
+            logger.info(f"Cache miss for {request.file_name}. Generating preview...")
+            # Attempt to generate the preview if it's missing
+            success = await generate_image_preview(request.file_name)
+            if not success or not os.path.exists(cache_path):
+                logger.error(f"Failed to generate or find preview for {request.file_name} after attempting generation.")
+                raise HTTPException(status_code=500, detail="Failed to generate preview image.")
+
+        # Try to get coordinates from metadata cache first
+        metadata = get_pattern_metadata(request.file_name)
+        if metadata:
+            first_coord_obj = metadata.get('first_coordinate')
+            last_coord_obj = metadata.get('last_coordinate')
+        else:
+            # Fallback to parsing file if metadata not cached (shouldn't happen after initial cache)
+            logger.debug(f"Metadata cache miss for {request.file_name}, parsing file")
+            coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_file_path)
+            first_coord = coordinates[0] if coordinates else None
+            last_coord = coordinates[-1] if coordinates else None
+            
+            # Format coordinates as objects with x and y properties
+            first_coord_obj = {"x": first_coord[0], "y": first_coord[1]} if first_coord else None
+            last_coord_obj = {"x": last_coord[0], "y": last_coord[1]} if last_coord else None
+
+        # Return JSON with preview URL and coordinates
+        # URL encode the file_name for the preview URL
+        encoded_filename = request.file_name.replace('/', '--')
+        return {
+            "preview_url": f"/preview/{encoded_filename}",
+            "first_coordinate": first_coord_obj,
+            "last_coordinate": last_coord_obj
+        }
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Failed to generate or serve preview for {request.file_name}: {str(e)}")
+        raise HTTPException(status_code=500, detail=f"Failed to serve preview image: {str(e)}")
+
+@app.get("/preview/{encoded_filename}")
+async def serve_preview(encoded_filename: str):
+    """Serve a preview image for a pattern file."""
+    # Decode the filename by replacing -- with /
+    file_name = encoded_filename.replace('--', '/')
+    cache_path = get_cache_path(file_name)
+    
+    if not os.path.exists(cache_path):
+        logger.error(f"Preview image not found for {file_name}")
+        raise HTTPException(status_code=404, detail="Preview image not found")
+    
+    # Add caching headers
+    headers = {
+        "Cache-Control": "public, max-age=31536000",  # Cache for 1 year
+        "Content-Type": "image/webp",
+        "Accept-Ranges": "bytes"
+    }
+    
+    return FileResponse(
+        cache_path,
+        media_type="image/webp",
+        headers=headers
+    )
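The '--' substitution used by these two endpoints is only reversible while no file name itself contains a literal '--'; a name like foo--bar.thr would decode to foo/bar.thr. A sketch of the round trip and its caveat:

def encode_preview_name(file_name: str) -> str:
    return file_name.replace('/', '--')

def decode_preview_name(encoded: str) -> str:
    return encoded.replace('--', '/')

# Round-trips for normal names...
assert decode_preview_name(encode_preview_name("custom_patterns/Hosta.thr")) == "custom_patterns/Hosta.thr"
# ...but not for names containing '--':
assert decode_preview_name(encode_preview_name("foo--bar.thr")) == "foo/bar.thr"  # collision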
+
+@app.post("/send_coordinate")
+async def send_coordinate(request: CoordinateRequest):
+    if not (state.conn.is_connected() if state.conn else False):
+        logger.warning("Attempted to send coordinate without a connection")
+        raise HTTPException(status_code=400, detail="Connection not established")
+
+    try:
+        logger.debug(f"Sending coordinate: theta={request.theta}, rho={request.rho}")
+        pattern_manager.move_polar(request.theta, request.rho)
+        return {"success": True}
+    except Exception as e:
+        logger.error(f"Failed to send coordinate: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
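For completeness, a coordinate-send sketch; theta is in radians, and rho appears normalized to [0, 1] given that move_to_center and move_to_perimeter above use 0 and 1 (an inference, not documented here):

import math
import requests

requests.post("http://localhost:8080/send_coordinate",
              json={"theta": math.pi / 2, "rho": 0.5})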
+
+@app.get("/download/{filename}")
+async def download_file(filename: str):
+    return FileResponse(
+        os.path.join(pattern_manager.THETA_RHO_DIR, filename),
+        filename=filename
+    )
+
+@app.get("/serial_status")
+async def serial_status():
+    connected = state.conn.is_connected() if state.conn else False
+    port = state.port
+    logger.debug(f"Serial status check - connected: {connected}, port: {port}")
+    return {
+        "connected": connected,
+        "port": port
+    }
+
+@app.post("/pause_execution")
+async def pause_execution():
+    if pattern_manager.pause_execution():
+        return {"success": True, "message": "Execution paused"}
+    raise HTTPException(status_code=500, detail="Failed to pause execution")
+
+@app.post("/resume_execution")
+async def resume_execution():
+    if pattern_manager.resume_execution():
+        return {"success": True, "message": "Execution resumed"}
+    raise HTTPException(status_code=500, detail="Failed to resume execution")
+
+# Playlist endpoints
+@app.get("/list_all_playlists")
+async def list_all_playlists():
+    playlist_names = playlist_manager.list_all_playlists()
+    return playlist_names
+
+@app.get("/get_playlist")
+async def get_playlist(name: str):
+    if not name:
+        raise HTTPException(status_code=400, detail="Missing playlist name parameter")
+
+    playlist = playlist_manager.get_playlist(name)
+    if not playlist:
+        raise HTTPException(status_code=404, detail=f"Playlist '{name}' not found")
+
+    return playlist
+
+@app.post("/create_playlist")
+async def create_playlist(request: PlaylistRequest):
+    success = playlist_manager.create_playlist(request.playlist_name, request.files)
+    return {
+        "success": success,
+        "message": f"Playlist '{request.playlist_name}' created/updated"
+    }
+
+@app.post("/modify_playlist")
+async def modify_playlist(request: PlaylistRequest):
+    success = playlist_manager.modify_playlist(request.playlist_name, request.files)
+    return {
+        "success": success,
+        "message": f"Playlist '{request.playlist_name}' updated"
+    }
+
+@app.delete("/delete_playlist")
+async def delete_playlist(request: DeletePlaylistRequest):
+    success = playlist_manager.delete_playlist(request.playlist_name)
+    if not success:
+        raise HTTPException(
+            status_code=404,
+            detail=f"Playlist '{request.playlist_name}' not found"
+        )
+
+    return {
+        "success": True,
+        "message": f"Playlist '{request.playlist_name}' deleted"
+    }
+
+class AddToPlaylistRequest(BaseModel):
+    playlist_name: str
+    pattern: str
+
+@app.post("/add_to_playlist")
+async def add_to_playlist(request: AddToPlaylistRequest):
+    success = playlist_manager.add_to_playlist(request.playlist_name, request.pattern)
+    if not success:
+        raise HTTPException(status_code=404, detail="Playlist not found")
+    return {"success": True}
+
+@app.post("/run_playlist")
+async def run_playlist_endpoint(request: PlaylistRequest):
+    """Run a playlist with specified parameters."""
+    try:
+        if not (state.conn.is_connected() if state.conn else False):
+            logger.warning("Attempted to run a playlist without a connection")
+            raise HTTPException(status_code=400, detail="Connection not established")
+        
+        if not playlist_manager.get_playlist(request.playlist_name):
+            raise HTTPException(status_code=404, detail=f"Playlist '{request.playlist_name}' not found")
+
+        # Start the playlist execution
+        success, message = await playlist_manager.run_playlist(
+            request.playlist_name,
+            pause_time=request.pause_time,
+            clear_pattern=request.clear_pattern,
+            run_mode=request.run_mode,
+            shuffle=request.shuffle
+        )
+        if not success:
+            raise HTTPException(status_code=409, detail=message)
+
+        return {"message": f"Started playlist: {request.playlist_name}"}
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Error running playlist: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
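A sketch of starting a playlist from a client; the field set is inferred from how run_playlist_endpoint reads the request, and the run_mode value is illustrative:

import requests

requests.post("http://localhost:8080/run_playlist",
              json={"playlist_name": "favorites",
                    "pause_time": 5,
                    "clear_pattern": "none",
                    "run_mode": "loop",      # illustrative value
                    "shuffle": True})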
+
+@app.post("/set_speed")
+async def set_speed(request: SpeedRequest):
+    try:
+        if not (state.conn.is_connected() if state.conn else False):
+            logger.warning("Attempted to change speed without a connection")
+            raise HTTPException(status_code=400, detail="Connection not established")
+        
+        if request.speed <= 0:
+            logger.warning(f"Invalid speed value received: {request.speed}")
+            raise HTTPException(status_code=400, detail="Invalid speed value")
+        
+        state.speed = request.speed
+        return {"success": True, "speed": request.speed}
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Failed to set speed: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+@app.get("/check_software_update")
+async def check_updates():
+    update_info = update_manager.check_git_updates()
+    return update_info
+
+@app.post("/update_software")
+async def update_software():
+    logger.info("Starting software update process")
+    success, error_message, error_log = update_manager.update_software()
+    
+    if success:
+        logger.info("Software update completed successfully")
+        return {"success": True}
+    else:
+        logger.error(f"Software update failed: {error_message}\nDetails: {error_log}")
+        raise HTTPException(
+            status_code=500,
+            detail={
+                "error": error_message,
+                "details": error_log
+            }
+        )
+
+@app.post("/set_wled_ip")
+async def set_wled_ip(request: WLEDRequest):
+    state.wled_ip = request.wled_ip
+    state.led_controller = LEDController(request.wled_ip)
+    effect_idle(state.led_controller)
+    state.save()
+    logger.info(f"WLED IP updated: {request.wled_ip}")
+    return {"success": True, "wled_ip": state.wled_ip}
+
+@app.get("/get_wled_ip")
+async def get_wled_ip():
+    if not state.wled_ip:
+        raise HTTPException(status_code=404, detail="No WLED IP set")
+    return {"success": True, "wled_ip": state.wled_ip}
+
+@app.post("/skip_pattern")
+async def skip_pattern():
+    if not state.current_playlist:
+        raise HTTPException(status_code=400, detail="No playlist is currently running")
+    state.skip_requested = True
+    return {"success": True}
+
+@app.post("/preview_thr_batch")
+async def preview_thr_batch(request: dict):
+    start = time.time()
+    if not request.get("file_names"):
+        logger.warning("Batch preview request received without filenames")
+        raise HTTPException(status_code=400, detail="No file names provided")
+
+    file_names = request["file_names"]
+    if not isinstance(file_names, list):
+        raise HTTPException(status_code=400, detail="file_names must be a list")
+
+    headers = {
+        "Cache-Control": "public, max-age=3600",  # Cache for 1 hour
+        "Content-Type": "application/json"
+    }
+
+    results = {}
+    for file_name in file_names:
+        t1 = time.time()
+        try:
+            pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_name)
+            if not os.path.exists(pattern_file_path):
+                logger.warning(f"Pattern file not found: {pattern_file_path}")
+                results[file_name] = {"error": "Pattern file not found"}
+                continue
+
+            cache_path = get_cache_path(file_name)
+            
+            if not os.path.exists(cache_path):
+                logger.info(f"Cache miss for {file_name}. Generating preview...")
+                success = await generate_image_preview(file_name)
+                if not success or not os.path.exists(cache_path):
+                    logger.error(f"Failed to generate or find preview for {file_name}")
+                    results[file_name] = {"error": "Failed to generate preview"}
+                    continue
+
+            metadata = get_pattern_metadata(file_name)
+            if metadata:
+                first_coord_obj = metadata.get('first_coordinate')
+                last_coord_obj = metadata.get('last_coordinate')
+            else:
+                logger.debug(f"Metadata cache miss for {file_name}, parsing file")
+                coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_file_path)
+                first_coord = coordinates[0] if coordinates else None
+                last_coord = coordinates[-1] if coordinates else None
+                first_coord_obj = {"x": first_coord[0], "y": first_coord[1]} if first_coord else None
+                last_coord_obj = {"x": last_coord[0], "y": last_coord[1]} if last_coord else None
+
+            with open(cache_path, 'rb') as f:
+                image_data = f.read()
+            image_b64 = base64.b64encode(image_data).decode('utf-8')
+            results[file_name] = {
+                "image_data": f"data:image/webp;base64,{image_b64}",
+                "first_coordinate": first_coord_obj,
+                "last_coordinate": last_coord_obj
+            }
+        except Exception as e:
+            logger.error(f"Error processing {file_name}: {str(e)}")
+            results[file_name] = {"error": str(e)}
+        finally:
+            logger.debug(f"Processed {file_name} in {time.time() - t1:.2f}s")
+
+    logger.info(f"Total batch processing time: {time.time() - start:.2f}s for {len(file_names)} files")
+    return JSONResponse(content=results, headers=headers)
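A client sketch that decodes the base64 data URLs returned by the batch endpoint back into .webp files (local server assumed):

import base64
import requests

resp = requests.post("http://localhost:8080/preview_thr_batch",
                     json={"file_names": ["Butterfly.thr", "custom_patterns/Hosta.thr"]})
for name, entry in resp.json().items():
    if "error" in entry:
        continue
    _, b64 = entry["image_data"].split(",", 1)  # strip the data:image/webp;base64, header
    with open(name.replace("/", "--") + ".webp", "wb") as out:
        out.write(base64.b64decode(b64))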
+
+@app.get("/playlists")
+async def playlists(request: Request):
+    logger.debug("Rendering playlists page")
+    return templates.TemplateResponse("playlists.html", {"request": request})
+
+@app.get("/image2sand")
+async def image2sand(request: Request):
+    return templates.TemplateResponse("image2sand.html", {"request": request})
+
+@app.get("/wled")
+async def wled(request: Request):
+    return templates.TemplateResponse("wled.html", {"request": request})
+
+@app.get("/table_control")
+async def table_control(request: Request):
+    return templates.TemplateResponse("table_control.html", {"request": request})
+
+@app.get("/cache-progress")
+async def get_cache_progress_endpoint():
+    """Get the current cache generation progress."""
+    from modules.core.cache_manager import get_cache_progress
+    return get_cache_progress()
+
+@app.post("/rebuild_cache")
+async def rebuild_cache_endpoint():
+    """Trigger a rebuild of the pattern cache."""
+    try:
+        from modules.core.cache_manager import rebuild_cache
+        await rebuild_cache()
+        return {"success": True, "message": "Cache rebuild completed successfully"}
+    except Exception as e:
+        logger.error(f"Failed to rebuild cache: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
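Because /rebuild_cache awaits the rebuild, the POST only returns once the work finishes; progress is meant to be watched from a second client via /cache-progress. A polling sketch (local server assumed):

import time
import requests

# In one client: requests.post("http://localhost:8080/rebuild_cache")
# In another, while it runs:
while True:
    p = requests.get("http://localhost:8080/cache-progress").json()
    print(f"{p['stage']}: {p['processed_files']}/{p['total_files']} {p['current_file']}")
    if not p["is_running"]:
        break
    time.sleep(1)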
+
+def signal_handler(signum, frame):
+    """Handle shutdown signals gracefully but forcefully."""
+    logger.info("Received shutdown signal, cleaning up...")
+    try:
+        if state.led_controller:
+            state.led_controller.set_power(0)
+        # Run cleanup operations synchronously to ensure completion
+        pattern_manager.stop_actions()
+        state.save()
+        
+        logger.info("Cleanup completed")
+    except Exception as e:
+        logger.error(f"Error during cleanup: {str(e)}")
+    finally:
+        logger.info("Exiting application...")
+        os._exit(0)  # Force exit regardless of other threads
+
+def entrypoint():
+    import uvicorn
+    logger.info("Starting FastAPI server on port 8080...")
+    uvicorn.run(app, host="0.0.0.0", port=8080, workers=1)  # Set workers to 1 to avoid multiple signal handlers
+
+if __name__ == "__main__":
+    entrypoint()

modules/core/cache_manager.py (197 additions, 80 deletions)

@@ -8,6 +8,16 @@ from modules.core.pattern_manager import list_theta_rho_files, THETA_RHO_DIR, pa
 
 logger = logging.getLogger(__name__)
 
+# Global cache progress state
+cache_progress = {
+    "is_running": False,
+    "total_files": 0,
+    "processed_files": 0,
+    "current_file": "",
+    "stage": "idle",  # idle, metadata, images, complete
+    "error": None
+}
+
 # Constants
 CACHE_DIR = os.path.join(THETA_RHO_DIR, "cached_images")
 METADATA_CACHE_FILE = "metadata_cache.json"  # Now in root directory
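The cache_progress dict above is what the /cache-progress endpoint returns; a representative payload mid-run (values illustrative):

# Example /cache-progress response while image generation is under way:
{
    "is_running": True,
    "total_files": 120,
    "processed_files": 45,
    "current_file": "custom_patterns/Hosta.thr",
    "stage": "images",
    "error": None,
}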
@@ -205,93 +215,138 @@ async def generate_image_preview(pattern_file):
         return False
 
 async def generate_all_image_previews():
-    """Generate image previews for all pattern files."""
-    ensure_cache_dir()
-    
-    pattern_files = [f for f in list_theta_rho_files() if f.endswith('.thr')]
-    
-    if not pattern_files:
-        logger.info("No .thr pattern files found. Skipping image preview generation.")
-        return
-    
-    patterns_to_cache = [f for f in pattern_files if needs_cache(f)]
-    total_files = len(patterns_to_cache)
-    skipped_files = len(pattern_files) - total_files
+    """Generate image previews for all pattern files with progress tracking."""
+    global cache_progress
     
-    if total_files == 0:
-        logger.info(f"All {skipped_files} pattern files already have image previews. Skipping image generation.")
-        return
+    try:
+        ensure_cache_dir()
         
-    logger.info(f"Generating image cache for {total_files} uncached .thr patterns ({skipped_files} already cached)...")
-    
-    batch_size = 5
-    successful = 0
-    for i in range(0, total_files, batch_size):
-        batch = patterns_to_cache[i:i + batch_size]
-        tasks = [generate_image_preview(file) for file in batch]
-        results = await asyncio.gather(*tasks)
-        successful += sum(1 for r in results if r)
+        pattern_files = [f for f in list_theta_rho_files() if f.endswith('.thr')]
         
-        # Log progress
-        progress = min(i + batch_size, total_files)
-        logger.info(f"Image cache generation progress: {progress}/{total_files} files processed")
-    
-    logger.info(f"Image cache generation completed: {successful}/{total_files} patterns cached successfully, {skipped_files} patterns skipped (already cached)")
+        if not pattern_files:
+            logger.info("No .thr pattern files found. Skipping image preview generation.")
+            return
+        
+        patterns_to_cache = [f for f in pattern_files if needs_cache(f)]
+        total_files = len(patterns_to_cache)
+        skipped_files = len(pattern_files) - total_files
+        
+        if total_files == 0:
+            logger.info(f"All {skipped_files} pattern files already have image previews. Skipping image generation.")
+            return
+            
+        # Update progress state
+        cache_progress.update({
+            "stage": "images",
+            "total_files": total_files,
+            "processed_files": 0,
+            "current_file": "",
+            "error": None
+        })
+        
+        logger.info(f"Generating image cache for {total_files} uncached .thr patterns ({skipped_files} already cached)...")
+        
+        batch_size = 5
+        successful = 0
+        for i in range(0, total_files, batch_size):
+            batch = patterns_to_cache[i:i + batch_size]
+            tasks = [generate_image_preview(file) for file in batch]
+            results = await asyncio.gather(*tasks)
+            successful += sum(1 for r in results if r)
+            
+            # Update progress (the last file of the batch just processed)
+            cache_progress["processed_files"] = min(i + batch_size, total_files)
+            cache_progress["current_file"] = batch[-1]
+            
+            # Log progress
+            progress = min(i + batch_size, total_files)
+            logger.info(f"Image cache generation progress: {progress}/{total_files} files processed")
+        
+        logger.info(f"Image cache generation completed: {successful}/{total_files} patterns cached successfully, {skipped_files} patterns skipped (already cached)")
+        
+    except Exception as e:
+        logger.error(f"Error during image cache generation: {str(e)}")
+        cache_progress["error"] = str(e)
+        raise
 
 async def generate_metadata_cache():
-    """Generate metadata cache for all pattern files."""
-    logger.info("Starting metadata cache generation...")
-    
-    # Get all pattern files using the same function as the rest of the codebase
-    pattern_files = list_theta_rho_files()
+    """Generate metadata cache for all pattern files with progress tracking."""
+    global cache_progress
     
-    if not pattern_files:
-        logger.info("No pattern files found. Skipping metadata cache generation.")
-        return
-    
-    # Filter out files that already have valid metadata cache
-    files_to_process = []
-    for file_name in pattern_files:
-        if get_pattern_metadata(file_name) is None:
-            files_to_process.append(file_name)
-    
-    total_files = len(files_to_process)
-    skipped_files = len(pattern_files) - total_files
-    
-    if total_files == 0:
-        logger.info(f"All {skipped_files} files already have metadata cache. Skipping metadata generation.")
-        return
+    try:
+        logger.info("Starting metadata cache generation...")
         
-    logger.info(f"Generating metadata cache for {total_files} new files ({skipped_files} files already cached)...")
-    
-    # Process in batches
-    batch_size = 5
-    successful = 0
-    for i in range(0, total_files, batch_size):
-        batch = files_to_process[i:i + batch_size]
-        tasks = []
-        for file_name in batch:
-            pattern_path = os.path.join(THETA_RHO_DIR, file_name)
-            try:
-                # Parse file to get metadata
-                coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_path)
-                if coordinates:
-                    first_coord = {"x": coordinates[0][0], "y": coordinates[0][1]}
-                    last_coord = {"x": coordinates[-1][0], "y": coordinates[-1][1]}
-                    total_coords = len(coordinates)
-                    
-                    # Cache the metadata
-                    cache_pattern_metadata(file_name, first_coord, last_coord, total_coords)
-                    successful += 1
-                    logger.debug(f"Generated metadata for {file_name}")
-            except Exception as e:
-                logger.error(f"Failed to generate metadata for {file_name}: {str(e)}")
+        # Get all pattern files using the same function as the rest of the codebase
+        pattern_files = list_theta_rho_files()
         
-        # Log progress
-        progress = min(i + batch_size, total_files)
-        logger.info(f"Metadata cache generation progress: {progress}/{total_files} files processed")
-    
-    logger.info(f"Metadata cache generation completed: {successful}/{total_files} patterns cached successfully, {skipped_files} patterns skipped (already cached)")
+        if not pattern_files:
+            logger.info("No pattern files found. Skipping metadata cache generation.")
+            return
+        
+        # Filter out files that already have valid metadata cache
+        files_to_process = []
+        for file_name in pattern_files:
+            if get_pattern_metadata(file_name) is None:
+                files_to_process.append(file_name)
+        
+        total_files = len(files_to_process)
+        skipped_files = len(pattern_files) - total_files
+        
+        if total_files == 0:
+            logger.info(f"All {skipped_files} files already have metadata cache. Skipping metadata generation.")
+            return
+            
+        # Update progress state
+        cache_progress.update({
+            "stage": "metadata",
+            "total_files": total_files,
+            "processed_files": 0,
+            "current_file": "",
+            "error": None
+        })
+        
+        logger.info(f"Generating metadata cache for {total_files} new files ({skipped_files} files already cached)...")
+        
+        # Process in batches
+        batch_size = 5
+        successful = 0
+        for i in range(0, total_files, batch_size):
+            batch = files_to_process[i:i + batch_size]
+            tasks = []
+            for file_name in batch:
+                pattern_path = os.path.join(THETA_RHO_DIR, file_name)
+                try:
+                    # Parse file to get metadata
+                    coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_path)
+                    if coordinates:
+                        first_coord = {"x": coordinates[0][0], "y": coordinates[0][1]}
+                        last_coord = {"x": coordinates[-1][0], "y": coordinates[-1][1]}
+                        total_coords = len(coordinates)
+                        
+                        # Cache the metadata
+                        cache_pattern_metadata(file_name, first_coord, last_coord, total_coords)
+                        successful += 1
+                        logger.debug(f"Generated metadata for {file_name}")
+                        
+                        # Update current file being processed
+                        cache_progress["current_file"] = file_name
+                except Exception as e:
+                    logger.error(f"Failed to generate metadata for {file_name}: {str(e)}")
+            
+            # Update progress
+            cache_progress["processed_files"] = min(i + batch_size, total_files)
+            
+            # Log progress
+            progress = min(i + batch_size, total_files)
+            logger.info(f"Metadata cache generation progress: {progress}/{total_files} files processed")
+        
+        logger.info(f"Metadata cache generation completed: {successful}/{total_files} patterns cached successfully, {skipped_files} patterns skipped (already cached)")
+        
+    except Exception as e:
+        logger.error(f"Error during metadata cache generation: {str(e)}")
+        cache_progress["error"] = str(e)
+        raise
 
 async def rebuild_cache():
     """Rebuild the entire cache for all pattern files."""
@@ -326,4 +381,66 @@ async def rebuild_cache():
         progress = min(i + batch_size, total_files)
         logger.info(f"Image preview generation progress: {progress}/{total_files} files processed")
     
-    logger.info(f"Cache rebuild completed: {successful}/{total_files} patterns cached successfully")
+    logger.info(f"Cache rebuild completed: {successful}/{total_files} patterns cached successfully")
+
+async def generate_cache_background():
+    """Run cache generation in the background with progress tracking."""
+    global cache_progress
+    
+    try:
+        cache_progress.update({
+            "is_running": True,
+            "stage": "starting",
+            "total_files": 0,
+            "processed_files": 0,
+            "current_file": "",
+            "error": None
+        })
+        
+        # First generate metadata cache
+        await generate_metadata_cache()
+        
+        # Then generate image previews
+        await generate_all_image_previews()
+        
+        # Mark as complete
+        cache_progress.update({
+            "is_running": False,
+            "stage": "complete",
+            "current_file": "",
+            "error": None
+        })
+        
+        logger.info("Background cache generation completed successfully")
+        
+    except Exception as e:
+        logger.error(f"Background cache generation failed: {str(e)}")
+        cache_progress.update({
+            "is_running": False,
+            "stage": "error",
+            "error": str(e)
+        })
+        raise
+
+def get_cache_progress():
+    """Get the current cache generation progress."""
+    global cache_progress
+    return cache_progress.copy()
+
+def is_cache_generation_needed():
+    """Check if cache generation is needed."""
+    pattern_files = [f for f in list_theta_rho_files() if f.endswith('.thr')]
+    
+    if not pattern_files:
+        return False
+    
+    # Check if any files need caching
+    patterns_to_cache = [f for f in pattern_files if needs_cache(f)]
+    
+    # Check metadata cache
+    files_needing_metadata = [
+        f for f in pattern_files if get_pattern_metadata(f) is None
+    ]
+    
+    return len(patterns_to_cache) > 0 or len(files_needing_metadata) > 0
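This diff does not show where generate_cache_background() gets launched; one plausible wiring, assuming a FastAPI startup hook in main.py (hypothetical, not part of this change):

import asyncio

@app.on_event("startup")
async def start_cache_generation():
    from modules.core.cache_manager import (
        generate_cache_background,
        is_cache_generation_needed,
    )
    # Kick off cache generation without blocking server startup
    if is_cache_generation_needed():
        asyncio.create_task(generate_cache_background())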

Binary files changed (no textual diff shown): regenerated .thr.webp preview images under patterns/cached_images_bk/ and patterns/cached_images_bk/custom_patterns/.

Some files were not shown because too many files changed in this diff.