main.py

from fastapi import FastAPI, UploadFile, File, HTTPException, BackgroundTasks, WebSocket, WebSocketDisconnect, Request
from fastapi.responses import JSONResponse, FileResponse, Response
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel
from typing import List, Optional, Tuple, Dict, Any, Union
import atexit
import os
import logging
from datetime import datetime
from modules.connection import connection_manager
from modules.core import pattern_manager
from modules.core.pattern_manager import parse_theta_rho_file, THETA_RHO_DIR
from modules.core import playlist_manager
from modules.update import update_manager
from modules.core.state import state
from modules import mqtt
import signal
import sys
import asyncio
from contextlib import asynccontextmanager
from modules.led.led_controller import LEDController, effect_idle
import math
from modules.core.cache_manager import generate_all_image_previews, get_cache_path, generate_image_preview, get_pattern_metadata
from modules.core.version_manager import version_manager
import json
import base64
import time
import argparse
from concurrent.futures import ProcessPoolExecutor
import multiprocessing

# Get log level from environment variable, default to INFO
log_level_str = os.getenv('LOG_LEVEL', 'INFO').upper()
log_level = getattr(logging, log_level_str, logging.INFO)

# Create a process pool for CPU-intensive tasks.
# Limit to a reasonable number of workers for embedded systems:
# maximum 3 workers (leaving one core for motion control), minimum 1.
cpu_count = multiprocessing.cpu_count()
process_pool_size = min(3, max(1, cpu_count - 1))
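# For example, on a 4-core board this gives min(3, max(1, 3)) = 3 workers,
# while a single-core board still gets min(3, max(1, 0)) = 1 worker.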
process_pool = None  # Will be initialized in lifespan

logging.basicConfig(
    level=log_level,
    format='%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(),
    ],
)
logger = logging.getLogger(__name__)
def normalize_file_path(file_path: str) -> str:
    """Normalize file path separators for consistent cross-platform handling."""
    if not file_path:
        return ''
    # First normalize path separators
    normalized = file_path.replace('\\', '/')
    # Remove only the patterns directory prefix from the beginning, not 'patterns' folders within the path
    if normalized.startswith('./patterns/'):
        normalized = normalized[11:]
    elif normalized.startswith('patterns/'):
        normalized = normalized[9:]
    return normalized
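# Example (file name is illustrative): a Windows-style path like '.\\patterns\\custom_patterns\\spiral.thr'
# normalizes to 'custom_patterns/spiral.thr'.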
@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup
    logger.info("Starting Dune Weaver application...")

    # Register signal handlers
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    # Initialize process pool for CPU-intensive tasks
    global process_pool
    process_pool = ProcessPoolExecutor(max_workers=process_pool_size)
    logger.info(f"Initialized process pool with {process_pool_size} workers (detected {cpu_count} cores total)")

    try:
        connection_manager.connect_device()
    except Exception as e:
        logger.warning(f"Failed to auto-connect to serial port: {str(e)}")

    # Check if auto_play mode is enabled and auto-play the playlist (right after the connection attempt)
    if state.auto_play_enabled and state.auto_play_playlist:
        logger.info(f"auto_play mode enabled, checking for connection before auto-playing playlist: {state.auto_play_playlist}")
        try:
            # Check if we have a valid connection before starting the playlist
            if state.conn and hasattr(state.conn, 'is_connected') and state.conn.is_connected():
                logger.info(f"Connection available, starting auto-play playlist: {state.auto_play_playlist} with options: run_mode={state.auto_play_run_mode}, pause_time={state.auto_play_pause_time}, clear_pattern={state.auto_play_clear_pattern}, shuffle={state.auto_play_shuffle}")
                asyncio.create_task(playlist_manager.run_playlist(
                    state.auto_play_playlist,
                    pause_time=state.auto_play_pause_time,
                    clear_pattern=state.auto_play_clear_pattern,
                    run_mode=state.auto_play_run_mode,
                    shuffle=state.auto_play_shuffle
                ))
            else:
                logger.warning("No hardware connection available, skipping auto_play mode auto-play")
        except Exception as e:
            logger.error(f"Failed to auto-play auto_play playlist: {str(e)}")

    try:
        mqtt_handler = mqtt.init_mqtt()
    except Exception as e:
        logger.warning(f"Failed to initialize MQTT: {str(e)}")

    # Schedule cache generation check for later (non-blocking startup)
    async def delayed_cache_check():
        """Check and generate cache in background."""
        try:
            logger.info("Starting cache check...")
            from modules.core.cache_manager import is_cache_generation_needed_async, generate_cache_background
            if await is_cache_generation_needed_async():
                logger.info("Cache generation needed, starting background task...")
                asyncio.create_task(generate_cache_background())  # Don't await - run in background
            else:
                logger.info("Cache is up to date, skipping generation")
        except Exception as e:
            logger.warning(f"Failed during cache generation: {str(e)}")

    # Start cache check in background immediately
    asyncio.create_task(delayed_cache_check())

    yield  # This separates startup from shutdown code

    # Shutdown
    logger.info("Shutting down Dune Weaver application...")

    # Shutdown process pool
    if process_pool:
        process_pool.shutdown(wait=True)
        logger.info("Process pool shutdown complete")
app = FastAPI(lifespan=lifespan)
templates = Jinja2Templates(directory="templates")
app.mount("/static", StaticFiles(directory="static"), name="static")


# Pydantic models for request/response validation
class ConnectRequest(BaseModel):
    port: Optional[str] = None

class AutoPlayModeRequest(BaseModel):
    enabled: bool
    playlist: Optional[str] = None
    run_mode: Optional[str] = "loop"
    pause_time: Optional[float] = 5.0
    clear_pattern: Optional[str] = "adaptive"
    shuffle: Optional[bool] = False

class TimeSlot(BaseModel):
    start_time: str  # HH:MM format
    end_time: str  # HH:MM format
    days: str  # "daily", "weekdays", "weekends", or "custom"
    custom_days: Optional[List[str]] = []  # ["monday", "tuesday", etc.]

class ScheduledPauseRequest(BaseModel):
    enabled: bool
    control_wled: Optional[bool] = False
    time_slots: List[TimeSlot] = []

class CoordinateRequest(BaseModel):
    theta: float
    rho: float

class PlaylistRequest(BaseModel):
    playlist_name: str
    files: List[str] = []
    pause_time: float = 0
    clear_pattern: Optional[str] = None
    run_mode: str = "single"
    shuffle: bool = False

class PlaylistRunRequest(BaseModel):
    playlist_name: str
    pause_time: Optional[float] = 0
    clear_pattern: Optional[str] = None
    run_mode: Optional[str] = "single"
    shuffle: Optional[bool] = False
    start_time: Optional[str] = None
    end_time: Optional[str] = None

class SpeedRequest(BaseModel):
    speed: float

class WLEDRequest(BaseModel):
    wled_ip: Optional[str] = None

class DeletePlaylistRequest(BaseModel):
    playlist_name: str

class ThetaRhoRequest(BaseModel):
    file_name: str
    pre_execution: Optional[str] = "none"

class GetCoordinatesRequest(BaseModel):
    file_name: str


# Store active WebSocket connections
active_status_connections = set()
active_cache_progress_connections = set()
@app.websocket("/ws/status")
async def websocket_status_endpoint(websocket: WebSocket):
    await websocket.accept()
    active_status_connections.add(websocket)
    try:
        while True:
            status = pattern_manager.get_status()
            try:
                await websocket.send_json({
                    "type": "status_update",
                    "data": status
                })
            except RuntimeError as e:
                if "close message has been sent" in str(e):
                    break
                raise
            await asyncio.sleep(1)
    except WebSocketDisconnect:
        pass
    finally:
        active_status_connections.discard(websocket)
        try:
            await websocket.close()
        except RuntimeError:
            pass

async def broadcast_status_update(status: dict):
    """Broadcast status update to all connected clients."""
    disconnected = set()
    for websocket in active_status_connections:
        try:
            await websocket.send_json({
                "type": "status_update",
                "data": status
            })
        except WebSocketDisconnect:
            disconnected.add(websocket)
        except RuntimeError:
            disconnected.add(websocket)
    active_status_connections.difference_update(disconnected)

@app.websocket("/ws/cache-progress")
async def websocket_cache_progress_endpoint(websocket: WebSocket):
    from modules.core.cache_manager import get_cache_progress
    await websocket.accept()
    active_cache_progress_connections.add(websocket)
    try:
        while True:
            progress = get_cache_progress()
            try:
                await websocket.send_json({
                    "type": "cache_progress",
                    "data": progress
                })
            except RuntimeError as e:
                if "close message has been sent" in str(e):
                    break
                raise
            await asyncio.sleep(1.0)  # Update every second (reduced frequency for better performance)
    except WebSocketDisconnect:
        pass
    finally:
        active_cache_progress_connections.discard(websocket)
        try:
            await websocket.close()
        except RuntimeError:
            pass
# FastAPI routes
@app.get("/")
async def index(request: Request):
    return templates.TemplateResponse("index.html", {"request": request, "app_name": state.app_name})

@app.get("/settings")
async def settings(request: Request):
    return templates.TemplateResponse("settings.html", {"request": request, "app_name": state.app_name})

@app.get("/api/auto_play-mode")
async def get_auto_play_mode():
    """Get current auto_play mode settings."""
    return {
        "enabled": state.auto_play_enabled,
        "playlist": state.auto_play_playlist,
        "run_mode": state.auto_play_run_mode,
        "pause_time": state.auto_play_pause_time,
        "clear_pattern": state.auto_play_clear_pattern,
        "shuffle": state.auto_play_shuffle
    }

@app.post("/api/auto_play-mode")
async def set_auto_play_mode(request: AutoPlayModeRequest):
    """Update auto_play mode settings."""
    state.auto_play_enabled = request.enabled
    if request.playlist is not None:
        state.auto_play_playlist = request.playlist
    if request.run_mode is not None:
        state.auto_play_run_mode = request.run_mode
    if request.pause_time is not None:
        state.auto_play_pause_time = request.pause_time
    if request.clear_pattern is not None:
        state.auto_play_clear_pattern = request.clear_pattern
    if request.shuffle is not None:
        state.auto_play_shuffle = request.shuffle
    state.save()
    logger.info(f"auto_play mode {'enabled' if request.enabled else 'disabled'}, playlist: {request.playlist}")
    return {"success": True, "message": "auto_play mode settings updated"}
@app.get("/api/scheduled-pause")
async def get_scheduled_pause():
    """Get current Still Sands settings."""
    return {
        "enabled": state.scheduled_pause_enabled,
        "control_wled": state.scheduled_pause_control_wled,
        "time_slots": state.scheduled_pause_time_slots
    }
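# Example POST /api/scheduled-pause body (illustrative values, matching the models above):
# {"enabled": true, "control_wled": false,
#  "time_slots": [{"start_time": "22:00", "end_time": "07:00", "days": "weekdays", "custom_days": []}]}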
@app.post("/api/scheduled-pause")
async def set_scheduled_pause(request: ScheduledPauseRequest):
    """Update Still Sands settings."""
    try:
        # Validate time slots
        for i, slot in enumerate(request.time_slots):
            # Validate time format (HH:MM)
            try:
                start_time = datetime.strptime(slot.start_time, "%H:%M").time()
                end_time = datetime.strptime(slot.end_time, "%H:%M").time()
            except ValueError:
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid time format in slot {i+1}. Use HH:MM format."
                )
            # Validate days setting
            if slot.days not in ["daily", "weekdays", "weekends", "custom"]:
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid days setting in slot {i+1}. Must be 'daily', 'weekdays', 'weekends', or 'custom'."
                )
            # Validate custom days if applicable
            if slot.days == "custom":
                if not slot.custom_days or len(slot.custom_days) == 0:
                    raise HTTPException(
                        status_code=400,
                        detail=f"Custom days must be specified for slot {i+1} when days is set to 'custom'."
                    )
                valid_days = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
                for day in slot.custom_days:
                    if day not in valid_days:
                        raise HTTPException(
                            status_code=400,
                            detail=f"Invalid day '{day}' in slot {i+1}. Valid days are: {', '.join(valid_days)}"
                        )
        # Update state
        state.scheduled_pause_enabled = request.enabled
        state.scheduled_pause_control_wled = request.control_wled
        state.scheduled_pause_time_slots = [slot.model_dump() for slot in request.time_slots]
        state.save()
        wled_msg = " (with WLED control)" if request.control_wled else ""
        logger.info(f"Still Sands {'enabled' if request.enabled else 'disabled'} with {len(request.time_slots)} time slots{wled_msg}")
        return {"success": True, "message": "Still Sands settings updated"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating Still Sands settings: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to update Still Sands settings: {str(e)}")
@app.get("/list_serial_ports")
async def list_ports():
    logger.debug("Listing available serial ports")
    return await asyncio.to_thread(connection_manager.list_serial_ports)

@app.post("/connect")
async def connect(request: ConnectRequest):
    if not request.port:
        state.conn = connection_manager.WebSocketConnection('ws://fluidnc.local:81')
        connection_manager.device_init()
        logger.info('Successfully connected to websocket ws://fluidnc.local:81')
        return {"success": True}
    try:
        state.conn = connection_manager.SerialConnection(request.port)
        connection_manager.device_init()
        logger.info(f'Successfully connected to serial port {request.port}')
        return {"success": True}
    except Exception as e:
        logger.error(f'Failed to connect to serial port {request.port}: {str(e)}')
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/disconnect")
async def disconnect():
    try:
        state.conn.close()
        logger.info('Successfully disconnected from serial port')
        return {"success": True}
    except Exception as e:
        logger.error(f'Failed to disconnect serial: {str(e)}')
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/restart_connection")
async def restart(request: ConnectRequest):
    if not request.port:
        logger.warning("Restart serial request received without port")
        raise HTTPException(status_code=400, detail="No port provided")
    try:
        logger.info(f"Restarting connection on port {request.port}")
        connection_manager.restart_connection()
        return {"success": True}
    except Exception as e:
        logger.error(f"Failed to restart serial on port {request.port}: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/list_theta_rho_files")
async def list_theta_rho_files():
    logger.debug("Listing theta-rho files")
    # Run the blocking file system operation in a thread pool
    files = await asyncio.to_thread(pattern_manager.list_theta_rho_files)
    return sorted(files)

@app.get("/list_theta_rho_files_with_metadata")
async def list_theta_rho_files_with_metadata():
    """Get list of theta-rho files with metadata for sorting and filtering.

    Optimized to process files asynchronously and support request cancellation.
    """
    from modules.core.cache_manager import get_pattern_metadata
    from concurrent.futures import ThreadPoolExecutor

    # Run the blocking file listing in a thread
    files = await asyncio.to_thread(pattern_manager.list_theta_rho_files)
    files_with_metadata = []

    # Use a ThreadPoolExecutor for I/O-bound operations (fallback path only)
    executor = ThreadPoolExecutor(max_workers=4)

    def process_file(file_path):
        """Process a single file and return its metadata."""
        try:
            full_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_path)
            # Get file stats
            file_stat = os.stat(full_path)
            # Get cached metadata (this should be fast if cached)
            metadata = get_pattern_metadata(file_path)
            # Extract full folder path from file path
            path_parts = file_path.split('/')
            if len(path_parts) > 1:
                # Get everything except the filename (join all folder parts)
                category = '/'.join(path_parts[:-1])
            else:
                category = 'root'
            # Get file name without extension
            file_name = os.path.splitext(os.path.basename(file_path))[0]
            # Use modification time (mtime) for "date modified"
            date_modified = file_stat.st_mtime
            return {
                'path': file_path,
                'name': file_name,
                'category': category,
                'date_modified': date_modified,
                'coordinates_count': metadata.get('total_coordinates', 0) if metadata else 0
            }
        except Exception as e:
            logger.warning(f"Error getting metadata for {file_path}: {str(e)}")
            # Include file with minimal info if metadata fails
            path_parts = file_path.split('/')
            if len(path_parts) > 1:
                category = '/'.join(path_parts[:-1])
            else:
                category = 'root'
            return {
                'path': file_path,
                'name': os.path.splitext(os.path.basename(file_path))[0],
                'category': category,
                'date_modified': 0,
                'coordinates_count': 0
            }

    # Load the entire metadata cache at once (async).
    # This is much faster than 1000+ individual metadata lookups.
    try:
        metadata_cache_path = "metadata_cache.json"

        def _load_cache():
            with open(metadata_cache_path, 'r') as f:
                return json.load(f)

        # Use async file reading to avoid blocking the event loop
        cache_data = await asyncio.to_thread(_load_cache)
        cache_dict = cache_data.get('data', {})
        logger.debug(f"Loaded metadata cache with {len(cache_dict)} entries")
        # Process all files using cached data only
        for file_path in files:
            try:
                # Extract category from path
                path_parts = file_path.split('/')
                category = '/'.join(path_parts[:-1]) if len(path_parts) > 1 else 'root'
                # Get file name without extension
                file_name = os.path.splitext(os.path.basename(file_path))[0]
                # Get metadata from cache
                cached_entry = cache_dict.get(file_path, {})
                if isinstance(cached_entry, dict) and 'metadata' in cached_entry:
                    metadata = cached_entry['metadata']
                    coords_count = metadata.get('total_coordinates', 0)
                    date_modified = cached_entry.get('mtime', 0)
                else:
                    coords_count = 0
                    date_modified = 0
                files_with_metadata.append({
                    'path': file_path,
                    'name': file_name,
                    'category': category,
                    'date_modified': date_modified,
                    'coordinates_count': coords_count
                })
            except Exception as e:
                logger.warning(f"Error processing {file_path}: {e}")
                # Include file with minimal info if processing fails
                path_parts = file_path.split('/')
                category = '/'.join(path_parts[:-1]) if len(path_parts) > 1 else 'root'
                files_with_metadata.append({
                    'path': file_path,
                    'name': os.path.splitext(os.path.basename(file_path))[0],
                    'category': category,
                    'date_modified': 0,
                    'coordinates_count': 0
                })
    except Exception as e:
        logger.error(f"Failed to load metadata cache, falling back to slow method: {e}")
        # Fallback to the original per-file method if cache loading fails
        loop = asyncio.get_event_loop()
        tasks = [loop.run_in_executor(executor, process_file, file_path) for file_path in files]
        for task in asyncio.as_completed(tasks):
            try:
                result = await task
                files_with_metadata.append(result)
            except Exception as task_error:
                logger.error(f"Error processing file: {str(task_error)}")

    # Clean up executor
    executor.shutdown(wait=False)
    return files_with_metadata
@app.post("/upload_theta_rho")
async def upload_theta_rho(file: UploadFile = File(...)):
    """Upload a theta-rho file."""
    try:
        # Ensure custom_patterns directory exists
        custom_patterns_dir = os.path.join(pattern_manager.THETA_RHO_DIR, "custom_patterns")
        os.makedirs(custom_patterns_dir, exist_ok=True)
        # Use forward slashes for internal path representation to maintain consistency
        file_path_in_patterns_dir = f"custom_patterns/{file.filename}"
        full_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_path_in_patterns_dir)
        # Save the uploaded file with proper encoding for Windows compatibility
        file_content = await file.read()
        try:
            # First try to decode as UTF-8 and re-encode to ensure proper encoding
            text_content = file_content.decode('utf-8')
            with open(full_file_path, "w", encoding='utf-8') as f:
                f.write(text_content)
        except UnicodeDecodeError:
            # If UTF-8 decoding fails, save as binary (fallback)
            with open(full_file_path, "wb") as f:
                f.write(file_content)
        logger.info(f"File {file.filename} saved successfully")
        # Generate image preview for the new file with retry logic
        max_retries = 3
        for attempt in range(max_retries):
            try:
                logger.info(f"Generating preview for {file_path_in_patterns_dir} (attempt {attempt + 1}/{max_retries})")
                success = await generate_image_preview(file_path_in_patterns_dir)
                if success:
                    logger.info(f"Preview generated successfully for {file_path_in_patterns_dir}")
                    break
                else:
                    logger.warning(f"Preview generation failed for {file_path_in_patterns_dir} (attempt {attempt + 1})")
                    if attempt < max_retries - 1:
                        await asyncio.sleep(0.5)  # Small delay before retry
            except Exception as e:
                logger.error(f"Error generating preview for {file_path_in_patterns_dir} (attempt {attempt + 1}): {str(e)}")
                if attempt < max_retries - 1:
                    await asyncio.sleep(0.5)  # Small delay before retry
        return {"success": True, "message": f"File {file.filename} uploaded successfully"}
    except Exception as e:
        logger.error(f"Error uploading file: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/get_theta_rho_coordinates")
async def get_theta_rho_coordinates(request: GetCoordinatesRequest):
    """Get theta-rho coordinates for animated preview."""
    try:
        # Normalize file path for cross-platform compatibility and remove prefixes
        file_name = normalize_file_path(request.file_name)
        file_path = os.path.join(THETA_RHO_DIR, file_name)
        # Check file existence asynchronously
        exists = await asyncio.to_thread(os.path.exists, file_path)
        if not exists:
            raise HTTPException(status_code=404, detail=f"File {file_name} not found")
        # Parse the theta-rho file in a separate process for CPU-intensive work.
        # This prevents blocking the motion control thread.
        loop = asyncio.get_event_loop()
        coordinates = await loop.run_in_executor(process_pool, parse_theta_rho_file, file_path)
        if not coordinates:
            raise HTTPException(status_code=400, detail="No valid coordinates found in file")
        return {
            "success": True,
            "coordinates": coordinates,
            "total_points": len(coordinates)
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting coordinates for {request.file_name}: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
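# Example POST /run_theta_rho body (illustrative; pre_execution is assumed to accept the
# clear-pattern options used elsewhere in this app, e.g. "none" or "adaptive"):
# {"file_name": "custom_patterns/spiral.thr", "pre_execution": "none"}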
@app.post("/run_theta_rho")
async def run_theta_rho(request: ThetaRhoRequest, background_tasks: BackgroundTasks):
    if not request.file_name:
        logger.warning('Run theta-rho request received without file name')
        raise HTTPException(status_code=400, detail="No file name provided")
    file_path = None
    if 'clear' in request.file_name:
        logger.info(f'Clear pattern file: {request.file_name.split(".")[0]}')
        file_path = pattern_manager.get_clear_pattern_file(request.file_name.split('.')[0])
        logger.info(f'Clear pattern file: {file_path}')
    if not file_path:
        # Normalize file path for cross-platform compatibility
        normalized_file_name = normalize_file_path(request.file_name)
        file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
    if not os.path.exists(file_path):
        logger.error(f'Theta-rho file not found: {file_path}')
        raise HTTPException(status_code=404, detail="File not found")
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to run a pattern without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        if pattern_manager.pattern_lock.locked():
            logger.warning("Attempted to run a pattern while another is already running")
            raise HTTPException(status_code=409, detail="Another pattern is already running")
        files_to_run = [file_path]
        logger.info(f'Running theta-rho file: {request.file_name} with pre_execution={request.pre_execution}')
        # Only include clear_pattern if it's not "none"
        kwargs = {}
        if request.pre_execution != "none":
            kwargs['clear_pattern'] = request.pre_execution
        # Pass the file list positionally and spread the keyword arguments
        background_tasks.add_task(
            pattern_manager.run_theta_rho_files,
            files_to_run,
            **kwargs
        )
        return {"success": True}
    except HTTPException as http_exc:
        logger.error(f'Failed to run theta-rho file {request.file_name}: {http_exc.detail}')
        raise http_exc
    except Exception as e:
        logger.error(f'Failed to run theta-rho file {request.file_name}: {str(e)}')
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/stop_execution")
async def stop_execution():
    if not (state.conn.is_connected() if state.conn else False):
        logger.warning("Attempted to stop without a connection")
        raise HTTPException(status_code=400, detail="Connection not established")
    await pattern_manager.stop_actions()
    return {"success": True}

@app.post("/send_home")
async def send_home():
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to move to home without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        # Run homing with a 15 second timeout
        success = await asyncio.to_thread(connection_manager.home)
        if not success:
            logger.error("Homing failed or timed out")
            raise HTTPException(status_code=500, detail="Homing failed or timed out after 15 seconds")
        return {"success": True}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to send home command: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/run_theta_rho_file/{file_name}")
async def run_specific_theta_rho_file(file_name: str):
    file_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_name)
    if not os.path.exists(file_path):
        raise HTTPException(status_code=404, detail="File not found")
    if not (state.conn.is_connected() if state.conn else False):
        logger.warning("Attempted to run a pattern without a connection")
        raise HTTPException(status_code=400, detail="Connection not established")
    pattern_manager.run_theta_rho_file(file_path)
    return {"success": True}

class DeleteFileRequest(BaseModel):
    file_name: str

@app.post("/delete_theta_rho_file")
async def delete_theta_rho_file(request: DeleteFileRequest):
    if not request.file_name:
        logger.warning("Delete theta-rho file request received without filename")
        raise HTTPException(status_code=400, detail="No file name provided")
    # Normalize file path for cross-platform compatibility
    normalized_file_name = normalize_file_path(request.file_name)
    file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
    # Check file existence asynchronously
    exists = await asyncio.to_thread(os.path.exists, file_path)
    if not exists:
        logger.error(f"Attempted to delete non-existent file: {file_path}")
        raise HTTPException(status_code=404, detail="File not found")
    try:
        # Delete the pattern file asynchronously
        await asyncio.to_thread(os.remove, file_path)
        logger.info(f"Successfully deleted theta-rho file: {request.file_name}")
        # Clean up cached preview image and metadata asynchronously
        from modules.core.cache_manager import delete_pattern_cache
        cache_cleanup_success = await asyncio.to_thread(delete_pattern_cache, normalized_file_name)
        if cache_cleanup_success:
            logger.info(f"Successfully cleaned up cache for {request.file_name}")
        else:
            logger.warning(f"Cache cleanup failed for {request.file_name}, but pattern was deleted")
        return {"success": True, "cache_cleanup": cache_cleanup_success}
    except Exception as e:
        logger.error(f"Failed to delete theta-rho file {request.file_name}: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/move_to_center")
async def move_to_center():
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to move to center without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        logger.info("Moving device to center position")
        await pattern_manager.reset_theta()
        await pattern_manager.move_polar(0, 0)
        return {"success": True}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to move to center: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/move_to_perimeter")
async def move_to_perimeter():
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to move to perimeter without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        await pattern_manager.reset_theta()
        await pattern_manager.move_polar(0, 1)
        return {"success": True}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to move to perimeter: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/preview_thr")
async def preview_thr(request: DeleteFileRequest):
    if not request.file_name:
        logger.warning("Preview theta-rho request received without filename")
        raise HTTPException(status_code=400, detail="No file name provided")
    # Normalize file path for cross-platform compatibility
    normalized_file_name = normalize_file_path(request.file_name)
    # Construct the full path to the pattern file to check existence
    pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
    # Check file existence asynchronously
    exists = await asyncio.to_thread(os.path.exists, pattern_file_path)
    if not exists:
        logger.error(f"Attempted to preview non-existent pattern file: {pattern_file_path}")
        raise HTTPException(status_code=404, detail="Pattern file not found")
    try:
        cache_path = get_cache_path(normalized_file_name)
        # Check cache existence asynchronously
        cache_exists = await asyncio.to_thread(os.path.exists, cache_path)
        if not cache_exists:
            logger.info(f"Cache miss for {request.file_name}. Generating preview...")
            # Attempt to generate the preview if it's missing
            success = await generate_image_preview(normalized_file_name)
            cache_exists_after = await asyncio.to_thread(os.path.exists, cache_path)
            if not success or not cache_exists_after:
                logger.error(f"Failed to generate or find preview for {request.file_name} after attempting generation.")
                raise HTTPException(status_code=500, detail="Failed to generate preview image.")
        # Try to get coordinates from the metadata cache first
        metadata = get_pattern_metadata(normalized_file_name)
        if metadata:
            first_coord_obj = metadata.get('first_coordinate')
            last_coord_obj = metadata.get('last_coordinate')
        else:
            # Fall back to parsing the file if metadata is not cached (shouldn't happen after the initial cache build)
            logger.debug(f"Metadata cache miss for {request.file_name}, parsing file")
            coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_file_path)
            first_coord = coordinates[0] if coordinates else None
            last_coord = coordinates[-1] if coordinates else None
            # Format coordinates as objects with x and y properties
            first_coord_obj = {"x": first_coord[0], "y": first_coord[1]} if first_coord else None
            last_coord_obj = {"x": last_coord[0], "y": last_coord[1]} if last_coord else None
        # Return JSON with the preview URL and coordinates.
        # URL-encode the file name, handling both forward slashes and backslashes for cross-platform compatibility.
        encoded_filename = normalized_file_name.replace('\\', '--').replace('/', '--')
        return {
            "preview_url": f"/preview/{encoded_filename}",
            "first_coordinate": first_coord_obj,
            "last_coordinate": last_coord_obj
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to generate or serve preview for {request.file_name}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to serve preview image: {str(e)}")
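# Preview URLs encode path separators as '--' so the whole pattern path fits in a single
# URL segment, e.g. 'custom_patterns/spiral.thr' -> '/preview/custom_patterns--spiral.thr'.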
@app.get("/preview/{encoded_filename}")
async def serve_preview(encoded_filename: str):
    """Serve a preview image for a pattern file."""
    # Decode the filename by replacing '--' with the original path separators.
    # First try forward slash (most common case), then backslash if needed.
    file_name = encoded_filename.replace('--', '/')
    # Apply normalization to handle any remaining path prefixes
    file_name = normalize_file_path(file_name)
    # Check if the decoded path exists; if not, try backslash decoding
    cache_path = get_cache_path(file_name)
    if not os.path.exists(cache_path):
        # Try with backslash for Windows paths
        file_name_backslash = encoded_filename.replace('--', '\\')
        file_name_backslash = normalize_file_path(file_name_backslash)
        cache_path_backslash = get_cache_path(file_name_backslash)
        if os.path.exists(cache_path_backslash):
            file_name = file_name_backslash
            cache_path = cache_path_backslash
    if not os.path.exists(cache_path):
        logger.error(f"Preview image not found for {file_name}")
        raise HTTPException(status_code=404, detail="Preview image not found")
    # Add caching headers
    headers = {
        "Cache-Control": "public, max-age=31536000",  # Cache for 1 year
        "Content-Type": "image/webp",
        "Accept-Ranges": "bytes"
    }
    return FileResponse(
        cache_path,
        media_type="image/webp",
        headers=headers
    )
@app.post("/send_coordinate")
async def send_coordinate(request: CoordinateRequest):
    if not (state.conn.is_connected() if state.conn else False):
        logger.warning("Attempted to send coordinate without a connection")
        raise HTTPException(status_code=400, detail="Connection not established")
    try:
        logger.debug(f"Sending coordinate: theta={request.theta}, rho={request.rho}")
        await pattern_manager.move_polar(request.theta, request.rho)
        return {"success": True}
    except Exception as e:
        logger.error(f"Failed to send coordinate: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/download/{filename}")
async def download_file(filename: str):
    return FileResponse(
        os.path.join(pattern_manager.THETA_RHO_DIR, filename),
        filename=filename
    )

@app.get("/serial_status")
async def serial_status():
    connected = state.conn.is_connected() if state.conn else False
    port = state.port
    logger.debug(f"Serial status check - connected: {connected}, port: {port}")
    return {
        "connected": connected,
        "port": port
    }

@app.post("/pause_execution")
async def pause_execution():
    if pattern_manager.pause_execution():
        return {"success": True, "message": "Execution paused"}
    raise HTTPException(status_code=500, detail="Failed to pause execution")

@app.post("/resume_execution")
async def resume_execution():
    if pattern_manager.resume_execution():
        return {"success": True, "message": "Execution resumed"}
    raise HTTPException(status_code=500, detail="Failed to resume execution")
# Playlist endpoints
@app.get("/list_all_playlists")
async def list_all_playlists():
    playlist_names = playlist_manager.list_all_playlists()
    return playlist_names

@app.get("/get_playlist")
async def get_playlist(name: str):
    if not name:
        raise HTTPException(status_code=400, detail="Missing playlist name parameter")
    playlist = playlist_manager.get_playlist(name)
    if not playlist:
        raise HTTPException(status_code=404, detail=f"Playlist '{name}' not found")
    return playlist

@app.post("/create_playlist")
async def create_playlist(request: PlaylistRequest):
    success = playlist_manager.create_playlist(request.playlist_name, request.files)
    return {
        "success": success,
        "message": f"Playlist '{request.playlist_name}' created/updated"
    }

@app.post("/modify_playlist")
async def modify_playlist(request: PlaylistRequest):
    success = playlist_manager.modify_playlist(request.playlist_name, request.files)
    return {
        "success": success,
        "message": f"Playlist '{request.playlist_name}' updated"
    }

@app.delete("/delete_playlist")
async def delete_playlist(request: DeletePlaylistRequest):
    success = playlist_manager.delete_playlist(request.playlist_name)
    if not success:
        raise HTTPException(
            status_code=404,
            detail=f"Playlist '{request.playlist_name}' not found"
        )
    return {
        "success": True,
        "message": f"Playlist '{request.playlist_name}' deleted"
    }

class AddToPlaylistRequest(BaseModel):
    playlist_name: str
    pattern: str

@app.post("/add_to_playlist")
async def add_to_playlist(request: AddToPlaylistRequest):
    success = playlist_manager.add_to_playlist(request.playlist_name, request.pattern)
    if not success:
        raise HTTPException(status_code=404, detail="Playlist not found")
    return {"success": True}
@app.post("/run_playlist")
async def run_playlist_endpoint(request: PlaylistRequest):
    """Run a playlist with specified parameters."""
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to run a playlist without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        if not os.path.exists(playlist_manager.PLAYLISTS_FILE):
            raise HTTPException(status_code=404, detail=f"Playlist '{request.playlist_name}' not found")
        # Start the playlist execution
        success, message = await playlist_manager.run_playlist(
            request.playlist_name,
            pause_time=request.pause_time,
            clear_pattern=request.clear_pattern,
            run_mode=request.run_mode,
            shuffle=request.shuffle
        )
        if not success:
            raise HTTPException(status_code=409, detail=message)
        return {"message": f"Started playlist: {request.playlist_name}"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error running playlist: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/set_speed")
async def set_speed(request: SpeedRequest):
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to change speed without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        if request.speed <= 0:
            logger.warning(f"Invalid speed value received: {request.speed}")
            raise HTTPException(status_code=400, detail="Invalid speed value")
        state.speed = request.speed
        return {"success": True, "speed": request.speed}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to set speed: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/check_software_update")
async def check_updates():
    update_info = update_manager.check_git_updates()
    return update_info

@app.post("/update_software")
async def update_software():
    logger.info("Starting software update process")
    success, error_message, error_log = update_manager.update_software()
    if success:
        logger.info("Software update completed successfully")
        return {"success": True}
    else:
        logger.error(f"Software update failed: {error_message}\nDetails: {error_log}")
        raise HTTPException(
            status_code=500,
            detail={
                "error": error_message,
                "details": error_log
            }
        )
@app.post("/set_wled_ip")
async def set_wled_ip(request: WLEDRequest):
    state.wled_ip = request.wled_ip
    state.led_controller = LEDController(request.wled_ip)
    effect_idle(state.led_controller)
    state.save()
    logger.info(f"WLED IP updated: {request.wled_ip}")
    return {"success": True, "wled_ip": state.wled_ip}

@app.get("/get_wled_ip")
async def get_wled_ip():
    if not state.wled_ip:
        raise HTTPException(status_code=404, detail="No WLED IP set")
    return {"success": True, "wled_ip": state.wled_ip}

@app.post("/skip_pattern")
async def skip_pattern():
    if not state.current_playlist:
        raise HTTPException(status_code=400, detail="No playlist is currently running")
    state.skip_requested = True
    return {"success": True}
@app.get("/api/custom_clear_patterns")
async def get_custom_clear_patterns():
    """Get the currently configured custom clear patterns."""
    return {
        "success": True,
        "custom_clear_from_in": state.custom_clear_from_in,
        "custom_clear_from_out": state.custom_clear_from_out
    }

@app.post("/api/custom_clear_patterns")
async def set_custom_clear_patterns(request: dict):
    """Set custom clear patterns for clear_from_in and clear_from_out."""
    try:
        # Validate that the patterns exist if they're provided
        if "custom_clear_from_in" in request and request["custom_clear_from_in"]:
            pattern_path = os.path.join(pattern_manager.THETA_RHO_DIR, request["custom_clear_from_in"])
            if not os.path.exists(pattern_path):
                raise HTTPException(status_code=400, detail=f"Pattern file not found: {request['custom_clear_from_in']}")
            state.custom_clear_from_in = request["custom_clear_from_in"]
        elif "custom_clear_from_in" in request:
            state.custom_clear_from_in = None
        if "custom_clear_from_out" in request and request["custom_clear_from_out"]:
            pattern_path = os.path.join(pattern_manager.THETA_RHO_DIR, request["custom_clear_from_out"])
            if not os.path.exists(pattern_path):
                raise HTTPException(status_code=400, detail=f"Pattern file not found: {request['custom_clear_from_out']}")
            state.custom_clear_from_out = request["custom_clear_from_out"]
        elif "custom_clear_from_out" in request:
            state.custom_clear_from_out = None
        state.save()
        logger.info(f"Custom clear patterns updated - in: {state.custom_clear_from_in}, out: {state.custom_clear_from_out}")
        return {
            "success": True,
            "custom_clear_from_in": state.custom_clear_from_in,
            "custom_clear_from_out": state.custom_clear_from_out
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to set custom clear patterns: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/clear_pattern_speed")
async def get_clear_pattern_speed():
    """Get the current clearing pattern speed setting."""
    return {
        "success": True,
        "clear_pattern_speed": state.clear_pattern_speed,
        "effective_speed": state.clear_pattern_speed if state.clear_pattern_speed is not None else state.speed
    }

@app.post("/api/clear_pattern_speed")
async def set_clear_pattern_speed(request: dict):
    """Set the clearing pattern speed."""
    try:
        # If speed is None or "none", use the default behavior (state.speed)
        speed_value = request.get("clear_pattern_speed")
        if speed_value is None or speed_value == "none" or speed_value == "":
            speed = None
        else:
            speed = int(speed_value)
        # Validate speed range (same as regular speed limits) only if speed is not None
        if speed is not None and not (50 <= speed <= 2000):
            raise HTTPException(status_code=400, detail="Speed must be between 50 and 2000")
        state.clear_pattern_speed = speed
        state.save()
        logger.info(f"Clear pattern speed set to {speed if speed is not None else 'default (state.speed)'}")
        return {
            "success": True,
            "clear_pattern_speed": state.clear_pattern_speed,
            "effective_speed": state.clear_pattern_speed if state.clear_pattern_speed is not None else state.speed
        }
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid speed value")
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to set clear pattern speed: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/app-name")
async def get_app_name():
    """Get current application name."""
    return {"app_name": state.app_name}

@app.post("/api/app-name")
async def set_app_name(request: dict):
    """Update application name."""
    app_name = request.get("app_name", "").strip()
    if not app_name:
        app_name = "Dune Weaver"  # Reset to default if empty
    state.app_name = app_name
    state.save()
    logger.info(f"Application name updated to: {app_name}")
    return {"success": True, "app_name": app_name}
@app.post("/preview_thr_batch")
async def preview_thr_batch(request: dict):
    start = time.time()
    if not request.get("file_names"):
        logger.warning("Batch preview request received without filenames")
        raise HTTPException(status_code=400, detail="No file names provided")
    file_names = request["file_names"]
    if not isinstance(file_names, list):
        raise HTTPException(status_code=400, detail="file_names must be a list")
    headers = {
        "Cache-Control": "public, max-age=3600",  # Cache for 1 hour
        "Content-Type": "application/json"
    }

    async def process_single_file(file_name):
        """Process a single file and return its preview data."""
        t1 = time.time()
        try:
            # Normalize file path for cross-platform compatibility
            normalized_file_name = normalize_file_path(file_name)
            pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
            # Check file existence asynchronously
            exists = await asyncio.to_thread(os.path.exists, pattern_file_path)
            if not exists:
                logger.warning(f"Pattern file not found: {pattern_file_path}")
                return file_name, {"error": "Pattern file not found"}
            cache_path = get_cache_path(normalized_file_name)
            # Check cache existence asynchronously
            cache_exists = await asyncio.to_thread(os.path.exists, cache_path)
            if not cache_exists:
                logger.info(f"Cache miss for {file_name}. Generating preview...")
                success = await generate_image_preview(normalized_file_name)
                cache_exists_after = await asyncio.to_thread(os.path.exists, cache_path)
                if not success or not cache_exists_after:
                    logger.error(f"Failed to generate or find preview for {file_name}")
                    return file_name, {"error": "Failed to generate preview"}
            metadata = get_pattern_metadata(normalized_file_name)
            if metadata:
                first_coord_obj = metadata.get('first_coordinate')
                last_coord_obj = metadata.get('last_coordinate')
            else:
                logger.debug(f"Metadata cache miss for {file_name}, parsing file")
                # Use the process pool for CPU-intensive parsing
                loop = asyncio.get_event_loop()
                coordinates = await loop.run_in_executor(process_pool, parse_theta_rho_file, pattern_file_path)
                first_coord = coordinates[0] if coordinates else None
                last_coord = coordinates[-1] if coordinates else None
                first_coord_obj = {"x": first_coord[0], "y": first_coord[1]} if first_coord else None
                last_coord_obj = {"x": last_coord[0], "y": last_coord[1]} if last_coord else None

            # Read the image file asynchronously
            def _read_image():
                with open(cache_path, 'rb') as f:
                    return f.read()

            image_data = await asyncio.to_thread(_read_image)
            image_b64 = base64.b64encode(image_data).decode('utf-8')
            result = {
                "image_data": f"data:image/webp;base64,{image_b64}",
                "first_coordinate": first_coord_obj,
                "last_coordinate": last_coord_obj
            }
            logger.debug(f"Processed {file_name} in {time.time() - t1:.2f}s")
            return file_name, result
        except Exception as e:
            logger.error(f"Error processing {file_name}: {str(e)}")
            return file_name, {"error": str(e)}

    # Process all files concurrently
    tasks = [process_single_file(file_name) for file_name in file_names]
    file_results = await asyncio.gather(*tasks)
    # Convert results to a dictionary
    results = dict(file_results)
    logger.info(f"Total batch processing time: {time.time() - start:.2f}s for {len(file_names)} files")
    return JSONResponse(content=results, headers=headers)
@app.get("/playlists")
async def playlists(request: Request):
    logger.debug("Rendering playlists page")
    return templates.TemplateResponse("playlists.html", {"request": request, "app_name": state.app_name})

@app.get("/image2sand")
async def image2sand(request: Request):
    return templates.TemplateResponse("image2sand.html", {"request": request, "app_name": state.app_name})

@app.get("/wled")
async def wled(request: Request):
    return templates.TemplateResponse("wled.html", {"request": request, "app_name": state.app_name})

@app.get("/table_control")
async def table_control(request: Request):
    return templates.TemplateResponse("table_control.html", {"request": request, "app_name": state.app_name})

@app.get("/cache-progress")
async def get_cache_progress_endpoint():
    """Get the current cache generation progress."""
    from modules.core.cache_manager import get_cache_progress
    return get_cache_progress()

@app.post("/rebuild_cache")
async def rebuild_cache_endpoint():
    """Trigger a rebuild of the pattern cache."""
    try:
        from modules.core.cache_manager import rebuild_cache
        await rebuild_cache()
        return {"success": True, "message": "Cache rebuild completed successfully"}
    except Exception as e:
        logger.error(f"Failed to rebuild cache: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
def signal_handler(signum, frame):
    """Handle shutdown signals gracefully but forcefully."""
    logger.info("Received shutdown signal, cleaning up...")
    try:
        if state.led_controller:
            state.led_controller.set_power(0)
        # Run cleanup operations - async work must be handled from this sync context
        try:
            # Raises RuntimeError if no event loop is running in this thread
            asyncio.get_running_loop()
            # A loop is running here, so run the cleanup coroutine on a separate thread
            import concurrent.futures
            with concurrent.futures.ThreadPoolExecutor() as executor:
                future = executor.submit(asyncio.run, pattern_manager.stop_actions())
                future.result(timeout=5.0)  # Wait up to 5 seconds
        except RuntimeError:
            # No running loop, create a new one
            asyncio.run(pattern_manager.stop_actions())
        except Exception as cleanup_err:
            logger.error(f"Error in async cleanup: {cleanup_err}")
        state.save()
        logger.info("Cleanup completed")
    except Exception as e:
        logger.error(f"Error during cleanup: {str(e)}")
    finally:
        logger.info("Exiting application...")
        os._exit(0)  # Force exit regardless of other threads
@app.get("/api/version")
async def get_version_info():
    """Get current and latest version information."""
    try:
        version_info = await version_manager.get_version_info()
        return JSONResponse(content=version_info)
    except Exception as e:
        logger.error(f"Error getting version info: {e}")
        return JSONResponse(
            content={
                "current": version_manager.get_current_version(),
                "latest": version_manager.get_current_version(),
                "update_available": False,
                "error": "Unable to check for updates"
            },
            status_code=200
        )

@app.post("/api/update")
async def trigger_update():
    """Trigger software update (placeholder for future implementation)."""
    try:
        # For now, just return the GitHub release URL
        version_info = await version_manager.get_version_info()
        if version_info.get("latest_release"):
            return JSONResponse(content={
                "success": False,
                "message": "Automatic updates not implemented yet",
                "manual_update_url": version_info["latest_release"].get("html_url"),
                "instructions": "Please visit the GitHub release page to download and install the update manually"
            })
        else:
            return JSONResponse(content={
                "success": False,
                "message": "No updates available"
            })
    except Exception as e:
        logger.error(f"Error triggering update: {e}")
        return JSONResponse(
            content={"success": False, "message": "Failed to check for updates"},
            status_code=500
        )

def entrypoint():
    import uvicorn
    logger.info("Starting FastAPI server on port 8080...")
    uvicorn.run(app, host="0.0.0.0", port=8080, workers=1)  # Keep workers=1 to avoid multiple signal handlers

if __name__ == "__main__":
    entrypoint()