# main.py

from fastapi import FastAPI, UploadFile, File, HTTPException, BackgroundTasks, WebSocket, WebSocketDisconnect, Request
from fastapi.responses import JSONResponse, FileResponse, Response
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel
from typing import List, Optional, Tuple, Dict, Any, Union
import atexit
import os
import logging
from datetime import datetime
from modules.connection import connection_manager
from modules.core import pattern_manager
from modules.core.pattern_manager import parse_theta_rho_file, THETA_RHO_DIR
from modules.core import playlist_manager
from modules.update import update_manager
from modules.core.state import state
from modules import mqtt
import signal
import sys
import asyncio
from contextlib import asynccontextmanager
from modules.led.led_controller import LEDController, effect_idle
import math
from modules.core.cache_manager import generate_all_image_previews, get_cache_path, generate_image_preview, get_pattern_metadata
from modules.core.version_manager import version_manager
import json
import base64
import time
import argparse
from concurrent.futures import ProcessPoolExecutor
import multiprocessing

# Get log level from environment variable, default to INFO
log_level_str = os.getenv('LOG_LEVEL', 'INFO').upper()
log_level = getattr(logging, log_level_str, logging.INFO)

# Create a process pool for CPU-intensive tasks
# Limit to reasonable number of workers for embedded systems
cpu_count = multiprocessing.cpu_count()
# Maximum 3 workers (leaving 1 for motion), minimum 1
process_pool_size = min(3, max(1, cpu_count - 1))
process_pool = None  # Will be initialized in lifespan

logging.basicConfig(
    level=log_level,
    format='%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(),
    ]
)
logger = logging.getLogger(__name__)

def normalize_file_path(file_path: str) -> str:
    """Normalize file path separators for consistent cross-platform handling."""
    if not file_path:
        return ''
    # First normalize path separators
    normalized = file_path.replace('\\', '/')
    # Remove only the patterns directory prefix from the beginning, not patterns within the path
    if normalized.startswith('./patterns/'):
        normalized = normalized[11:]
    elif normalized.startswith('patterns/'):
        normalized = normalized[9:]
    return normalized
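
# Illustrative behaviour of normalize_file_path (file names are assumed examples):
#   normalize_file_path('./patterns/custom_patterns/spiral.thr')  -> 'custom_patterns/spiral.thr'
#   normalize_file_path('patterns\\custom_patterns\\spiral.thr')  -> 'custom_patterns/spiral.thr'
#   normalize_file_path('custom_patterns/spiral.thr')             -> 'custom_patterns/spiral.thr'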

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup
    logger.info("Starting Dune Weaver application...")
    # Register signal handlers
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)
    # Initialize process pool for CPU-intensive tasks
    global process_pool
    process_pool = ProcessPoolExecutor(max_workers=process_pool_size)
    logger.info(f"Initialized process pool with {process_pool_size} workers (detected {cpu_count} cores total)")
    try:
        connection_manager.connect_device()
    except Exception as e:
        logger.warning(f"Failed to auto-connect to serial port: {str(e)}")
    # Check if auto_play mode is enabled and auto-play playlist (right after connection attempt)
    if state.auto_play_enabled and state.auto_play_playlist:
        logger.info(f"auto_play mode enabled, checking for connection before auto-playing playlist: {state.auto_play_playlist}")
        try:
            # Check if we have a valid connection before starting playlist
            if state.conn and hasattr(state.conn, 'is_connected') and state.conn.is_connected():
                logger.info(f"Connection available, starting auto-play playlist: {state.auto_play_playlist} with options: run_mode={state.auto_play_run_mode}, pause_time={state.auto_play_pause_time}, clear_pattern={state.auto_play_clear_pattern}, shuffle={state.auto_play_shuffle}")
                asyncio.create_task(playlist_manager.run_playlist(
                    state.auto_play_playlist,
                    pause_time=state.auto_play_pause_time,
                    clear_pattern=state.auto_play_clear_pattern,
                    run_mode=state.auto_play_run_mode,
                    shuffle=state.auto_play_shuffle
                ))
            else:
                logger.warning("No hardware connection available, skipping auto_play mode auto-play")
        except Exception as e:
            logger.error(f"Failed to auto-play auto_play playlist: {str(e)}")
    try:
        mqtt_handler = mqtt.init_mqtt()
    except Exception as e:
        logger.warning(f"Failed to initialize MQTT: {str(e)}")

    # Schedule cache generation check for later (non-blocking startup)
    async def delayed_cache_check():
        """Check and generate cache in background."""
        try:
            logger.info("Starting cache check...")
            from modules.core.cache_manager import is_cache_generation_needed_async, generate_cache_background
            if await is_cache_generation_needed_async():
                logger.info("Cache generation needed, starting background task...")
                asyncio.create_task(generate_cache_background())  # Don't await - run in background
            else:
                logger.info("Cache is up to date, skipping generation")
        except Exception as e:
            logger.warning(f"Failed during cache generation: {str(e)}")

    # Start cache check in background immediately
    asyncio.create_task(delayed_cache_check())

    yield  # This separates startup from shutdown code

    # Shutdown
    logger.info("Shutting down Dune Weaver application...")
    # Shutdown process pool
    if process_pool:
        process_pool.shutdown(wait=True)
        logger.info("Process pool shutdown complete")

app = FastAPI(lifespan=lifespan)
templates = Jinja2Templates(directory="templates")
app.mount("/static", StaticFiles(directory="static"), name="static")

# Pydantic models for request/response validation
class ConnectRequest(BaseModel):
    port: Optional[str] = None

class auto_playModeRequest(BaseModel):
    enabled: bool
    playlist: Optional[str] = None
    run_mode: Optional[str] = "loop"
    pause_time: Optional[float] = 5.0
    clear_pattern: Optional[str] = "adaptive"
    shuffle: Optional[bool] = False

class TimeSlot(BaseModel):
    start_time: str  # HH:MM format
    end_time: str  # HH:MM format
    days: str  # "daily", "weekdays", "weekends", or "custom"
    custom_days: Optional[List[str]] = []  # ["monday", "tuesday", etc.]

class ScheduledPauseRequest(BaseModel):
    enabled: bool
    time_slots: List[TimeSlot] = []

class CoordinateRequest(BaseModel):
    theta: float
    rho: float

class PlaylistRequest(BaseModel):
    playlist_name: str
    files: List[str] = []
    pause_time: float = 0
    clear_pattern: Optional[str] = None
    run_mode: str = "single"
    shuffle: bool = False

class PlaylistRunRequest(BaseModel):
    playlist_name: str
    pause_time: Optional[float] = 0
    clear_pattern: Optional[str] = None
    run_mode: Optional[str] = "single"
    shuffle: Optional[bool] = False
    start_time: Optional[str] = None
    end_time: Optional[str] = None

class SpeedRequest(BaseModel):
    speed: float

class WLEDRequest(BaseModel):
    wled_ip: Optional[str] = None

class DeletePlaylistRequest(BaseModel):
    playlist_name: str

class ThetaRhoRequest(BaseModel):
    file_name: str
    pre_execution: Optional[str] = "none"

class GetCoordinatesRequest(BaseModel):
    file_name: str
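
# Example JSON request bodies for the models above (values are illustrative only):
#   ThetaRhoRequest:     {"file_name": "custom_patterns/spiral.thr", "pre_execution": "none"}
#   PlaylistRunRequest:  {"playlist_name": "Favorites", "pause_time": 5.0,
#                         "run_mode": "loop", "shuffle": true}
#   TimeSlot (inside ScheduledPauseRequest.time_slots):
#                        {"start_time": "22:00", "end_time": "07:00",
#                         "days": "custom", "custom_days": ["saturday", "sunday"]}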

# Store active WebSocket connections
active_status_connections = set()
active_cache_progress_connections = set()

@app.websocket("/ws/status")
async def websocket_status_endpoint(websocket: WebSocket):
    await websocket.accept()
    active_status_connections.add(websocket)
    try:
        while True:
            status = pattern_manager.get_status()
            try:
                await websocket.send_json({
                    "type": "status_update",
                    "data": status
                })
            except RuntimeError as e:
                if "close message has been sent" in str(e):
                    break
                raise
            await asyncio.sleep(1)
    except WebSocketDisconnect:
        pass
    finally:
        active_status_connections.discard(websocket)
        try:
            await websocket.close()
        except RuntimeError:
            pass

async def broadcast_status_update(status: dict):
    """Broadcast status update to all connected clients."""
    disconnected = set()
    for websocket in active_status_connections:
        try:
            await websocket.send_json({
                "type": "status_update",
                "data": status
            })
        except WebSocketDisconnect:
            disconnected.add(websocket)
        except RuntimeError:
            disconnected.add(websocket)
    active_status_connections.difference_update(disconnected)

@app.websocket("/ws/cache-progress")
async def websocket_cache_progress_endpoint(websocket: WebSocket):
    from modules.core.cache_manager import get_cache_progress
    await websocket.accept()
    active_cache_progress_connections.add(websocket)
    try:
        while True:
            progress = get_cache_progress()
            try:
                await websocket.send_json({
                    "type": "cache_progress",
                    "data": progress
                })
            except RuntimeError as e:
                if "close message has been sent" in str(e):
                    break
                raise
            await asyncio.sleep(1.0)  # Update every 1 second (reduced frequency for better performance)
    except WebSocketDisconnect:
        pass
    finally:
        active_cache_progress_connections.discard(websocket)
        try:
            await websocket.close()
        except RuntimeError:
            pass
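
# Minimal client sketch for the status WebSocket above. This assumes the `websockets`
# package and a server reachable on localhost:8080; it is illustrative, not part of the app:
#
#   import asyncio, json, websockets
#
#   async def watch_status():
#       async with websockets.connect("ws://localhost:8080/ws/status") as ws:
#           while True:
#               message = json.loads(await ws.recv())  # {"type": "status_update", "data": {...}}
#               print(message["data"])
#
#   asyncio.run(watch_status())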

# FastAPI routes
@app.get("/")
async def index(request: Request):
    return templates.TemplateResponse("index.html", {"request": request, "app_name": state.app_name})

@app.get("/settings")
async def settings(request: Request):
    return templates.TemplateResponse("settings.html", {"request": request, "app_name": state.app_name})

@app.get("/api/auto_play-mode")
async def get_auto_play_mode():
    """Get current auto_play mode settings."""
    return {
        "enabled": state.auto_play_enabled,
        "playlist": state.auto_play_playlist,
        "run_mode": state.auto_play_run_mode,
        "pause_time": state.auto_play_pause_time,
        "clear_pattern": state.auto_play_clear_pattern,
        "shuffle": state.auto_play_shuffle
    }

@app.post("/api/auto_play-mode")
async def set_auto_play_mode(request: auto_playModeRequest):
    """Update auto_play mode settings."""
    state.auto_play_enabled = request.enabled
    if request.playlist is not None:
        state.auto_play_playlist = request.playlist
    if request.run_mode is not None:
        state.auto_play_run_mode = request.run_mode
    if request.pause_time is not None:
        state.auto_play_pause_time = request.pause_time
    if request.clear_pattern is not None:
        state.auto_play_clear_pattern = request.clear_pattern
    if request.shuffle is not None:
        state.auto_play_shuffle = request.shuffle
    state.save()
    logger.info(f"auto_play mode {'enabled' if request.enabled else 'disabled'}, playlist: {request.playlist}")
    return {"success": True, "message": "auto_play mode settings updated"}

@app.get("/api/scheduled-pause")
async def get_scheduled_pause():
    """Get current Still Sands settings."""
    return {
        "enabled": state.scheduled_pause_enabled,
        "time_slots": state.scheduled_pause_time_slots
    }

@app.post("/api/scheduled-pause")
async def set_scheduled_pause(request: ScheduledPauseRequest):
    """Update Still Sands settings."""
    try:
        # Validate time slots
        for i, slot in enumerate(request.time_slots):
            # Validate time format (HH:MM)
            try:
                start_time = datetime.strptime(slot.start_time, "%H:%M").time()
                end_time = datetime.strptime(slot.end_time, "%H:%M").time()
            except ValueError:
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid time format in slot {i+1}. Use HH:MM format."
                )
            # Validate days setting
            if slot.days not in ["daily", "weekdays", "weekends", "custom"]:
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid days setting in slot {i+1}. Must be 'daily', 'weekdays', 'weekends', or 'custom'."
                )
            # Validate custom days if applicable
            if slot.days == "custom":
                if not slot.custom_days or len(slot.custom_days) == 0:
                    raise HTTPException(
                        status_code=400,
                        detail=f"Custom days must be specified for slot {i+1} when days is set to 'custom'."
                    )
                valid_days = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
                for day in slot.custom_days:
                    if day not in valid_days:
                        raise HTTPException(
                            status_code=400,
                            detail=f"Invalid day '{day}' in slot {i+1}. Valid days are: {', '.join(valid_days)}"
                        )
        # Update state
        state.scheduled_pause_enabled = request.enabled
        state.scheduled_pause_time_slots = [slot.model_dump() for slot in request.time_slots]
        state.save()
        logger.info(f"Still Sands {'enabled' if request.enabled else 'disabled'} with {len(request.time_slots)} time slots")
        return {"success": True, "message": "Still Sands settings updated"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating Still Sands settings: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to update Still Sands settings: {str(e)}")

@app.get("/list_serial_ports")
async def list_ports():
    logger.debug("Listing available serial ports")
    return await asyncio.to_thread(connection_manager.list_serial_ports)

@app.post("/connect")
async def connect(request: ConnectRequest):
    if not request.port:
        state.conn = connection_manager.WebSocketConnection('ws://fluidnc.local:81')
        connection_manager.device_init()
        logger.info('Successfully connected to websocket ws://fluidnc.local:81')
        return {"success": True}
    try:
        state.conn = connection_manager.SerialConnection(request.port)
        connection_manager.device_init()
        logger.info(f'Successfully connected to serial port {request.port}')
        return {"success": True}
    except Exception as e:
        logger.error(f'Failed to connect to serial port {request.port}: {str(e)}')
        raise HTTPException(status_code=500, detail=str(e))
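
# Example /connect requests (illustrative port value):
#   POST /connect with {"port": "/dev/ttyUSB0"} -> opens a serial connection on that port
#   POST /connect with {} or {"port": null}     -> falls back to the WebSocket controller
#                                                  at ws://fluidnc.local:81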

@app.post("/disconnect")
async def disconnect():
    try:
        state.conn.close()
        logger.info('Successfully disconnected from serial port')
        return {"success": True}
    except Exception as e:
        logger.error(f'Failed to disconnect serial: {str(e)}')
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/restart_connection")
async def restart(request: ConnectRequest):
    if not request.port:
        logger.warning("Restart serial request received without port")
        raise HTTPException(status_code=400, detail="No port provided")
    try:
        logger.info(f"Restarting connection on port {request.port}")
        connection_manager.restart_connection()
        return {"success": True}
    except Exception as e:
        logger.error(f"Failed to restart serial on port {request.port}: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/list_theta_rho_files")
async def list_theta_rho_files():
    logger.debug("Listing theta-rho files")
    # Run the blocking file system operation in a thread pool
    files = await asyncio.to_thread(pattern_manager.list_theta_rho_files)
    return sorted(files)

@app.get("/list_theta_rho_files_with_metadata")
async def list_theta_rho_files_with_metadata():
    """Get list of theta-rho files with metadata for sorting and filtering.

    Optimized to process files asynchronously and support request cancellation.
    """
    from modules.core.cache_manager import get_pattern_metadata
    import asyncio
    from concurrent.futures import ThreadPoolExecutor

    # Run the blocking file listing in a thread
    files = await asyncio.to_thread(pattern_manager.list_theta_rho_files)
    files_with_metadata = []

    # Use ThreadPoolExecutor for I/O-bound operations
    executor = ThreadPoolExecutor(max_workers=4)

    def process_file(file_path):
        """Process a single file and return its metadata."""
        try:
            full_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_path)
            # Get file stats
            file_stat = os.stat(full_path)
            # Get cached metadata (this should be fast if cached)
            metadata = get_pattern_metadata(file_path)
            # Extract full folder path from file path
            path_parts = file_path.split('/')
            if len(path_parts) > 1:
                # Get everything except the filename (join all folder parts)
                category = '/'.join(path_parts[:-1])
            else:
                category = 'root'
            # Get file name without extension
            file_name = os.path.splitext(os.path.basename(file_path))[0]
            # Use modification time (mtime) for "date modified"
            date_modified = file_stat.st_mtime
            return {
                'path': file_path,
                'name': file_name,
                'category': category,
                'date_modified': date_modified,
                'coordinates_count': metadata.get('total_coordinates', 0) if metadata else 0
            }
        except Exception as e:
            logger.warning(f"Error getting metadata for {file_path}: {str(e)}")
            # Include file with minimal info if metadata fails
            path_parts = file_path.split('/')
            if len(path_parts) > 1:
                category = '/'.join(path_parts[:-1])
            else:
                category = 'root'
            return {
                'path': file_path,
                'name': os.path.splitext(os.path.basename(file_path))[0],
                'category': category,
                'date_modified': 0,
                'coordinates_count': 0
            }

    # Load the entire metadata cache at once (async)
    # This is much faster than 1000+ individual metadata lookups
    try:
        import json
        metadata_cache_path = "metadata_cache.json"
        # Use async file reading to avoid blocking the event loop
        cache_data = await asyncio.to_thread(lambda: json.load(open(metadata_cache_path, 'r')))
        cache_dict = cache_data.get('data', {})
        logger.debug(f"Loaded metadata cache with {len(cache_dict)} entries")
        # Process all files using cached data only
        for file_path in files:
            try:
                # Extract category from path
                path_parts = file_path.split('/')
                category = '/'.join(path_parts[:-1]) if len(path_parts) > 1 else 'root'
                # Get file name without extension
                file_name = os.path.splitext(os.path.basename(file_path))[0]
                # Get metadata from cache
                cached_entry = cache_dict.get(file_path, {})
                if isinstance(cached_entry, dict) and 'metadata' in cached_entry:
                    metadata = cached_entry['metadata']
                    coords_count = metadata.get('total_coordinates', 0)
                    date_modified = cached_entry.get('mtime', 0)
                else:
                    coords_count = 0
                    date_modified = 0
                files_with_metadata.append({
                    'path': file_path,
                    'name': file_name,
                    'category': category,
                    'date_modified': date_modified,
                    'coordinates_count': coords_count
                })
            except Exception as e:
                logger.warning(f"Error processing {file_path}: {e}")
                # Include file with minimal info if processing fails
                path_parts = file_path.split('/')
                category = '/'.join(path_parts[:-1]) if len(path_parts) > 1 else 'root'
                files_with_metadata.append({
                    'path': file_path,
                    'name': os.path.splitext(os.path.basename(file_path))[0],
                    'category': category,
                    'date_modified': 0,
                    'coordinates_count': 0
                })
    except Exception as e:
        logger.error(f"Failed to load metadata cache, falling back to slow method: {e}")
        # Fallback to original method if cache loading fails
        # Create tasks only when needed
        loop = asyncio.get_event_loop()
        tasks = [loop.run_in_executor(executor, process_file, file_path) for file_path in files]
        for task in asyncio.as_completed(tasks):
            try:
                result = await task
                files_with_metadata.append(result)
            except Exception as task_error:
                logger.error(f"Error processing file: {str(task_error)}")

    # Clean up executor
    executor.shutdown(wait=False)
    return files_with_metadata

@app.post("/upload_theta_rho")
async def upload_theta_rho(file: UploadFile = File(...)):
    """Upload a theta-rho file."""
    try:
        # Save the file
        # Ensure custom_patterns directory exists
        custom_patterns_dir = os.path.join(pattern_manager.THETA_RHO_DIR, "custom_patterns")
        os.makedirs(custom_patterns_dir, exist_ok=True)
        # Use forward slashes for internal path representation to maintain consistency
        file_path_in_patterns_dir = f"custom_patterns/{file.filename}"
        full_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_path_in_patterns_dir)
        # Save the uploaded file with proper encoding for Windows compatibility
        file_content = await file.read()
        try:
            # First try to decode as UTF-8 and re-encode to ensure proper encoding
            text_content = file_content.decode('utf-8')
            with open(full_file_path, "w", encoding='utf-8') as f:
                f.write(text_content)
        except UnicodeDecodeError:
            # If UTF-8 decoding fails, save as binary (fallback)
            with open(full_file_path, "wb") as f:
                f.write(file_content)
        logger.info(f"File {file.filename} saved successfully")
        # Generate image preview for the new file with retry logic
        max_retries = 3
        for attempt in range(max_retries):
            try:
                logger.info(f"Generating preview for {file_path_in_patterns_dir} (attempt {attempt + 1}/{max_retries})")
                success = await generate_image_preview(file_path_in_patterns_dir)
                if success:
                    logger.info(f"Preview generated successfully for {file_path_in_patterns_dir}")
                    break
                else:
                    logger.warning(f"Preview generation failed for {file_path_in_patterns_dir} (attempt {attempt + 1})")
                    if attempt < max_retries - 1:
                        await asyncio.sleep(0.5)  # Small delay before retry
            except Exception as e:
                logger.error(f"Error generating preview for {file_path_in_patterns_dir} (attempt {attempt + 1}): {str(e)}")
                if attempt < max_retries - 1:
                    await asyncio.sleep(0.5)  # Small delay before retry
        return {"success": True, "message": f"File {file.filename} uploaded successfully"}
    except Exception as e:
        logger.error(f"Error uploading file: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
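
# Example upload (illustrative; assumes the server is reachable on localhost:8080):
#   curl -F "file=@spiral.thr" http://localhost:8080/upload_theta_rho
# The file is saved to custom_patterns/ under pattern_manager.THETA_RHO_DIR and a
# preview image is generated for it.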

@app.post("/get_theta_rho_coordinates")
async def get_theta_rho_coordinates(request: GetCoordinatesRequest):
    """Get theta-rho coordinates for animated preview."""
    try:
        # Normalize file path for cross-platform compatibility and remove prefixes
        file_name = normalize_file_path(request.file_name)
        file_path = os.path.join(THETA_RHO_DIR, file_name)
        # Check file existence asynchronously
        exists = await asyncio.to_thread(os.path.exists, file_path)
        if not exists:
            raise HTTPException(status_code=404, detail=f"File {file_name} not found")
        # Parse the theta-rho file in a separate process for CPU-intensive work
        # This prevents blocking the motion control thread
        loop = asyncio.get_event_loop()
        coordinates = await loop.run_in_executor(process_pool, parse_theta_rho_file, file_path)
        if not coordinates:
            raise HTTPException(status_code=400, detail="No valid coordinates found in file")
        return {
            "success": True,
            "coordinates": coordinates,
            "total_points": len(coordinates)
        }
    except HTTPException:
        # Preserve the intended 404/400 status codes instead of converting them to 500
        raise
    except Exception as e:
        logger.error(f"Error getting coordinates for {request.file_name}: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/run_theta_rho")
async def run_theta_rho(request: ThetaRhoRequest, background_tasks: BackgroundTasks):
    if not request.file_name:
        logger.warning('Run theta-rho request received without file name')
        raise HTTPException(status_code=400, detail="No file name provided")
    file_path = None
    if 'clear' in request.file_name:
        logger.info(f'Clear pattern file: {request.file_name.split(".")[0]}')
        file_path = pattern_manager.get_clear_pattern_file(request.file_name.split('.')[0])
        logger.info(f'Clear pattern file: {file_path}')
    if not file_path:
        # Normalize file path for cross-platform compatibility
        normalized_file_name = normalize_file_path(request.file_name)
        file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
    if not os.path.exists(file_path):
        logger.error(f'Theta-rho file not found: {file_path}')
        raise HTTPException(status_code=404, detail="File not found")
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to run a pattern without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        if pattern_manager.pattern_lock.locked():
            logger.warning("Attempted to run a pattern while another is already running")
            raise HTTPException(status_code=409, detail="Another pattern is already running")
        files_to_run = [file_path]
        logger.info(f'Running theta-rho file: {request.file_name} with pre_execution={request.pre_execution}')
        # Only include clear_pattern if it's not "none"
        kwargs = {}
        if request.pre_execution != "none":
            kwargs['clear_pattern'] = request.pre_execution
        # Pass arguments properly
        background_tasks.add_task(
            pattern_manager.run_theta_rho_files,
            files_to_run,  # First positional argument
            **kwargs  # Spread keyword arguments
        )
        return {"success": True}
    except HTTPException as http_exc:
        logger.error(f'Failed to run theta-rho file {request.file_name}: {http_exc.detail}')
        raise http_exc
    except Exception as e:
        logger.error(f'Failed to run theta-rho file {request.file_name}: {str(e)}')
        raise HTTPException(status_code=500, detail=str(e))
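
# Example /run_theta_rho request bodies (file and pattern names are illustrative):
#   {"file_name": "custom_patterns/spiral.thr"}
#       -> run the pattern as-is
#   {"file_name": "custom_patterns/spiral.thr", "pre_execution": "clear_from_in"}
#       -> any pre_execution value other than "none" is forwarded to the pattern
#          manager as clear_pattern, so a clearing pattern runs first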

@app.post("/stop_execution")
async def stop_execution():
    if not (state.conn.is_connected() if state.conn else False):
        logger.warning("Attempted to stop without a connection")
        raise HTTPException(status_code=400, detail="Connection not established")
    await pattern_manager.stop_actions()
    return {"success": True}

@app.post("/send_home")
async def send_home():
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to move to home without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        # Run homing with 15 second timeout
        success = await asyncio.to_thread(connection_manager.home)
        if not success:
            logger.error("Homing failed or timed out")
            raise HTTPException(status_code=500, detail="Homing failed or timed out after 15 seconds")
        return {"success": True}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to send home command: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/run_theta_rho_file/{file_name}")
async def run_specific_theta_rho_file(file_name: str):
    file_path = os.path.join(pattern_manager.THETA_RHO_DIR, file_name)
    if not os.path.exists(file_path):
        raise HTTPException(status_code=404, detail="File not found")
    if not (state.conn.is_connected() if state.conn else False):
        logger.warning("Attempted to run a pattern without a connection")
        raise HTTPException(status_code=400, detail="Connection not established")
    pattern_manager.run_theta_rho_file(file_path)
    return {"success": True}

class DeleteFileRequest(BaseModel):
    file_name: str

@app.post("/delete_theta_rho_file")
async def delete_theta_rho_file(request: DeleteFileRequest):
    if not request.file_name:
        logger.warning("Delete theta-rho file request received without filename")
        raise HTTPException(status_code=400, detail="No file name provided")
    # Normalize file path for cross-platform compatibility
    normalized_file_name = normalize_file_path(request.file_name)
    file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
    # Check file existence asynchronously
    exists = await asyncio.to_thread(os.path.exists, file_path)
    if not exists:
        logger.error(f"Attempted to delete non-existent file: {file_path}")
        raise HTTPException(status_code=404, detail="File not found")
    try:
        # Delete the pattern file asynchronously
        await asyncio.to_thread(os.remove, file_path)
        logger.info(f"Successfully deleted theta-rho file: {request.file_name}")
        # Clean up cached preview image and metadata asynchronously
        from modules.core.cache_manager import delete_pattern_cache
        cache_cleanup_success = await asyncio.to_thread(delete_pattern_cache, normalized_file_name)
        if cache_cleanup_success:
            logger.info(f"Successfully cleaned up cache for {request.file_name}")
        else:
            logger.warning(f"Cache cleanup failed for {request.file_name}, but pattern was deleted")
        return {"success": True, "cache_cleanup": cache_cleanup_success}
    except Exception as e:
        logger.error(f"Failed to delete theta-rho file {request.file_name}: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/move_to_center")
async def move_to_center():
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to move to center without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        logger.info("Moving device to center position")
        await pattern_manager.reset_theta()
        await pattern_manager.move_polar(0, 0)
        return {"success": True}
    except HTTPException:
        # Keep the 400 from the connection check instead of re-wrapping it as a 500
        raise
    except Exception as e:
        logger.error(f"Failed to move to center: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/move_to_perimeter")
async def move_to_perimeter():
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to move to perimeter without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        await pattern_manager.reset_theta()
        await pattern_manager.move_polar(0, 1)
        return {"success": True}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to move to perimeter: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/preview_thr")
async def preview_thr(request: DeleteFileRequest):
    if not request.file_name:
        logger.warning("Preview theta-rho request received without filename")
        raise HTTPException(status_code=400, detail="No file name provided")
    # Normalize file path for cross-platform compatibility
    normalized_file_name = normalize_file_path(request.file_name)
    # Construct the full path to the pattern file to check existence
    pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
    # Check file existence asynchronously
    exists = await asyncio.to_thread(os.path.exists, pattern_file_path)
    if not exists:
        logger.error(f"Attempted to preview non-existent pattern file: {pattern_file_path}")
        raise HTTPException(status_code=404, detail="Pattern file not found")
    try:
        cache_path = get_cache_path(normalized_file_name)
        # Check cache existence asynchronously
        cache_exists = await asyncio.to_thread(os.path.exists, cache_path)
        if not cache_exists:
            logger.info(f"Cache miss for {request.file_name}. Generating preview...")
            # Attempt to generate the preview if it's missing
            success = await generate_image_preview(normalized_file_name)
            cache_exists_after = await asyncio.to_thread(os.path.exists, cache_path)
            if not success or not cache_exists_after:
                logger.error(f"Failed to generate or find preview for {request.file_name} after attempting generation.")
                raise HTTPException(status_code=500, detail="Failed to generate preview image.")
        # Try to get coordinates from metadata cache first
        metadata = get_pattern_metadata(normalized_file_name)
        if metadata:
            first_coord_obj = metadata.get('first_coordinate')
            last_coord_obj = metadata.get('last_coordinate')
        else:
            # Fallback to parsing file if metadata not cached (shouldn't happen after initial cache)
            logger.debug(f"Metadata cache miss for {request.file_name}, parsing file")
            coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_file_path)
            first_coord = coordinates[0] if coordinates else None
            last_coord = coordinates[-1] if coordinates else None
            # Format coordinates as objects with x and y properties
            first_coord_obj = {"x": first_coord[0], "y": first_coord[1]} if first_coord else None
            last_coord_obj = {"x": last_coord[0], "y": last_coord[1]} if last_coord else None
        # Return JSON with preview URL and coordinates
        # URL encode the file_name for the preview URL
        # Handle both forward slashes and backslashes for cross-platform compatibility
        encoded_filename = normalized_file_name.replace('\\', '--').replace('/', '--')
        return {
            "preview_url": f"/preview/{encoded_filename}",
            "first_coordinate": first_coord_obj,
            "last_coordinate": last_coord_obj
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to generate or serve preview for {request.file_name}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to serve preview image: {str(e)}")

@app.get("/preview/{encoded_filename}")
async def serve_preview(encoded_filename: str):
    """Serve a preview image for a pattern file."""
    # Decode the filename by replacing -- with the original path separators
    # First try forward slash (most common case), then backslash if needed
    file_name = encoded_filename.replace('--', '/')
    # Apply normalization to handle any remaining path prefixes
    file_name = normalize_file_path(file_name)
    # Check if the decoded path exists, if not try backslash decoding
    cache_path = get_cache_path(file_name)
    if not os.path.exists(cache_path):
        # Try with backslash for Windows paths
        file_name_backslash = encoded_filename.replace('--', '\\')
        file_name_backslash = normalize_file_path(file_name_backslash)
        cache_path_backslash = get_cache_path(file_name_backslash)
        if os.path.exists(cache_path_backslash):
            file_name = file_name_backslash
            cache_path = cache_path_backslash
    # cache_path is already determined above in the decoding logic
    if not os.path.exists(cache_path):
        logger.error(f"Preview image not found for {file_name}")
        raise HTTPException(status_code=404, detail="Preview image not found")
    # Add caching headers
    headers = {
        "Cache-Control": "public, max-age=31536000",  # Cache for 1 year
        "Content-Type": "image/webp",
        "Accept-Ranges": "bytes"
    }
    return FileResponse(
        cache_path,
        media_type="image/webp",
        headers=headers
    )
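
# Preview URL encoding, for reference: preview_thr replaces path separators with "--"
# and serve_preview reverses that substitution, e.g. (illustrative file name)
#   "custom_patterns/spiral.thr"  <->  /preview/custom_patterns--spiral.thr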

@app.post("/send_coordinate")
async def send_coordinate(request: CoordinateRequest):
    if not (state.conn.is_connected() if state.conn else False):
        logger.warning("Attempted to send coordinate without a connection")
        raise HTTPException(status_code=400, detail="Connection not established")
    try:
        logger.debug(f"Sending coordinate: theta={request.theta}, rho={request.rho}")
        await pattern_manager.move_polar(request.theta, request.rho)
        return {"success": True}
    except Exception as e:
        logger.error(f"Failed to send coordinate: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/download/{filename}")
async def download_file(filename: str):
    return FileResponse(
        os.path.join(pattern_manager.THETA_RHO_DIR, filename),
        filename=filename
    )

@app.get("/serial_status")
async def serial_status():
    connected = state.conn.is_connected() if state.conn else False
    port = state.port
    logger.debug(f"Serial status check - connected: {connected}, port: {port}")
    return {
        "connected": connected,
        "port": port
    }

@app.post("/pause_execution")
async def pause_execution():
    if pattern_manager.pause_execution():
        return {"success": True, "message": "Execution paused"}
    raise HTTPException(status_code=500, detail="Failed to pause execution")

@app.post("/resume_execution")
async def resume_execution():
    if pattern_manager.resume_execution():
        return {"success": True, "message": "Execution resumed"}
    raise HTTPException(status_code=500, detail="Failed to resume execution")

# Playlist endpoints
@app.get("/list_all_playlists")
async def list_all_playlists():
    playlist_names = playlist_manager.list_all_playlists()
    return playlist_names

@app.get("/get_playlist")
async def get_playlist(name: str):
    if not name:
        raise HTTPException(status_code=400, detail="Missing playlist name parameter")
    playlist = playlist_manager.get_playlist(name)
    if not playlist:
        raise HTTPException(status_code=404, detail=f"Playlist '{name}' not found")
    return playlist

@app.post("/create_playlist")
async def create_playlist(request: PlaylistRequest):
    success = playlist_manager.create_playlist(request.playlist_name, request.files)
    return {
        "success": success,
        "message": f"Playlist '{request.playlist_name}' created/updated"
    }

@app.post("/modify_playlist")
async def modify_playlist(request: PlaylistRequest):
    success = playlist_manager.modify_playlist(request.playlist_name, request.files)
    return {
        "success": success,
        "message": f"Playlist '{request.playlist_name}' updated"
    }

@app.delete("/delete_playlist")
async def delete_playlist(request: DeletePlaylistRequest):
    success = playlist_manager.delete_playlist(request.playlist_name)
    if not success:
        raise HTTPException(
            status_code=404,
            detail=f"Playlist '{request.playlist_name}' not found"
        )
    return {
        "success": True,
        "message": f"Playlist '{request.playlist_name}' deleted"
    }

class AddToPlaylistRequest(BaseModel):
    playlist_name: str
    pattern: str

@app.post("/add_to_playlist")
async def add_to_playlist(request: AddToPlaylistRequest):
    success = playlist_manager.add_to_playlist(request.playlist_name, request.pattern)
    if not success:
        raise HTTPException(status_code=404, detail="Playlist not found")
    return {"success": True}

@app.post("/run_playlist")
async def run_playlist_endpoint(request: PlaylistRequest):
    """Run a playlist with specified parameters."""
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to run a playlist without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        if not os.path.exists(playlist_manager.PLAYLISTS_FILE):
            raise HTTPException(status_code=404, detail=f"Playlist '{request.playlist_name}' not found")
        # Start the playlist execution
        success, message = await playlist_manager.run_playlist(
            request.playlist_name,
            pause_time=request.pause_time,
            clear_pattern=request.clear_pattern,
            run_mode=request.run_mode,
            shuffle=request.shuffle
        )
        if not success:
            raise HTTPException(status_code=409, detail=message)
        return {"message": f"Started playlist: {request.playlist_name}"}
    except HTTPException:
        # Preserve the intended 400/404/409 status codes instead of wrapping them in a 500
        raise
    except Exception as e:
        logger.error(f"Error running playlist: {e}")
        raise HTTPException(status_code=500, detail=str(e))
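
# Example /run_playlist request body (playlist name is illustrative):
#   {"playlist_name": "Favorites", "pause_time": 5.0, "clear_pattern": "adaptive",
#    "run_mode": "loop", "shuffle": true}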

@app.post("/set_speed")
async def set_speed(request: SpeedRequest):
    try:
        if not (state.conn.is_connected() if state.conn else False):
            logger.warning("Attempted to change speed without a connection")
            raise HTTPException(status_code=400, detail="Connection not established")
        if request.speed <= 0:
            logger.warning(f"Invalid speed value received: {request.speed}")
            raise HTTPException(status_code=400, detail="Invalid speed value")
        state.speed = request.speed
        return {"success": True, "speed": request.speed}
    except HTTPException:
        # Preserve the 400 responses raised above instead of converting them to a 500
        raise
    except Exception as e:
        logger.error(f"Failed to set speed: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/check_software_update")
async def check_updates():
    update_info = update_manager.check_git_updates()
    return update_info

@app.post("/update_software")
async def update_software():
    logger.info("Starting software update process")
    success, error_message, error_log = update_manager.update_software()
    if success:
        logger.info("Software update completed successfully")
        return {"success": True}
    else:
        logger.error(f"Software update failed: {error_message}\nDetails: {error_log}")
        raise HTTPException(
            status_code=500,
            detail={
                "error": error_message,
                "details": error_log
            }
        )

@app.post("/set_wled_ip")
async def set_wled_ip(request: WLEDRequest):
    state.wled_ip = request.wled_ip
    state.led_controller = LEDController(request.wled_ip)
    effect_idle(state.led_controller)
    state.save()
    logger.info(f"WLED IP updated: {request.wled_ip}")
    return {"success": True, "wled_ip": state.wled_ip}

@app.get("/get_wled_ip")
async def get_wled_ip():
    if not state.wled_ip:
        raise HTTPException(status_code=404, detail="No WLED IP set")
    return {"success": True, "wled_ip": state.wled_ip}

@app.post("/skip_pattern")
async def skip_pattern():
    if not state.current_playlist:
        raise HTTPException(status_code=400, detail="No playlist is currently running")
    state.skip_requested = True
    return {"success": True}

@app.get("/api/custom_clear_patterns")
async def get_custom_clear_patterns():
    """Get the currently configured custom clear patterns."""
    return {
        "success": True,
        "custom_clear_from_in": state.custom_clear_from_in,
        "custom_clear_from_out": state.custom_clear_from_out
    }

@app.post("/api/custom_clear_patterns")
async def set_custom_clear_patterns(request: dict):
    """Set custom clear patterns for clear_from_in and clear_from_out."""
    try:
        # Validate that the patterns exist if they're provided
        if "custom_clear_from_in" in request and request["custom_clear_from_in"]:
            pattern_path = os.path.join(pattern_manager.THETA_RHO_DIR, request["custom_clear_from_in"])
            if not os.path.exists(pattern_path):
                raise HTTPException(status_code=400, detail=f"Pattern file not found: {request['custom_clear_from_in']}")
            state.custom_clear_from_in = request["custom_clear_from_in"]
        elif "custom_clear_from_in" in request:
            state.custom_clear_from_in = None
        if "custom_clear_from_out" in request and request["custom_clear_from_out"]:
            pattern_path = os.path.join(pattern_manager.THETA_RHO_DIR, request["custom_clear_from_out"])
            if not os.path.exists(pattern_path):
                raise HTTPException(status_code=400, detail=f"Pattern file not found: {request['custom_clear_from_out']}")
            state.custom_clear_from_out = request["custom_clear_from_out"]
        elif "custom_clear_from_out" in request:
            state.custom_clear_from_out = None
        state.save()
        logger.info(f"Custom clear patterns updated - in: {state.custom_clear_from_in}, out: {state.custom_clear_from_out}")
        return {
            "success": True,
            "custom_clear_from_in": state.custom_clear_from_in,
            "custom_clear_from_out": state.custom_clear_from_out
        }
    except HTTPException:
        # Preserve the 400 raised above instead of converting it to a 500
        raise
    except Exception as e:
        logger.error(f"Failed to set custom clear patterns: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/api/clear_pattern_speed")
async def get_clear_pattern_speed():
    """Get the current clearing pattern speed setting."""
    return {
        "success": True,
        "clear_pattern_speed": state.clear_pattern_speed,
        "effective_speed": state.clear_pattern_speed if state.clear_pattern_speed is not None else state.speed
    }

@app.post("/api/clear_pattern_speed")
async def set_clear_pattern_speed(request: dict):
    """Set the clearing pattern speed."""
    try:
        # If speed is None or "none", use default behavior (state.speed)
        speed_value = request.get("clear_pattern_speed")
        if speed_value is None or speed_value == "none" or speed_value == "":
            speed = None
        else:
            speed = int(speed_value)
        # Validate speed range (same as regular speed limits) only if speed is not None
        if speed is not None and not (50 <= speed <= 2000):
            raise HTTPException(status_code=400, detail="Speed must be between 50 and 2000")
        state.clear_pattern_speed = speed
        state.save()
        logger.info(f"Clear pattern speed set to {speed if speed is not None else 'default (state.speed)'}")
        return {
            "success": True,
            "clear_pattern_speed": state.clear_pattern_speed,
            "effective_speed": state.clear_pattern_speed if state.clear_pattern_speed is not None else state.speed
        }
    except HTTPException:
        # Preserve the 400 raised above instead of converting it to a 500
        raise
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid speed value")
    except Exception as e:
        logger.error(f"Failed to set clear pattern speed: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
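
# Example /api/clear_pattern_speed request bodies (values are illustrative):
#   {"clear_pattern_speed": 300}     -> clearing patterns run at speed 300 (accepted range 50-2000)
#   {"clear_pattern_speed": "none"}  -> revert to the default behaviour (use state.speed)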

@app.get("/api/app-name")
async def get_app_name():
    """Get current application name."""
    return {"app_name": state.app_name}

@app.post("/api/app-name")
async def set_app_name(request: dict):
    """Update application name."""
    app_name = request.get("app_name", "").strip()
    if not app_name:
        app_name = "Dune Weaver"  # Reset to default if empty
    state.app_name = app_name
    state.save()
    logger.info(f"Application name updated to: {app_name}")
    return {"success": True, "app_name": app_name}

@app.post("/preview_thr_batch")
async def preview_thr_batch(request: dict):
    start = time.time()
    if not request.get("file_names"):
        logger.warning("Batch preview request received without filenames")
        raise HTTPException(status_code=400, detail="No file names provided")
    file_names = request["file_names"]
    if not isinstance(file_names, list):
        raise HTTPException(status_code=400, detail="file_names must be a list")
    headers = {
        "Cache-Control": "public, max-age=3600",  # Cache for 1 hour
        "Content-Type": "application/json"
    }

    async def process_single_file(file_name):
        """Process a single file and return its preview data."""
        t1 = time.time()
        try:
            # Normalize file path for cross-platform compatibility
            normalized_file_name = normalize_file_path(file_name)
            pattern_file_path = os.path.join(pattern_manager.THETA_RHO_DIR, normalized_file_name)
            # Check file existence asynchronously
            exists = await asyncio.to_thread(os.path.exists, pattern_file_path)
            if not exists:
                logger.warning(f"Pattern file not found: {pattern_file_path}")
                return file_name, {"error": "Pattern file not found"}
            cache_path = get_cache_path(normalized_file_name)
            # Check cache existence asynchronously
            cache_exists = await asyncio.to_thread(os.path.exists, cache_path)
            if not cache_exists:
                logger.info(f"Cache miss for {file_name}. Generating preview...")
                success = await generate_image_preview(normalized_file_name)
                cache_exists_after = await asyncio.to_thread(os.path.exists, cache_path)
                if not success or not cache_exists_after:
                    logger.error(f"Failed to generate or find preview for {file_name}")
                    return file_name, {"error": "Failed to generate preview"}
            metadata = get_pattern_metadata(normalized_file_name)
            if metadata:
                first_coord_obj = metadata.get('first_coordinate')
                last_coord_obj = metadata.get('last_coordinate')
            else:
                logger.debug(f"Metadata cache miss for {file_name}, parsing file")
                # Use process pool for CPU-intensive parsing
                loop = asyncio.get_event_loop()
                coordinates = await loop.run_in_executor(process_pool, parse_theta_rho_file, pattern_file_path)
                first_coord = coordinates[0] if coordinates else None
                last_coord = coordinates[-1] if coordinates else None
                first_coord_obj = {"x": first_coord[0], "y": first_coord[1]} if first_coord else None
                last_coord_obj = {"x": last_coord[0], "y": last_coord[1]} if last_coord else None
            # Read image file asynchronously
            image_data = await asyncio.to_thread(lambda: open(cache_path, 'rb').read())
            image_b64 = base64.b64encode(image_data).decode('utf-8')
            result = {
                "image_data": f"data:image/webp;base64,{image_b64}",
                "first_coordinate": first_coord_obj,
                "last_coordinate": last_coord_obj
            }
            logger.debug(f"Processed {file_name} in {time.time() - t1:.2f}s")
            return file_name, result
        except Exception as e:
            logger.error(f"Error processing {file_name}: {str(e)}")
            return file_name, {"error": str(e)}

    # Process all files concurrently
    tasks = [process_single_file(file_name) for file_name in file_names]
    file_results = await asyncio.gather(*tasks)
    # Convert results to dictionary
    results = dict(file_results)
    logger.info(f"Total batch processing time: {time.time() - start:.2f}s for {len(file_names)} files")
    return JSONResponse(content=results, headers=headers)
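
# Example /preview_thr_batch request body (file names are illustrative):
#   {"file_names": ["custom_patterns/spiral.thr", "clear_from_out.thr"]}
# The response maps each file name either to its base64 preview payload and first/last
# coordinates, or to an {"error": ...} entry if that file could not be processed.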

@app.get("/playlists")
async def playlists(request: Request):
    logger.debug("Rendering playlists page")
    return templates.TemplateResponse("playlists.html", {"request": request, "app_name": state.app_name})

@app.get("/image2sand")
async def image2sand(request: Request):
    return templates.TemplateResponse("image2sand.html", {"request": request, "app_name": state.app_name})

@app.get("/wled")
async def wled(request: Request):
    return templates.TemplateResponse("wled.html", {"request": request, "app_name": state.app_name})

@app.get("/table_control")
async def table_control(request: Request):
    return templates.TemplateResponse("table_control.html", {"request": request, "app_name": state.app_name})

@app.get("/cache-progress")
async def get_cache_progress_endpoint():
    """Get the current cache generation progress."""
    from modules.core.cache_manager import get_cache_progress
    return get_cache_progress()

@app.post("/rebuild_cache")
async def rebuild_cache_endpoint():
    """Trigger a rebuild of the pattern cache."""
    try:
        from modules.core.cache_manager import rebuild_cache
        await rebuild_cache()
        return {"success": True, "message": "Cache rebuild completed successfully"}
    except Exception as e:
        logger.error(f"Failed to rebuild cache: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

def signal_handler(signum, frame):
    """Handle shutdown signals gracefully but forcefully."""
    logger.info("Received shutdown signal, cleaning up...")
    try:
        if state.led_controller:
            state.led_controller.set_power(0)
        # Run cleanup operations - need to handle async in sync context
        try:
            # Try to run in existing loop if available
            import asyncio
            loop = asyncio.get_running_loop()
            # If we're in an event loop, schedule the coroutine
            import concurrent.futures
            with concurrent.futures.ThreadPoolExecutor() as executor:
                future = executor.submit(asyncio.run, pattern_manager.stop_actions())
                future.result(timeout=5.0)  # Wait up to 5 seconds
        except RuntimeError:
            # No running loop, create a new one
            import asyncio
            asyncio.run(pattern_manager.stop_actions())
        except Exception as cleanup_err:
            logger.error(f"Error in async cleanup: {cleanup_err}")
        state.save()
        logger.info("Cleanup completed")
    except Exception as e:
        logger.error(f"Error during cleanup: {str(e)}")
    finally:
        logger.info("Exiting application...")
        os._exit(0)  # Force exit regardless of other threads

@app.get("/api/version")
async def get_version_info():
    """Get current and latest version information"""
    try:
        version_info = await version_manager.get_version_info()
        return JSONResponse(content=version_info)
    except Exception as e:
        logger.error(f"Error getting version info: {e}")
        return JSONResponse(
            content={
                "current": version_manager.get_current_version(),
                "latest": version_manager.get_current_version(),
                "update_available": False,
                "error": "Unable to check for updates"
            },
            status_code=200
        )

@app.post("/api/update")
async def trigger_update():
    """Trigger software update (placeholder for future implementation)"""
    try:
        # For now, just return the GitHub release URL
        version_info = await version_manager.get_version_info()
        if version_info.get("latest_release"):
            return JSONResponse(content={
                "success": False,
                "message": "Automatic updates not implemented yet",
                "manual_update_url": version_info["latest_release"].get("html_url"),
                "instructions": "Please visit the GitHub release page to download and install the update manually"
            })
        else:
            return JSONResponse(content={
                "success": False,
                "message": "No updates available"
            })
    except Exception as e:
        logger.error(f"Error triggering update: {e}")
        return JSONResponse(
            content={"success": False, "message": "Failed to check for updates"},
            status_code=500
        )

def entrypoint():
    import uvicorn
    logger.info("Starting FastAPI server on port 8080...")
    uvicorn.run(app, host="0.0.0.0", port=8080, workers=1)  # Set workers to 1 to avoid multiple signal handlers

if __name__ == "__main__":
    entrypoint()
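
# Running the app: `python main.py` calls entrypoint() and serves on 0.0.0.0:8080.
# Since `app` is defined at module level, something like
# `uvicorn main:app --host 0.0.0.0 --port 8080` should also work, though that path
# is an assumption and not exercised by this file.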