cache_manager.py

  1. """Image Cache Manager for pre-generating and managing image previews."""
  2. import os
  3. import json
  4. import asyncio
  5. import logging
  6. from pathlib import Path
  7. from modules.core.pattern_manager import list_theta_rho_files, THETA_RHO_DIR, parse_theta_rho_file
  8. logger = logging.getLogger(__name__)
  9. # Constants
  10. CACHE_DIR = os.path.join(THETA_RHO_DIR, "cached_images")
  11. METADATA_CACHE_FILE = "metadata_cache.json" # Now in root directory


def ensure_cache_dir():
    """Ensure the cache directory exists with proper permissions."""
    try:
        Path(CACHE_DIR).mkdir(parents=True, exist_ok=True)
        # Initialize metadata cache if it doesn't exist
        if not os.path.exists(METADATA_CACHE_FILE):
            with open(METADATA_CACHE_FILE, 'w') as f:
                json.dump({}, f)
            try:
                os.chmod(METADATA_CACHE_FILE, 0o644)  # More conservative permissions
            except (OSError, PermissionError) as e:
                logger.debug(f"Could not set metadata cache file permissions: {str(e)}")
        for root, dirs, files in os.walk(CACHE_DIR):
            try:
                os.chmod(root, 0o755)  # More conservative permissions
                for file in files:
                    file_path = os.path.join(root, file)
                    try:
                        os.chmod(file_path, 0o644)  # More conservative permissions
                    except (OSError, PermissionError) as e:
                        # Log as debug instead of error since this is not critical
                        logger.debug(f"Could not set permissions for file {file_path}: {str(e)}")
            except (OSError, PermissionError) as e:
                # Log as debug instead of error since this is not critical
                logger.debug(f"Could not set permissions for directory {root}: {str(e)}")
                continue
    except Exception as e:
        logger.error(f"Failed to create cache directory: {str(e)}")


def get_cache_path(pattern_file):
    """Get the cache path for a pattern file."""
    # Create subdirectories in cache to match the pattern file structure
    cache_subpath = os.path.dirname(pattern_file)
    if cache_subpath:
        # Create the same subdirectory structure in cache (including custom_patterns)
        cache_dir = os.path.join(CACHE_DIR, cache_subpath)
    else:
        # For files in root pattern directory
        cache_dir = CACHE_DIR
    # Ensure the subdirectory exists
    os.makedirs(cache_dir, exist_ok=True)
    try:
        os.chmod(cache_dir, 0o755)  # More conservative permissions
    except (OSError, PermissionError) as e:
        # Log as debug instead of error since this is not critical
        logger.debug(f"Could not set permissions for cache subdirectory {cache_dir}: {str(e)}")
    # Use just the filename part for the cache file
    filename = os.path.basename(pattern_file)
    safe_name = filename.replace('\\', '_')
    return os.path.join(cache_dir, f"{safe_name}.webp")
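
# Illustrative example (not part of the original module): for a hypothetical
# pattern file "custom_patterns/spiral.thr", get_cache_path() returns
# os.path.join(CACHE_DIR, "custom_patterns", "spiral.thr.webp") and creates the
# "custom_patterns" subdirectory under CACHE_DIR as a side effect.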


def load_metadata_cache():
    """Load the metadata cache from disk."""
    try:
        if os.path.exists(METADATA_CACHE_FILE):
            with open(METADATA_CACHE_FILE, 'r') as f:
                return json.load(f)
    except Exception as e:
        logger.warning(f"Failed to load metadata cache: {str(e)}")
    return {}


def save_metadata_cache(cache_data):
    """Save the metadata cache to disk."""
    try:
        ensure_cache_dir()
        with open(METADATA_CACHE_FILE, 'w') as f:
            json.dump(cache_data, f, indent=2)
    except Exception as e:
        logger.error(f"Failed to save metadata cache: {str(e)}")


def get_pattern_metadata(pattern_file):
    """Get cached metadata for a pattern file."""
    cache_data = load_metadata_cache()
    # Check if we have cached metadata and if the file hasn't changed
    if pattern_file in cache_data:
        cached_entry = cache_data[pattern_file]
        pattern_path = os.path.join(THETA_RHO_DIR, pattern_file)
        try:
            file_mtime = os.path.getmtime(pattern_path)
            if cached_entry.get('mtime') == file_mtime:
                return cached_entry.get('metadata')
        except OSError:
            pass
    return None


def cache_pattern_metadata(pattern_file, first_coord, last_coord, total_coords):
    """Cache metadata for a pattern file."""
    try:
        cache_data = load_metadata_cache()
        pattern_path = os.path.join(THETA_RHO_DIR, pattern_file)
        file_mtime = os.path.getmtime(pattern_path)
        cache_data[pattern_file] = {
            'mtime': file_mtime,
            'metadata': {
                'first_coordinate': first_coord,
                'last_coordinate': last_coord,
                'total_coordinates': total_coords
            }
        }
        save_metadata_cache(cache_data)
        logger.debug(f"Cached metadata for {pattern_file}")
    except Exception as e:
        logger.warning(f"Failed to cache metadata for {pattern_file}: {str(e)}")
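
# Illustrative shape of a metadata_cache.json entry written by
# cache_pattern_metadata() (the file name and values below are hypothetical):
# {
#   "custom_patterns/spiral.thr": {
#     "mtime": 1712345678.0,
#     "metadata": {
#       "first_coordinate": {"x": 0.0, "y": 0.0},
#       "last_coordinate": {"x": 6.283, "y": 1.0},
#       "total_coordinates": 2048
#     }
#   }
# }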


def needs_cache(pattern_file):
    """Check if a pattern file needs its cache generated."""
    # Check if image preview exists
    cache_path = get_cache_path(pattern_file)
    if not os.path.exists(cache_path):
        return True
    # Check if metadata cache exists and is valid
    metadata = get_pattern_metadata(pattern_file)
    if metadata is None:
        return True
    return False


async def generate_image_preview(pattern_file):
    """Generate image preview for a single pattern file."""
    from modules.core.preview import generate_preview_image
    from modules.core.pattern_manager import parse_theta_rho_file
    try:
        logger.debug(f"Starting preview generation for {pattern_file}")
        # Check if we need to update metadata cache
        metadata = get_pattern_metadata(pattern_file)
        if metadata is None:
            # Parse file to get metadata (this is the only time we need to parse)
            logger.debug(f"Parsing {pattern_file} for metadata cache")
            pattern_path = os.path.join(THETA_RHO_DIR, pattern_file)
            try:
                coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_path)
                if coordinates:
                    first_coord = {"x": coordinates[0][0], "y": coordinates[0][1]}
                    last_coord = {"x": coordinates[-1][0], "y": coordinates[-1][1]}
                    total_coords = len(coordinates)
                    # Cache the metadata for future use
                    cache_pattern_metadata(pattern_file, first_coord, last_coord, total_coords)
                    logger.debug(f"Metadata cached for {pattern_file}: {total_coords} coordinates")
                else:
                    logger.warning(f"No coordinates found in {pattern_file}")
            except Exception as e:
                logger.error(f"Failed to parse {pattern_file} for metadata: {str(e)}")
                # Continue with image generation even if metadata fails
        # Check if we need to generate the image
        cache_path = get_cache_path(pattern_file)
        if os.path.exists(cache_path):
            logger.debug(f"Skipping image generation for {pattern_file} - already cached")
            return True
        # Generate the image
        logger.debug(f"Generating image preview for {pattern_file}")
        image_content = await generate_preview_image(pattern_file)
        if not image_content:
            logger.error(f"Generated image content is empty for {pattern_file}")
            return False
        # Ensure cache directory exists
        ensure_cache_dir()
        with open(cache_path, 'wb') as f:
            f.write(image_content)
        try:
            os.chmod(cache_path, 0o644)  # More conservative permissions
        except (OSError, PermissionError) as e:
            # Log as debug instead of error since this is not critical
            logger.debug(f"Could not set cache file permissions for {pattern_file}: {str(e)}")
        logger.debug(f"Successfully generated preview for {pattern_file}")
        return True
    except Exception as e:
        logger.error(f"Failed to generate image for {pattern_file}: {str(e)}")
        return False


async def generate_all_image_previews():
    """Generate image previews for all pattern files."""
    ensure_cache_dir()
    pattern_files = [f for f in list_theta_rho_files() if f.endswith('.thr')]
    patterns_to_cache = [f for f in pattern_files if needs_cache(f)]
    total_files = len(patterns_to_cache)
    if total_files == 0:
        logger.info("All patterns are already cached")
        return
    logger.info(f"Generating image cache for {total_files} uncached .thr patterns...")
    batch_size = 5
    successful = 0
    for i in range(0, total_files, batch_size):
        batch = patterns_to_cache[i:i + batch_size]
        tasks = [generate_image_preview(file) for file in batch]
        results = await asyncio.gather(*tasks)
        successful += sum(1 for r in results if r)
        # Log progress
        progress = min(i + batch_size, total_files)
        logger.info(f"Image cache generation progress: {progress}/{total_files} files processed")
    logger.info(f"Image cache generation completed: {successful}/{total_files} patterns cached")


async def generate_metadata_cache():
    """Generate metadata cache for all pattern files."""
    logger.info("Starting metadata cache generation...")
    # Get all pattern files using the same function as the rest of the codebase
    pattern_files = list_theta_rho_files()
    # Filter out files that already have a valid metadata cache entry
    files_to_process = []
    for file_name in pattern_files:
        if get_pattern_metadata(file_name) is None:
            files_to_process.append(file_name)
    total_files = len(files_to_process)
    skipped_files = len(pattern_files) - total_files
    if total_files == 0:
        logger.info(f"No new files to cache. {skipped_files} files already cached.")
        return
    logger.info(f"Generating metadata cache for {total_files} new files ({skipped_files} files already cached)...")
    # Process in batches; files within a batch are parsed sequentially
    batch_size = 5
    successful = 0
    for i in range(0, total_files, batch_size):
        batch = files_to_process[i:i + batch_size]
        for file_name in batch:
            pattern_path = os.path.join(THETA_RHO_DIR, file_name)
            try:
                # Parse file to get metadata
                coordinates = await asyncio.to_thread(parse_theta_rho_file, pattern_path)
                if coordinates:
                    first_coord = {"x": coordinates[0][0], "y": coordinates[0][1]}
                    last_coord = {"x": coordinates[-1][0], "y": coordinates[-1][1]}
                    total_coords = len(coordinates)
                    # Cache the metadata
                    cache_pattern_metadata(file_name, first_coord, last_coord, total_coords)
                    successful += 1
                    logger.debug(f"Generated metadata for {file_name}")
            except Exception as e:
                logger.error(f"Failed to generate metadata for {file_name}: {str(e)}")
        # Log progress
        progress = min(i + batch_size, total_files)
        logger.info(f"Metadata cache generation progress: {progress}/{total_files} files processed")
    logger.info(f"Metadata cache generation completed: {successful}/{total_files} patterns cached successfully, {skipped_files} patterns skipped (already cached)")


async def rebuild_cache():
    """Rebuild the entire cache for all pattern files."""
    logger.info("Starting cache rebuild...")
    # Ensure cache directory exists
    ensure_cache_dir()
    # First generate metadata cache for all files
    await generate_metadata_cache()
    # Then generate image previews
    pattern_files = [f for f in list_theta_rho_files() if f.endswith('.thr')]
    total_files = len(pattern_files)
    if total_files == 0:
        logger.info("No pattern files found to cache")
        return
    logger.info(f"Generating image previews for {total_files} pattern files...")
    # Process in batches
    batch_size = 5
    successful = 0
    for i in range(0, total_files, batch_size):
        batch = pattern_files[i:i + batch_size]
        tasks = [generate_image_preview(file) for file in batch]
        results = await asyncio.gather(*tasks)
        successful += sum(1 for r in results if r)
        # Log progress
        progress = min(i + batch_size, total_files)
        logger.info(f"Image preview generation progress: {progress}/{total_files} files processed")
    logger.info(f"Cache rebuild completed: {successful}/{total_files} patterns cached successfully")
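

# Minimal usage sketch (illustrative assumption, not part of the original module):
# an application entry point could rebuild both the metadata and image caches
# like this.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    asyncio.run(rebuild_cache())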