Просмотр исходного кода

Fix preview batch requests overwhelming backend on Pi

Both BrowsePage and PlaylistsPage were sending all uncached pattern
previews in a single request, causing:
- 100+ second processing times for 879 files
- 504 gateway timeouts on other API calls
- 8.9MB responses causing nginx buffering warnings

Now batches requests into groups of 10 with 100ms delays between
batches, matching the cacheAllPreviews behavior.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
tuanchris 3 недели назад
Родитель
Commit
9dbb58b111
2 изменённых файла: 57 строк добавлено, 29 удалено
  1. 28 13
      frontend/src/pages/BrowsePage.tsx
  2. 29 16
      frontend/src/pages/PlaylistsPage.tsx

+ 28 - 13
frontend/src/pages/BrowsePage.tsx

@@ -238,6 +238,8 @@ export function BrowsePage() {
   }
 
   const fetchPreviewsBatch = async (filePaths: string[]) => {
+    const BATCH_SIZE = 10 // Process 10 patterns at a time to avoid overwhelming the backend
+
     try {
       // First check IndexedDB cache for all patterns
       const cachedPreviews = await getPreviewsFromCache(filePaths)
@@ -254,23 +256,36 @@ export function BrowsePage() {
       // Find patterns not in cache
       const uncachedPaths = filePaths.filter((path) => !cachedPreviews.has(path))
 
-      // Only fetch uncached patterns from API
+      // Fetch uncached patterns in batches to avoid overwhelming the backend
       if (uncachedPaths.length > 0) {
-        const response = await fetch('/preview_thr_batch', {
-          method: 'POST',
-          headers: { 'Content-Type': 'application/json' },
-          body: JSON.stringify({ file_names: uncachedPaths }),
-        })
-        const data = await response.json()
+        for (let i = 0; i < uncachedPaths.length; i += BATCH_SIZE) {
+          const batch = uncachedPaths.slice(i, i + BATCH_SIZE)
+
+          try {
+            const response = await fetch('/preview_thr_batch', {
+              method: 'POST',
+              headers: { 'Content-Type': 'application/json' },
+              body: JSON.stringify({ file_names: batch }),
+            })
+            const data = await response.json()
+
+            // Save fetched previews to IndexedDB cache
+            for (const [path, previewData] of Object.entries(data)) {
+              if (previewData && !(previewData as PreviewData).error) {
+                savePreviewToCache(path, previewData as PreviewData)
+              }
+            }
+
+            setPreviews((prev) => ({ ...prev, ...data }))
+          } catch {
+            // Continue with next batch even if one fails
+          }
 
-        // Save fetched previews to IndexedDB cache
-        for (const [path, previewData] of Object.entries(data)) {
-          if (previewData && !(previewData as PreviewData).error) {
-            savePreviewToCache(path, previewData as PreviewData)
+          // Small delay between batches to reduce backend load
+          if (i + BATCH_SIZE < uncachedPaths.length) {
+            await new Promise((resolve) => setTimeout(resolve, 100))
           }
         }
-
-        setPreviews((prev) => ({ ...prev, ...data }))
       }
     } catch (error) {
       console.error('Error fetching previews:', error)

+ 29 - 16
frontend/src/pages/PlaylistsPage.tsx

@@ -225,25 +225,38 @@ export function PlaylistsPage() {
   }
 
   const fetchPreviewsBatch = async (paths: string[]) => {
-    try {
-      const response = await fetch('/preview_thr_batch', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ file_names: paths }),
-      })
-      const data = await response.json()
+    const BATCH_SIZE = 10 // Process 10 patterns at a time to avoid overwhelming the backend
+
+    // Process in batches
+    for (let i = 0; i < paths.length; i += BATCH_SIZE) {
+      const batch = paths.slice(i, i + BATCH_SIZE)
+
+      try {
+        const response = await fetch('/preview_thr_batch', {
+          method: 'POST',
+          headers: { 'Content-Type': 'application/json' },
+          body: JSON.stringify({ file_names: batch }),
+        })
+        const data = await response.json()
 
-      const newPreviews: Record<string, PreviewData> = {}
-      for (const [path, previewData] of Object.entries(data)) {
-        newPreviews[path] = previewData as PreviewData
-        // Only cache valid previews (with image_data and no error)
-        if (previewData && !(previewData as PreviewData).error) {
-          savePreviewToCache(path, previewData as PreviewData)
+        const newPreviews: Record<string, PreviewData> = {}
+        for (const [path, previewData] of Object.entries(data)) {
+          newPreviews[path] = previewData as PreviewData
+          // Only cache valid previews (with image_data and no error)
+          if (previewData && !(previewData as PreviewData).error) {
+            savePreviewToCache(path, previewData as PreviewData)
+          }
         }
+        setPreviews(prev => ({ ...prev, ...newPreviews }))
+      } catch (error) {
+        console.error('Error fetching previews batch:', error)
+        // Continue with next batch even if one fails
+      }
+
+      // Small delay between batches to reduce backend load
+      if (i + BATCH_SIZE < paths.length) {
+        await new Promise((resolve) => setTimeout(resolve, 100))
       }
-      setPreviews(prev => ({ ...prev, ...newPreviews }))
-    } catch (error) {
-      console.error('Error fetching previews:', error)
     }
   }