Finally got the memory thing under control, I'm pretty sure.
This commit is contained in:
27
bookstorm.py
27
bookstorm.py
@@ -817,15 +817,34 @@ class BookReader:
|
|||||||
gc.collect() # Full collection every 20 seconds
|
gc.collect() # Full collection every 20 seconds
|
||||||
else:
|
else:
|
||||||
gc.collect(generation=0) # Fast collection every 10 seconds
|
gc.collect(generation=0) # Fast collection every 10 seconds
|
||||||
|
|
||||||
# Memory watchdog: warn if exceeding 2GB (50% on Pi 4GB)
|
# Memory watchdog: warn if exceeding 2GB (50% on Pi 4GB)
|
||||||
try:
|
try:
|
||||||
import resource
|
import resource
|
||||||
# pylint: disable=no-member
|
# pylint: disable=no-member
|
||||||
memUsage = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024 # MB
|
memUsage = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024 # MB
|
||||||
if memUsage > 2048 and not memoryWarningShown:
|
|
||||||
memoryWarningShown = True
|
# More aggressive memory warnings and cleanup
|
||||||
self.speechEngine.speak("Warning: High memory usage detected. Consider restarting BookStorm soon.")
|
if memUsage > 1536: # 1.5GB threshold
|
||||||
except:
|
if not memoryWarningShown:
|
||||||
|
print(f"Memory usage: {memUsage:.0f}MB - performing aggressive cleanup")
|
||||||
|
# Force aggressive cleanup
|
||||||
|
if hasattr(self, 'parser') and self.parser:
|
||||||
|
self.parser.cleanup()
|
||||||
|
# Clear any cached pygame surfaces
|
||||||
|
if hasattr(self, 'cachedSurfaces'):
|
||||||
|
for surfaceType, surface, position in self.cachedSurfaces:
|
||||||
|
del surface
|
||||||
|
self.cachedSurfaces.clear()
|
||||||
|
# Additional garbage collection
|
||||||
|
gc.collect()
|
||||||
|
gc.collect() # Second pass
|
||||||
|
|
||||||
|
if memUsage > 2048 and not memoryWarningShown:
|
||||||
|
memoryWarningShown = True
|
||||||
|
self.speechEngine.speak("Warning: High memory usage detected. Consider restarting BookStorm soon.")
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Memory monitoring error: {e}")
|
||||||
pass
|
pass
|
||||||
gcCounter = 0
|
gcCounter = 0
|
||||||
|
|
||||||
|
|||||||
@@ -381,7 +381,56 @@ class EpubParser:
|
|||||||
return chapters
|
return chapters
|
||||||
|
|
||||||
def cleanup(self):
|
def cleanup(self):
|
||||||
"""Clean up temporary files"""
|
"""Clean up temporary files and memory"""
|
||||||
|
try:
|
||||||
|
# Clear BeautifulSoup objects and other memory references
|
||||||
|
if hasattr(self, 'soup'):
|
||||||
|
del self.soup
|
||||||
|
self.soup = None
|
||||||
|
if hasattr(self, 'rootfile'):
|
||||||
|
del self.rootfile
|
||||||
|
self.rootfile = None
|
||||||
|
if hasattr(self, 'metadataTag'):
|
||||||
|
del self.metadataTag
|
||||||
|
self.metadataTag = None
|
||||||
|
if hasattr(self, 'manifestTag'):
|
||||||
|
del self.manifestTag
|
||||||
|
self.manifestTag = None
|
||||||
|
if hasattr(self, 'spineTag'):
|
||||||
|
del self.spineTag
|
||||||
|
self.spineTag = None
|
||||||
|
if hasattr(self, 'tocNav'):
|
||||||
|
del self.tocNav
|
||||||
|
self.tocNav = None
|
||||||
|
if hasattr(self, 'navMap'):
|
||||||
|
del self.navMap
|
||||||
|
self.navMap = None
|
||||||
|
|
||||||
|
# Clear book content and references
|
||||||
|
if self.book:
|
||||||
|
if hasattr(self.book, 'chapters'):
|
||||||
|
for chapter in self.book.chapters:
|
||||||
|
if hasattr(chapter, 'paragraphs'):
|
||||||
|
chapter.paragraphs.clear()
|
||||||
|
self.book.chapters.clear()
|
||||||
|
self.book = None
|
||||||
|
|
||||||
|
# Clear dictionaries and lists
|
||||||
|
if hasattr(self, 'tocMap'):
|
||||||
|
self.tocMap.clear()
|
||||||
|
if hasattr(self, 'manifest'):
|
||||||
|
self.manifest.clear()
|
||||||
|
if hasattr(self, 'spine'):
|
||||||
|
self.spine.clear()
|
||||||
|
|
||||||
|
# Force garbage collection
|
||||||
|
import gc
|
||||||
|
gc.collect()
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Warning: Error during memory cleanup: {e}")
|
||||||
|
|
||||||
|
# Clean up temp directory
|
||||||
if self.tempDir and Path(self.tempDir).exists():
|
if self.tempDir and Path(self.tempDir).exists():
|
||||||
try:
|
try:
|
||||||
shutil.rmtree(self.tempDir)
|
shutil.rmtree(self.tempDir)
|
||||||
|
|||||||
@@ -8,6 +8,8 @@ Supports real-time speed control without re-encoding.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import gc
|
||||||
|
import time
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import threading
|
import threading
|
||||||
from concurrent.futures import ThreadPoolExecutor
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
@@ -108,24 +110,46 @@ class MpvPlayer:
|
|||||||
if success:
|
if success:
|
||||||
success = self.play_audio_file()
|
success = self.play_audio_file()
|
||||||
|
|
||||||
# Schedule cleanup after mpv loads the file
|
# Schedule more aggressive cleanup with memory awareness
|
||||||
# Use ThreadPoolExecutor instead of daemon threads to prevent accumulation
|
|
||||||
if tempFile:
|
if tempFile:
|
||||||
import time
|
import time
|
||||||
def cleanup_temp_file(filepath):
|
def cleanup_temp_file(filepath, file_size):
|
||||||
time.sleep(2) # mpv loads files quickly
|
# Shorter delay for smaller files, longer for larger ones
|
||||||
|
# Small files (<100KB): 0.5s delay
|
||||||
|
# Medium files (100KB-1MB): 1s delay
|
||||||
|
# Large files (>1MB): 2s delay
|
||||||
|
if file_size < 100 * 1024:
|
||||||
|
delay = 0.5
|
||||||
|
elif file_size < 1024 * 1024:
|
||||||
|
delay = 1.0
|
||||||
|
else:
|
||||||
|
delay = 2.0
|
||||||
|
|
||||||
|
time.sleep(delay)
|
||||||
try:
|
try:
|
||||||
|
# Force garbage collection after file deletion
|
||||||
os.unlink(filepath)
|
os.unlink(filepath)
|
||||||
|
import gc
|
||||||
|
gc.collect() # Help reclaim memory sooner
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
# Remove from active list
|
# Remove from active list
|
||||||
with self.tempFileLock:
|
with self.tempFileLock:
|
||||||
try:
|
try:
|
||||||
self.activeTempFiles.remove(filepath)
|
# Remove by path if stored as string
|
||||||
|
if filepath in self.activeTempFiles:
|
||||||
|
self.activeTempFiles.remove(filepath)
|
||||||
|
else:
|
||||||
|
# Remove by dict path if stored as dict
|
||||||
|
for item in self.activeTempFiles:
|
||||||
|
if isinstance(item, dict) and item.get('path') == filepath:
|
||||||
|
self.activeTempFiles.remove(item)
|
||||||
|
break
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass # Already removed
|
pass # Already removed
|
||||||
# Use thread pool instead of creating new daemon threads
|
|
||||||
self.cleanupExecutor.submit(cleanup_temp_file, tempFile.name)
|
# Use thread pool with file size for better memory management
|
||||||
|
self.cleanupExecutor.submit(cleanup_temp_file, tempFile.name, len(wavData))
|
||||||
|
|
||||||
return success
|
return success
|
||||||
|
|
||||||
|
|||||||
@@ -59,7 +59,8 @@ class TtsEngine:
|
|||||||
|
|
||||||
# Safety: Limit text size to prevent excessive memory usage
|
# Safety: Limit text size to prevent excessive memory usage
|
||||||
# ~10,000 chars = ~10-15 minutes of audio at normal reading speed
|
# ~10,000 chars = ~10-15 minutes of audio at normal reading speed
|
||||||
MAX_TEXT_LENGTH = 10000
|
# Reduced to 5000 to lower memory usage per paragraph
|
||||||
|
MAX_TEXT_LENGTH = 5000
|
||||||
if len(text) > MAX_TEXT_LENGTH:
|
if len(text) > MAX_TEXT_LENGTH:
|
||||||
print(f"Warning: Paragraph too long ({len(text)} chars), truncating to {MAX_TEXT_LENGTH}")
|
print(f"Warning: Paragraph too long ({len(text)} chars), truncating to {MAX_TEXT_LENGTH}")
|
||||||
text = text[:MAX_TEXT_LENGTH] + "..."
|
text = text[:MAX_TEXT_LENGTH] + "..."
|
||||||
|
|||||||
Reference in New Issue
Block a user