Finally got the memory thing under control, I'm pretty sure.
This commit is contained in:
27
bookstorm.py
27
bookstorm.py
@@ -817,15 +817,34 @@ class BookReader:
|
||||
gc.collect() # Full collection every 20 seconds
|
||||
else:
|
||||
gc.collect(generation=0) # Fast collection every 10 seconds
|
||||
|
||||
# Memory watchdog: warn if exceeding 2GB (50% on Pi 4GB)
|
||||
try:
|
||||
import resource
|
||||
# pylint: disable=no-member
|
||||
memUsage = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024 # MB
|
||||
if memUsage > 2048 and not memoryWarningShown:
|
||||
memoryWarningShown = True
|
||||
self.speechEngine.speak("Warning: High memory usage detected. Consider restarting BookStorm soon.")
|
||||
except:
|
||||
|
||||
# More aggressive memory warnings and cleanup
|
||||
if memUsage > 1536: # 1.5GB threshold
|
||||
if not memoryWarningShown:
|
||||
print(f"Memory usage: {memUsage:.0f}MB - performing aggressive cleanup")
|
||||
# Force aggressive cleanup
|
||||
if hasattr(self, 'parser') and self.parser:
|
||||
self.parser.cleanup()
|
||||
# Clear any cached pygame surfaces
|
||||
if hasattr(self, 'cachedSurfaces'):
|
||||
for surfaceType, surface, position in self.cachedSurfaces:
|
||||
del surface
|
||||
self.cachedSurfaces.clear()
|
||||
# Additional garbage collection
|
||||
gc.collect()
|
||||
gc.collect() # Second pass
|
||||
|
||||
if memUsage > 2048 and not memoryWarningShown:
|
||||
memoryWarningShown = True
|
||||
self.speechEngine.speak("Warning: High memory usage detected. Consider restarting BookStorm soon.")
|
||||
except Exception as e:
|
||||
print(f"Memory monitoring error: {e}")
|
||||
pass
|
||||
gcCounter = 0
|
||||
|
||||
|
||||
@@ -381,7 +381,56 @@ class EpubParser:
|
||||
return chapters
|
||||
|
||||
def cleanup(self):
|
||||
"""Clean up temporary files"""
|
||||
"""Clean up temporary files and memory"""
|
||||
try:
|
||||
# Clear BeautifulSoup objects and other memory references
|
||||
if hasattr(self, 'soup'):
|
||||
del self.soup
|
||||
self.soup = None
|
||||
if hasattr(self, 'rootfile'):
|
||||
del self.rootfile
|
||||
self.rootfile = None
|
||||
if hasattr(self, 'metadataTag'):
|
||||
del self.metadataTag
|
||||
self.metadataTag = None
|
||||
if hasattr(self, 'manifestTag'):
|
||||
del self.manifestTag
|
||||
self.manifestTag = None
|
||||
if hasattr(self, 'spineTag'):
|
||||
del self.spineTag
|
||||
self.spineTag = None
|
||||
if hasattr(self, 'tocNav'):
|
||||
del self.tocNav
|
||||
self.tocNav = None
|
||||
if hasattr(self, 'navMap'):
|
||||
del self.navMap
|
||||
self.navMap = None
|
||||
|
||||
# Clear book content and references
|
||||
if self.book:
|
||||
if hasattr(self.book, 'chapters'):
|
||||
for chapter in self.book.chapters:
|
||||
if hasattr(chapter, 'paragraphs'):
|
||||
chapter.paragraphs.clear()
|
||||
self.book.chapters.clear()
|
||||
self.book = None
|
||||
|
||||
# Clear dictionaries and lists
|
||||
if hasattr(self, 'tocMap'):
|
||||
self.tocMap.clear()
|
||||
if hasattr(self, 'manifest'):
|
||||
self.manifest.clear()
|
||||
if hasattr(self, 'spine'):
|
||||
self.spine.clear()
|
||||
|
||||
# Force garbage collection
|
||||
import gc
|
||||
gc.collect()
|
||||
|
||||
except Exception as e:
|
||||
print(f"Warning: Error during memory cleanup: {e}")
|
||||
|
||||
# Clean up temp directory
|
||||
if self.tempDir and Path(self.tempDir).exists():
|
||||
try:
|
||||
shutil.rmtree(self.tempDir)
|
||||
|
||||
@@ -8,6 +8,8 @@ Supports real-time speed control without re-encoding.
|
||||
"""
|
||||
|
||||
import os
|
||||
import gc
|
||||
import time
|
||||
from pathlib import Path
|
||||
import threading
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
@@ -108,24 +110,46 @@ class MpvPlayer:
|
||||
if success:
|
||||
success = self.play_audio_file()
|
||||
|
||||
# Schedule cleanup after mpv loads the file
|
||||
# Use ThreadPoolExecutor instead of daemon threads to prevent accumulation
|
||||
# Schedule more aggressive cleanup with memory awareness
|
||||
if tempFile:
|
||||
import time
|
||||
def cleanup_temp_file(filepath):
|
||||
time.sleep(2) # mpv loads files quickly
|
||||
def cleanup_temp_file(filepath, file_size):
|
||||
# Shorter delay for smaller files, longer for larger ones
|
||||
# Small files (<100KB): 0.5s delay
|
||||
# Medium files (100KB-1MB): 1s delay
|
||||
# Large files (>1MB): 2s delay
|
||||
if file_size < 100 * 1024:
|
||||
delay = 0.5
|
||||
elif file_size < 1024 * 1024:
|
||||
delay = 1.0
|
||||
else:
|
||||
delay = 2.0
|
||||
|
||||
time.sleep(delay)
|
||||
try:
|
||||
# Force garbage collection after file deletion
|
||||
os.unlink(filepath)
|
||||
import gc
|
||||
gc.collect() # Help reclaim memory sooner
|
||||
except:
|
||||
pass
|
||||
# Remove from active list
|
||||
with self.tempFileLock:
|
||||
try:
|
||||
self.activeTempFiles.remove(filepath)
|
||||
# Remove by path if stored as string
|
||||
if filepath in self.activeTempFiles:
|
||||
self.activeTempFiles.remove(filepath)
|
||||
else:
|
||||
# Remove by dict path if stored as dict
|
||||
for item in self.activeTempFiles:
|
||||
if isinstance(item, dict) and item.get('path') == filepath:
|
||||
self.activeTempFiles.remove(item)
|
||||
break
|
||||
except ValueError:
|
||||
pass # Already removed
|
||||
# Use thread pool instead of creating new daemon threads
|
||||
self.cleanupExecutor.submit(cleanup_temp_file, tempFile.name)
|
||||
|
||||
# Use thread pool with file size for better memory management
|
||||
self.cleanupExecutor.submit(cleanup_temp_file, tempFile.name, len(wavData))
|
||||
|
||||
return success
|
||||
|
||||
|
||||
@@ -59,7 +59,8 @@ class TtsEngine:
|
||||
|
||||
# Safety: Limit text size to prevent excessive memory usage
|
||||
# ~10,000 chars = ~10-15 minutes of audio at normal reading speed
|
||||
MAX_TEXT_LENGTH = 10000
|
||||
# Reduced to 5000 to lower memory usage per paragraph
|
||||
MAX_TEXT_LENGTH = 5000
|
||||
if len(text) > MAX_TEXT_LENGTH:
|
||||
print(f"Warning: Paragraph too long ({len(text)} chars), truncating to {MAX_TEXT_LENGTH}")
|
||||
text = text[:MAX_TEXT_LENGTH] + "..."
|
||||
|
||||
Reference in New Issue
Block a user