Finally got the memory thing under control, I'm pretty sure.

This commit is contained in:
Storm Dragon
2025-11-13 00:44:58 -05:00
parent 8bcde8bf37
commit 671a290323
4 changed files with 106 additions and 13 deletions
+50 -1
View File
@@ -381,7 +381,56 @@ class EpubParser:
return chapters
def cleanup(self):
"""Clean up temporary files"""
"""Clean up temporary files and memory"""
try:
# Clear BeautifulSoup objects and other memory references
if hasattr(self, 'soup'):
del self.soup
self.soup = None
if hasattr(self, 'rootfile'):
del self.rootfile
self.rootfile = None
if hasattr(self, 'metadataTag'):
del self.metadataTag
self.metadataTag = None
if hasattr(self, 'manifestTag'):
del self.manifestTag
self.manifestTag = None
if hasattr(self, 'spineTag'):
del self.spineTag
self.spineTag = None
if hasattr(self, 'tocNav'):
del self.tocNav
self.tocNav = None
if hasattr(self, 'navMap'):
del self.navMap
self.navMap = None
# Clear book content and references
if self.book:
if hasattr(self.book, 'chapters'):
for chapter in self.book.chapters:
if hasattr(chapter, 'paragraphs'):
chapter.paragraphs.clear()
self.book.chapters.clear()
self.book = None
# Clear dictionaries and lists
if hasattr(self, 'tocMap'):
self.tocMap.clear()
if hasattr(self, 'manifest'):
self.manifest.clear()
if hasattr(self, 'spine'):
self.spine.clear()
# Force garbage collection
import gc
gc.collect()
except Exception as e:
print(f"Warning: Error during memory cleanup: {e}")
# Clean up temp directory
if self.tempDir and Path(self.tempDir).exists():
try:
shutil.rmtree(self.tempDir)
+31 -7
View File
@@ -8,6 +8,8 @@ Supports real-time speed control without re-encoding.
"""
import os
import gc
import time
from pathlib import Path
import threading
from concurrent.futures import ThreadPoolExecutor
@@ -108,24 +110,46 @@ class MpvPlayer:
if success:
success = self.play_audio_file()
# Schedule cleanup after mpv loads the file
# Use ThreadPoolExecutor instead of daemon threads to prevent accumulation
# Schedule more aggressive cleanup with memory awareness
if tempFile:
import time
def cleanup_temp_file(filepath):
time.sleep(2) # mpv loads files quickly
def cleanup_temp_file(filepath, file_size):
# Shorter delay for smaller files, longer for larger ones
# Small files (<100KB): 0.5s delay
# Medium files (100KB-1MB): 1s delay
# Large files (>1MB): 2s delay
if file_size < 100 * 1024:
delay = 0.5
elif file_size < 1024 * 1024:
delay = 1.0
else:
delay = 2.0
time.sleep(delay)
try:
# Force garbage collection after file deletion
os.unlink(filepath)
import gc
gc.collect() # Help reclaim memory sooner
except:
pass
# Remove from active list
with self.tempFileLock:
try:
self.activeTempFiles.remove(filepath)
# Remove by path if stored as string
if filepath in self.activeTempFiles:
self.activeTempFiles.remove(filepath)
else:
# Remove by dict path if stored as dict
for item in self.activeTempFiles:
if isinstance(item, dict) and item.get('path') == filepath:
self.activeTempFiles.remove(item)
break
except ValueError:
pass # Already removed
# Use thread pool instead of creating new daemon threads
self.cleanupExecutor.submit(cleanup_temp_file, tempFile.name)
# Use thread pool with file size for better memory management
self.cleanupExecutor.submit(cleanup_temp_file, tempFile.name, len(wavData))
return success
+2 -1
View File
@@ -59,7 +59,8 @@ class TtsEngine:
# Safety: Limit text size to prevent excessive memory usage
# ~10,000 chars = ~10-15 minutes of audio at normal reading speed
MAX_TEXT_LENGTH = 10000
# Reduced to 5000 to lower memory usage per paragraph
MAX_TEXT_LENGTH = 5000
if len(text) > MAX_TEXT_LENGTH:
print(f"Warning: Paragraph too long ({len(text)} chars), truncating to {MAX_TEXT_LENGTH}")
text = text[:MAX_TEXT_LENGTH] + "..."