1.0.9 improves library loading performance
+23 -5

@@ -287,7 +287,7 @@ def _group_key(title: str, year: str | None) -> str:
     return f"{base} ({year})" if year else base
 
 
-def _build_library_items(files: list[dict], latest_results: dict, limit: int) -> list[dict]:
+def _build_library_items(files: list[dict], latest_results: dict, limit: int | None) -> list[dict]:
     """Aggregate scan files into library items."""
     grouped = {}
     for file_info in files:
@@ -354,6 +354,8 @@ def _build_library_items(files: list[dict], latest_results: dict, limit: int) -> list[dict]:
         ),
         reverse=True
     )
+    if limit is None:
+        return items
     return items[:limit]
 
 def get_format_options_from_settings() -> SubtitleFormatOptions:
@@ -1644,14 +1646,30 @@ def get_scan_history():
 def get_library_report():
     """Get library health report with scan files and issue summaries"""
     try:
-        limit = request.args.get('limit', 200, type=int)
-        offset = request.args.get('offset', 0, type=int)
-        latest_files = db.get_latest_scan_files(limit=limit, offset=offset)
+        page_size = request.args.get('page_size', type=int)
+        page = request.args.get('page', type=int)
+        if page_size is None:
+            page_size = request.args.get('limit', 200, type=int)
+        if page is None:
+            offset = request.args.get('offset', 0, type=int)
+            page = (offset // page_size) + 1 if page_size else 1
+
+        latest_files = db.get_latest_scan_files(limit=None, offset=0)
         latest_results = db.get_latest_file_results()
+        items = _build_library_items(latest_files, latest_results, None)
 
+        total_items = len(items)
+        start = max(0, (page - 1) * page_size)
+        end = start + page_size
+        page_items = items[start:end]
+
         return jsonify({
             "success": True,
-            "items": _build_library_items(latest_files, latest_results, limit)
+            "items": page_items,
+            "total_items": total_items,
+            "page": page,
+            "page_size": page_size,
+            "has_more": end < total_items
         })
     except Exception as e:
         logger.error(f"Error fetching library report: {e}")
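The handler keeps accepting the old limit/offset query parameters by translating them into a page number before slicing. A minimal sketch of that translation and the resulting slice bounds, pulled out of the Flask handler into a hypothetical standalone function for illustration:

# Sketch of the page/page_size resolution above, outside the Flask request
# object. Parameter names mirror the diff; the function itself is made up
# for illustration and is not part of the codebase.
def resolve_page(page_size=None, page=None, limit=200, offset=0):
    if page_size is None:
        page_size = limit          # legacy 'limit' becomes the page size
    if page is None:
        # legacy 'offset' is translated into a 1-based page number
        page = (offset // page_size) + 1 if page_size else 1
    start = max(0, (page - 1) * page_size)
    end = start + page_size
    return page, page_size, start, end

# e.g. an old-style ?limit=200&offset=400 request maps to page 3 of 200 items
assert resolve_page(limit=200, offset=400) == (3, 200, 400, 600)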
+40 -11

@@ -4,7 +4,7 @@ Handles persistent storage for settings, runs, and history
 """
 from datetime import datetime
 from pathlib import Path
-from sqlalchemy import create_engine, Column, Integer, String, DateTime, Boolean, Float, Text, ForeignKey, text
+from sqlalchemy import create_engine, Column, Integer, String, DateTime, Boolean, Float, Text, ForeignKey, text, func
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import sessionmaker, relationship, scoped_session
 import json
@@ -506,17 +506,28 @@ class DatabaseManager:
             session.close()
 
     def get_latest_scan_files(self, limit=500, offset=0):
-        """Get latest scan entry per file path, paged by scan_files.created_at"""
+        """Get latest scan entry per file path, optionally paged by created_at."""
         session = self.get_session()
         try:
-            files = session.query(ScanFile).order_by(
-                ScanFile.created_at.desc()
-            ).offset(offset).limit(limit).all()
-            latest = {}
-            for file_entry in files:
-                if file_entry.file_path in latest:
-                    continue
-                latest[file_entry.file_path] = {
+            latest_subquery = session.query(
+                ScanFile.file_path,
+                func.max(ScanFile.created_at).label("max_created_at")
+            ).group_by(ScanFile.file_path).subquery()
+
+            query = session.query(ScanFile).join(
+                latest_subquery,
+                (ScanFile.file_path == latest_subquery.c.file_path)
+                & (ScanFile.created_at == latest_subquery.c.max_created_at)
+            ).order_by(ScanFile.created_at.desc())
+
+            if offset:
+                query = query.offset(offset)
+            if limit is not None:
+                query = query.limit(limit)
+
+            files = query.all()
+            return [
+                {
                     "path": file_entry.file_path,
                     "name": file_entry.file_name,
                     "title": file_entry.title,
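The rewritten query is the usual greatest-row-per-group pattern: the subquery finds the newest created_at for each file_path, and the join keeps only the matching ScanFile rows, so deduplication moves from the Python loop into SQL. A self-contained sketch of the same pattern against an in-memory SQLite database (the model and sample data here are illustrative, not the project's actual schema):

from datetime import datetime
from sqlalchemy import create_engine, Column, Integer, String, DateTime, func
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class ScanFile(Base):               # illustrative stand-in for the real model
    __tablename__ = "scan_files"
    id = Column(Integer, primary_key=True)
    file_path = Column(String)
    created_at = Column(DateTime)

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add_all([
    ScanFile(file_path="a.mkv", created_at=datetime(2024, 1, 1)),
    ScanFile(file_path="a.mkv", created_at=datetime(2024, 2, 1)),   # newer scan of the same file
    ScanFile(file_path="b.mkv", created_at=datetime(2024, 1, 15)),
])
session.commit()

# newest created_at per file_path
latest = session.query(
    ScanFile.file_path,
    func.max(ScanFile.created_at).label("max_created_at"),
).group_by(ScanFile.file_path).subquery()

# keep only the rows whose created_at matches the per-path maximum
rows = session.query(ScanFile).join(
    latest,
    (ScanFile.file_path == latest.c.file_path)
    & (ScanFile.created_at == latest.c.max_created_at),
).order_by(ScanFile.created_at.desc()).all()

print([(r.file_path, r.created_at) for r in rows])
# one row per path, newest wins: a.mkv (2024-02-01) then b.mkv (2024-01-15)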
@@ -526,7 +537,25 @@ class DatabaseManager:
                     "status": file_entry.status,
                     "summary": file_entry.summary
                 }
-            return list(latest.values())
+                for file_entry in files
+            ]
         finally:
             session.close()
 
+    def get_latest_scan_files_count(self):
+        """Count distinct latest scan entries per file path."""
+        session = self.get_session()
+        try:
+            latest_subquery = session.query(
+                ScanFile.file_path,
+                func.max(ScanFile.created_at).label("max_created_at")
+            ).group_by(ScanFile.file_path).subquery()
+
+            return session.query(ScanFile).join(
+                latest_subquery,
+                (ScanFile.file_path == latest_subquery.c.file_path)
+                & (ScanFile.created_at == latest_subquery.c.max_created_at)
+            ).count()
+        finally:
+            session.close()
+
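get_latest_scan_files_count runs the same grouped subquery, which would let a caller page with real LIMIT/OFFSET values while still reporting totals. The route above instead loads all rows and slices in Python, so the pairing below is only a sketch of a possible use, not how the commit wires it up:

# Hypothetical pairing of the two DatabaseManager methods for SQL-side paging;
# get_library_report() as committed still slices the full list in Python.
def paged_library_files(db, page, page_size):
    total = db.get_latest_scan_files_count()
    files = db.get_latest_scan_files(limit=page_size, offset=(page - 1) * page_size)
    return {
        "files": files,
        "total_items": total,
        "page": page,
        "page_size": page_size,
        "has_more": page * page_size < total,
    }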
@@ -138,13 +138,17 @@ class FileScanner:
                 file_path.name, e
             )
 
+            status = "Has Plot" if has_plot else "Not Loaded"
+            if plot_marker_count > 1:
+                status = "Duplicate Plot"
+
             batch.append({
                 "path": str(file_path),
                 "name": file_path.name,
                 "has_plot": has_plot,
                 "plot_marker_count": plot_marker_count,
                 "duplicate_plot": plot_marker_count > 1,
-                "status": "Has Plot" if has_plot else "Not Loaded",
+                "status": status,
                 "summary": metadata.get("summary", ""),
                 "plot": metadata.get("summary", ""),
                 "title": metadata.get("title"),
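The scanner now reports one of three statuses, with duplicate plot markers taking precedence over a loaded plot. A tiny sketch of that decision with a couple of spot checks (the helper name is invented for illustration):

# Hypothetical helper mirroring the status logic in the scanner diff.
def plot_status(has_plot: bool, plot_marker_count: int) -> str:
    status = "Has Plot" if has_plot else "Not Loaded"
    if plot_marker_count > 1:
        status = "Duplicate Plot"   # duplicates win regardless of has_plot
    return status

assert plot_status(True, 1) == "Has Plot"
assert plot_status(False, 0) == "Not Loaded"
assert plot_status(True, 2) == "Duplicate Plot"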