1.0.0.8 logging updates

This commit is contained in:
ponzischeme89
2026-01-18 23:01:03 +13:00
parent 0a1edc0922
commit 7fa9c4d16e
15 changed files with 220 additions and 123 deletions
+14 -8
View File
@@ -1,6 +1,5 @@
import asyncio
import json
import logging
import os
import threading
import time
@@ -20,13 +19,11 @@ from core.subtitle_processor import SubtitleProcessor, SubtitleFormatOptions, SU
from core.keyword_stripper import get_stripper
from core.file_scanner import FileScanner
from core.database import DatabaseManager
from logging_utils import configure_logging, get_logger
# Configure logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
configure_logging()
logger = get_logger(__name__)
# Initialize Flask app with static folder for production
static_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'static')
@@ -592,6 +589,8 @@ def stream_scan():
"error": "No directory specified"
}), 400
client_closed = threading.Event()
def generate():
"""Generator function that yields SSE-formatted progress updates"""
try:
@@ -616,6 +615,10 @@ def stream_scan():
# Stream batches as they're found
for batch in FileScanner.scan_directory(directory, batch_size=10):
if client_closed.is_set():
logger.info("Client disconnected, stopping scan loop")
scan_state["scanning"] = False
return
# Check if client is still connected before processing
try:
batch_count += 1
@@ -700,7 +703,7 @@ def stream_scan():
}
yield f"data: {json.dumps(error_data)}\n\n"
return Response(
response = Response(
stream_with_context(generate()),
mimetype='text/event-stream',
headers={
@@ -709,6 +712,8 @@ def stream_scan():
'Connection': 'keep-alive'
}
)
response.call_on_close(client_closed.set)
return response
except Exception as e:
logger.error(f"Stream scan setup error: {e}")
@@ -1640,7 +1645,8 @@ def get_library_report():
"""Get library health report with scan files and issue summaries"""
try:
limit = request.args.get('limit', 200, type=int)
latest_files = db.get_latest_scan_files()
offset = request.args.get('offset', 0, type=int)
latest_files = db.get_latest_scan_files(limit=limit, offset=offset)
latest_results = db.get_latest_file_results()
return jsonify({
+8 -7
View File
@@ -3,9 +3,10 @@ Configuration manager - handles settings persistence
"""
import json
import logging
from logging_utils import get_logger
from pathlib import Path
logging.basicConfig(level=logging.INFO)
logger = get_logger(__name__)
class ConfigManager:
@@ -35,18 +36,18 @@ class ConfigManager:
try:
with open(self.file_path, "r") as f:
self.settings.update(json.load(f))
logging.info("Settings loaded successfully")
logger.info("Settings loaded successfully")
except Exception as e:
logging.error(f"Error loading settings: {e}")
logger.error(f"Error loading settings: {e}")
def save_settings(self):
"""Save settings to disk"""
try:
with open(self.file_path, "w") as f:
json.dump(self.settings, f, indent=2)
logging.info("Settings saved successfully")
logger.info("Settings saved successfully")
except Exception as e:
logging.error(f"Error saving settings: {e}")
logger.error(f"Error saving settings: {e}")
def get(self, key, default=None):
"""Get a setting value"""
@@ -55,7 +56,7 @@ class ConfigManager:
def set(self, key, value):
"""Set a setting value"""
self.settings[key] = value
logging.info(f"Setting updated: {key}")
logger.info(f"Setting updated: {key}")
def get_all(self):
"""Get all settings"""
@@ -64,4 +65,4 @@ class ConfigManager:
def update_multiple(self, updates):
"""Update multiple settings at once"""
self.settings.update(updates)
logging.info(f"Updated {len(updates)} settings")
logger.info(f"Updated {len(updates)} settings")
+7 -4
View File
@@ -9,8 +9,9 @@ from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship, scoped_session
import json
import logging
from logging_utils import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
Base = declarative_base()
@@ -504,11 +505,13 @@ class DatabaseManager:
finally:
session.close()
def get_latest_scan_files(self):
"""Get latest scan entry per file path"""
def get_latest_scan_files(self, limit=500, offset=0):
"""Get latest scan entry per file path, paged by scan_files.created_at"""
session = self.get_session()
try:
files = session.query(ScanFile).order_by(ScanFile.created_at.desc()).all()
files = session.query(ScanFile).order_by(
ScanFile.created_at.desc()
).offset(offset).limit(limit).all()
latest = {}
for file_entry in files:
if file_entry.file_path in latest:
+2 -9
View File
@@ -1,4 +1,5 @@
import logging
from logging_utils import get_logger
import os
import re
from pathlib import Path
@@ -8,15 +9,7 @@ from typing import Generator, List, Dict
# Logging configuration
# ------------------------------------------------------------
logger = logging.getLogger("FileScanner")
logger.setLevel(logging.INFO) # Change to DEBUG for deep tracing
handler = logging.StreamHandler()
formatter = logging.Formatter(
"%(asctime)s | %(levelname)-8s | %(name)s | %(message)s"
)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger = get_logger("FileScanner")
# ------------------------------------------------------------
# Import subtitle parser
+2 -1
View File
@@ -7,9 +7,10 @@ from __future__ import annotations
import re
import logging
from logging_utils import get_logger
from typing import Optional, List
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class KeywordStripper:
+2 -2
View File
@@ -12,11 +12,11 @@ from __future__ import annotations
import asyncio
import aiohttp
import logging
from logging_utils import get_logger
import time
from typing import Dict, Optional
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class RateLimiter:
+2 -2
View File
@@ -2,6 +2,7 @@ from __future__ import annotations
import re
import logging
from logging_utils import get_logger
import textwrap
import time
import os
@@ -25,8 +26,7 @@ except ImportError:
except ImportError:
_HAS_MSVCRT = False
logger = logging.getLogger("SubtitleProcessor")
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
logger = get_logger("SubtitleProcessor")
# ============================================================
# Sentinel tag for deterministic detection
+16 -16
View File
@@ -3,10 +3,10 @@ TMDb API client - async movie and TV series metadata fetching
"""
import asyncio
import aiohttp
import logging
from logging_utils import get_logger
import time
logging.basicConfig(level=logging.INFO)
logger = get_logger(__name__)
class TMDbClient:
@@ -47,7 +47,7 @@ class TMDbClient:
response_time_ms = int((time.time() - start_time) * 1000)
if response.status != 200:
logging.error(f"TMDb HTTP error {response.status} for movie '{title}'")
logger.error(f"TMDb HTTP error {response.status} for movie '{title}'")
# Track failed API call
if self.db_manager:
self.db_manager.track_api_call(
@@ -71,7 +71,7 @@ class TMDbClient:
)
return data["results"][0] # Return first match
logging.warning(f"No TMDb results for movie '{title}'")
logger.warning(f"No TMDb results for movie '{title}'")
# Track failed API call (no results)
if self.db_manager:
self.db_manager.track_api_call(
@@ -83,7 +83,7 @@ class TMDbClient:
return None
except Exception as e:
logging.error(f"Error searching TMDb for movie '{title}': {e}")
logger.error(f"Error searching TMDb for movie '{title}': {e}")
return None
async def search_tv(self, title, year=None, language=None):
@@ -115,7 +115,7 @@ class TMDbClient:
response_time_ms = int((time.time() - start_time) * 1000)
if response.status != 200:
logging.error(f"TMDb HTTP error {response.status} for TV '{title}'")
logger.error(f"TMDb HTTP error {response.status} for TV '{title}'")
# Track failed API call
if self.db_manager:
self.db_manager.track_api_call(
@@ -139,7 +139,7 @@ class TMDbClient:
)
return data["results"][0] # Return first match
logging.warning(f"No TMDb results for TV series '{title}'")
logger.warning(f"No TMDb results for TV series '{title}'")
# Track failed API call (no results)
if self.db_manager:
self.db_manager.track_api_call(
@@ -151,7 +151,7 @@ class TMDbClient:
return None
except Exception as e:
logging.error(f"Error searching TMDb for TV '{title}': {e}")
logger.error(f"Error searching TMDb for TV '{title}': {e}")
return None
async def get_movie_details(self, movie_id, language=None):
@@ -174,13 +174,13 @@ class TMDbClient:
async with aiohttp.ClientSession() as session:
async with session.get(url, params=params) as response:
if response.status != 200:
logging.error(f"TMDb HTTP error {response.status} for movie ID {movie_id}")
logger.error(f"TMDb HTTP error {response.status} for movie ID {movie_id}")
return None
return await response.json()
except Exception as e:
logging.error(f"Error getting TMDb movie details for ID {movie_id}: {e}")
logger.error(f"Error getting TMDb movie details for ID {movie_id}: {e}")
return None
async def get_tv_details(self, tv_id, language=None):
@@ -203,13 +203,13 @@ class TMDbClient:
async with aiohttp.ClientSession() as session:
async with session.get(url, params=params) as response:
if response.status != 200:
logging.error(f"TMDb HTTP error {response.status} for TV ID {tv_id}")
logger.error(f"TMDb HTTP error {response.status} for TV ID {tv_id}")
return None
return await response.json()
except Exception as e:
logging.error(f"Error getting TMDb TV details for ID {tv_id}: {e}")
logger.error(f"Error getting TMDb TV details for ID {tv_id}: {e}")
return None
async def get_tv_season(self, tv_id, season_number, language=None):
@@ -233,13 +233,13 @@ class TMDbClient:
async with aiohttp.ClientSession() as session:
async with session.get(url, params=params) as response:
if response.status != 200:
logging.error(f"TMDb HTTP error {response.status} for TV {tv_id} season {season_number}")
logger.error(f"TMDb HTTP error {response.status} for TV {tv_id} season {season_number}")
return None
return await response.json()
except Exception as e:
logging.error(f"Error getting TMDb season data: {e}")
logger.error(f"Error getting TMDb season data: {e}")
return None
async def fetch_summary(self, title, media_type="movie", year=None, season=None, episode=None, language=None):
@@ -256,7 +256,7 @@ class TMDbClient:
Returns:
dict: {plot, title, year, media_type, rating} or None if not found
"""
logging.info(f"Fetching TMDb summary for: {title} (type: {media_type})")
logger.info(f"Fetching TMDb summary for: {title} (type: {media_type})")
try:
if media_type == "tv":
@@ -327,5 +327,5 @@ class TMDbClient:
}
except Exception as e:
logging.error(f"Error fetching TMDb summary for '{title}': {e}")
logger.error(f"Error fetching TMDb summary for '{title}': {e}")
return None
+2 -1
View File
@@ -3,10 +3,11 @@ TVmaze API client - async TV metadata fetching
"""
import aiohttp
import logging
from logging_utils import get_logger
import re
import time
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class TVMazeClient:
+15
View File
@@ -0,0 +1,15 @@
import logging
from typing import Optional
DEFAULT_LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"


def configure_logging(level: int = logging.INFO, fmt: str = DEFAULT_LOG_FORMAT) -> None:
    """Set up root-logger output for the whole application.

    Thin wrapper around ``logging.basicConfig``.  NOTE(review): per the
    stdlib docs, ``basicConfig`` is a no-op if the root logger already has
    handlers installed, so only the first caller wins.

    Args:
        level: Minimum record level to emit (defaults to ``logging.INFO``).
        fmt: Record format string handed to ``basicConfig``.
    """
    logging.basicConfig(format=fmt, level=level)
def get_logger(name: Optional[str] = None) -> logging.Logger:
    """Return the logger registered under *name*.

    Args:
        name: Logger name; when falsy (``None`` or ``""``), this module's
            own ``__name__`` is used as the fallback.

    Returns:
        The ``logging.Logger`` instance for the resolved name.
    """
    resolved = name if name else __name__
    return logging.getLogger(resolved)