1.0.6 - Multi language support (TMDb), Per folder rule settings
This commit is contained in:
+208
-4
@@ -208,6 +208,43 @@ def _get_str_setting(key: str, default: str) -> str:
|
||||
return default
|
||||
return str(value)
|
||||
|
||||
def _get_folder_rule_for_path(file_path: str, rules: list[dict]) -> dict | None:
|
||||
"""Pick the most specific folder rule that matches the file path."""
|
||||
if not rules:
|
||||
return None
|
||||
normalized_file = os.path.normcase(os.path.abspath(file_path))
|
||||
best_rule = None
|
||||
best_len = -1
|
||||
for rule in rules:
|
||||
directory = rule.get("directory")
|
||||
if not directory:
|
||||
continue
|
||||
normalized_dir = os.path.normcase(os.path.abspath(directory))
|
||||
normalized_dir = normalized_dir.rstrip(os.sep)
|
||||
prefix = normalized_dir + os.sep
|
||||
if normalized_file == normalized_dir or normalized_file.startswith(prefix):
|
||||
if len(normalized_dir) > best_len:
|
||||
best_len = len(normalized_dir)
|
||||
best_rule = rule
|
||||
return best_rule
|
||||
|
||||
|
||||
def _merge_format_options(base_options: SubtitleFormatOptions, rule: dict | None) -> SubtitleFormatOptions:
|
||||
"""Merge folder rule overrides into format options."""
|
||||
if not rule:
|
||||
return base_options
|
||||
def _override_bool(key: str, current: bool) -> bool:
|
||||
value = rule.get(key)
|
||||
return current if value is None else bool(value)
|
||||
return SubtitleFormatOptions(
|
||||
title_bold=_override_bool("subtitle_title_bold", base_options.title_bold),
|
||||
plot_italic=_override_bool("subtitle_plot_italic", base_options.plot_italic),
|
||||
show_director=_override_bool("subtitle_show_director", base_options.show_director),
|
||||
show_actors=_override_bool("subtitle_show_actors", base_options.show_actors),
|
||||
show_released=_override_bool("subtitle_show_released", base_options.show_released),
|
||||
show_genre=_override_bool("subtitle_show_genre", base_options.show_genre),
|
||||
)
|
||||
|
||||
|
||||
def get_format_options_from_settings() -> SubtitleFormatOptions:
|
||||
"""Load subtitle formatting options from database settings."""
|
||||
@@ -670,6 +707,63 @@ def save_suggested_matches():
|
||||
}), 500
|
||||
|
||||
|
||||
@app.route('/api/folder-rules', methods=['GET'])
def get_folder_rules():
    """Get all folder rules"""
    try:
        # Rules come back as plain dicts ready for JSON serialization.
        payload = {"success": True, "rules": db.get_all_folder_rules()}
        return jsonify(payload)
    except Exception as e:
        logger.error(f"Error fetching folder rules: {e}")
        return jsonify({"success": False, "error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route('/api/folder-rules', methods=['POST'])
def save_folder_rule():
    """Create or update a folder rule"""
    try:
        payload = request.json or {}
        directory = payload.get("directory", "").strip()
        # Directory is the upsert key, so reject requests without one.
        if not directory:
            return jsonify({"success": False, "error": "Directory is required"}), 400

        return jsonify({"success": db.upsert_folder_rule(directory, payload)})
    except Exception as e:
        logger.error(f"Error saving folder rule: {e}")
        return jsonify({"success": False, "error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route('/api/folder-rules/<path:directory>', methods=['DELETE'])
def delete_folder_rule(directory):
    """Delete a folder rule for a directory"""
    try:
        # db.delete_folder_rule returns False when no rule existed.
        deleted = db.delete_folder_rule(directory)
        return jsonify({"success": deleted})
    except Exception as e:
        logger.error(f"Error deleting folder rule: {e}")
        return jsonify({"success": False, "error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route('/api/suggested-matches/<path:file_path>', methods=['DELETE'])
|
||||
def delete_suggested_match(file_path):
|
||||
"""Delete a suggested match for a file"""
|
||||
@@ -797,7 +891,89 @@ def search_title():
|
||||
"error": "API not configured"
|
||||
}), 400
|
||||
|
||||
preferred_source = data.get("preferred_source") or _get_str_setting("preferred_source", "omdb")
|
||||
language = data.get("language")
|
||||
|
||||
results = []
|
||||
if preferred_source == "tmdb" and tmdb_client:
|
||||
try:
|
||||
import aiohttp
|
||||
import asyncio
|
||||
|
||||
            async def tmdb_search(title: str, mode: str, language: str | None = None):
                """TMDb search with optional language support (1 API call)

                Queries the /search/multi endpoint so movies and TV shows are
                found in a single request. "quick" mode keeps only the top hit;
                any other mode keeps up to five. Every request — success or
                failure — is recorded via db.track_api_call.
                """
                start_time = time.time()

                url = f"{tmdb_client.base_url}/search/multi"
                params = {
                    "api_key": tmdb_client.api_key,
                    "query": title
                }
                if language:
                    # TMDb localizes titles/overviews when a language code is supplied.
                    params["language"] = language

                async with aiohttp.ClientSession() as session:
                    async with session.get(url, params=params) as resp:
                        response_time_ms = int((time.time() - start_time) * 1000)
                        if resp.status != 200:
                            # Record the failed call before returning no results.
                            db.track_api_call(
                                provider="tmdb",
                                endpoint="/search/multi",
                                success=False,
                                response_time_ms=response_time_ms,
                                call_count=1
                            )
                            return []

                        # NOTE(review): this rebinds `data`, shadowing the request
                        # payload variable of the same name in the enclosing route —
                        # the payload is not used afterwards, but worth confirming.
                        data = await resp.json()
                        # /search/multi also returns "person" entries; keep titles only.
                        items = [
                            item for item in data.get("results", [])
                            if item.get("media_type") in ("movie", "tv")
                        ]
                        if mode == "quick":
                            items = items[:1]
                        else:
                            items = items[:5]

                        results = []
                        for item in items:
                            # Movies use "title"/"release_date"; TV uses "name"/"first_air_date".
                            title_value = item.get("title") or item.get("name")
                            date_value = item.get("release_date") or item.get("first_air_date") or ""
                            year = date_value[:4] if date_value else "N/A"
                            poster_path = item.get("poster_path")
                            poster = f"https://image.tmdb.org/t/p/w185{poster_path}" if poster_path else None
                            vote_average = item.get("vote_average")
                            # TMDb has no IMDb rating; present its own 0-10 vote average instead.
                            imdb_rating = f"{vote_average:.1f}/10" if isinstance(vote_average, (int, float)) else "N/A"

                            results.append({
                                "title": title_value,
                                "year": year,
                                "plot": item.get("overview") or "No plot available",
                                "runtime": None,
                                "imdb_rating": imdb_rating,
                                "media_type": item.get("media_type"),
                                "poster": poster,
                                "imdb_id": None
                            })

                        db.track_api_call(
                            provider="tmdb",
                            endpoint="/search/multi",
                            success=True,
                            response_time_ms=response_time_ms,
                            call_count=1
                        )
                        return results
|
||||
|
||||
results = asyncio.run(tmdb_search(query, mode, language))
|
||||
return jsonify({
|
||||
"success": True,
|
||||
"results": results
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error searching TMDb: {e}")
|
||||
|
||||
if omdb_client:
|
||||
try:
|
||||
import aiohttp
|
||||
@@ -986,7 +1162,7 @@ def process_files():
|
||||
"error": "Metadata provider not configured"
|
||||
}), 400
|
||||
|
||||
# Load format options from settings
|
||||
# Load default format options from settings
|
||||
format_options = get_format_options_from_settings()
|
||||
|
||||
# Load strip_keywords setting (default True for better matching)
|
||||
@@ -995,6 +1171,12 @@ def process_files():
|
||||
# Load clean_subtitle_content setting (default True for ad removal)
|
||||
clean_subtitle_content = _get_bool_setting("clean_subtitle_content", True)
|
||||
|
||||
# Load default insertion position and preferred source
|
||||
default_insertion_position = _get_str_setting("insertion_position", "start")
|
||||
default_preferred_source = _get_str_setting("preferred_source", "omdb")
|
||||
|
||||
folder_rules = db.get_all_folder_rules()
|
||||
|
||||
# Create a processing run in database
|
||||
run_id = db.create_run(total_files=len(file_paths))
|
||||
|
||||
@@ -1006,14 +1188,23 @@ def process_files():
|
||||
for file_path in file_paths:
|
||||
try:
|
||||
# Process file asynchronously with optional title override
|
||||
rule = _get_folder_rule_for_path(file_path, folder_rules)
|
||||
effective_format = _merge_format_options(format_options, rule)
|
||||
insertion_position = rule.get("insertion_position") if rule else None
|
||||
preferred_source = rule.get("preferred_source") if rule else None
|
||||
language = rule.get("language") if rule else None
|
||||
|
||||
result = asyncio.run(processor.process_file(
|
||||
file_path,
|
||||
duration,
|
||||
force_reprocess=force_reprocess,
|
||||
title_override=title_override,
|
||||
format_options=format_options,
|
||||
format_options=effective_format,
|
||||
strip_keywords=strip_keywords,
|
||||
clean_subtitle_content=clean_subtitle_content,
|
||||
insertion_position=insertion_position or default_insertion_position,
|
||||
preferred_source=preferred_source or default_preferred_source,
|
||||
language=language,
|
||||
))
|
||||
|
||||
# Track success/failure
|
||||
@@ -1124,7 +1315,7 @@ def process_batch():
|
||||
successful_count = 0
|
||||
failed_count = 0
|
||||
|
||||
# Load format options from settings
|
||||
# Load default format options from settings
|
||||
format_options = get_format_options_from_settings()
|
||||
|
||||
# Load strip_keywords setting (default True for better matching)
|
||||
@@ -1133,6 +1324,10 @@ def process_batch():
|
||||
# Load clean_subtitle_content setting (default True for ad removal)
|
||||
clean_subtitle_content = _get_bool_setting("clean_subtitle_content", True)
|
||||
|
||||
default_insertion_position = _get_str_setting("insertion_position", "start")
|
||||
default_preferred_source = _get_str_setting("preferred_source", "omdb")
|
||||
folder_rules = db.get_all_folder_rules()
|
||||
|
||||
# Create a processing run
|
||||
run_id = db.create_run(total_files=total)
|
||||
|
||||
@@ -1148,14 +1343,23 @@ def process_batch():
|
||||
|
||||
try:
|
||||
# Process file with title override (no API call needed - data is pre-fetched)
|
||||
rule = _get_folder_rule_for_path(file_path, folder_rules)
|
||||
effective_format = _merge_format_options(format_options, rule)
|
||||
insertion_position = rule.get("insertion_position") if rule else None
|
||||
preferred_source = rule.get("preferred_source") if rule else None
|
||||
language = rule.get("language") if rule else None
|
||||
|
||||
result = asyncio.run(processor.process_file(
|
||||
file_path,
|
||||
duration,
|
||||
force_reprocess=True, # Always reprocess when applying matches
|
||||
title_override=title_override,
|
||||
format_options=format_options,
|
||||
format_options=effective_format,
|
||||
strip_keywords=strip_keywords,
|
||||
clean_subtitle_content=clean_subtitle_content,
|
||||
insertion_position=insertion_position or default_insertion_position,
|
||||
preferred_source=preferred_source or default_preferred_source,
|
||||
language=language,
|
||||
))
|
||||
|
||||
if result["success"]:
|
||||
|
||||
@@ -139,6 +139,28 @@ class SuggestedMatch(Base):
|
||||
return f"<SuggestedMatch(id={self.id}, file_name='{self.file_name}', matched_title='{self.matched_title}')>"
|
||||
|
||||
|
||||
class FolderRule(Base):
    """Folder-specific rules that override default settings"""
    __tablename__ = 'folder_rules'

    id = Column(Integer, primary_key=True)
    # One rule per directory; unique + indexed so path-based lookups stay cheap.
    directory = Column(String(500), nullable=False, unique=True, index=True)
    # Override columns are nullable: NULL means "inherit the global setting".
    preferred_source = Column(String(50))
    insertion_position = Column(String(50))
    language = Column(String(20))
    subtitle_title_bold = Column(Boolean)
    subtitle_plot_italic = Column(Boolean)
    subtitle_show_director = Column(Boolean)
    subtitle_show_actors = Column(Boolean)
    subtitle_show_released = Column(Boolean)
    subtitle_show_genre = Column(Boolean)
    # NOTE(review): datetime.utcnow is deprecated since Python 3.12; consider a
    # timezone-aware default if/when the other models migrate.
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    def __repr__(self):
        # Keep only identifying fields so log output stays concise.
        return f"<FolderRule(id={self.id}, directory='{self.directory}')>"
|
||||
|
||||
|
||||
class DatabaseManager:
|
||||
"""Manages database connections and operations"""
|
||||
|
||||
@@ -759,6 +781,98 @@ class DatabaseManager:
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
# ============ FOLDER RULES OPERATIONS ============
|
||||
|
||||
def get_folder_rule(self, directory):
|
||||
"""Get a folder rule for a specific directory"""
|
||||
session = self.get_session()
|
||||
try:
|
||||
rule = session.query(FolderRule).filter_by(directory=directory).first()
|
||||
if not rule:
|
||||
return None
|
||||
return {
|
||||
"directory": rule.directory,
|
||||
"preferred_source": rule.preferred_source,
|
||||
"insertion_position": rule.insertion_position,
|
||||
"language": rule.language,
|
||||
"subtitle_title_bold": rule.subtitle_title_bold,
|
||||
"subtitle_plot_italic": rule.subtitle_plot_italic,
|
||||
"subtitle_show_director": rule.subtitle_show_director,
|
||||
"subtitle_show_actors": rule.subtitle_show_actors,
|
||||
"subtitle_show_released": rule.subtitle_show_released,
|
||||
"subtitle_show_genre": rule.subtitle_show_genre,
|
||||
}
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
def get_all_folder_rules(self):
|
||||
"""Get all folder rules"""
|
||||
session = self.get_session()
|
||||
try:
|
||||
rules = session.query(FolderRule).order_by(FolderRule.directory.asc()).all()
|
||||
return [
|
||||
{
|
||||
"directory": rule.directory,
|
||||
"preferred_source": rule.preferred_source,
|
||||
"insertion_position": rule.insertion_position,
|
||||
"language": rule.language,
|
||||
"subtitle_title_bold": rule.subtitle_title_bold,
|
||||
"subtitle_plot_italic": rule.subtitle_plot_italic,
|
||||
"subtitle_show_director": rule.subtitle_show_director,
|
||||
"subtitle_show_actors": rule.subtitle_show_actors,
|
||||
"subtitle_show_released": rule.subtitle_show_released,
|
||||
"subtitle_show_genre": rule.subtitle_show_genre,
|
||||
}
|
||||
for rule in rules
|
||||
]
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
def upsert_folder_rule(self, directory, rule_data):
|
||||
"""Create or update a folder rule"""
|
||||
session = self.get_session()
|
||||
try:
|
||||
rule = session.query(FolderRule).filter_by(directory=directory).first()
|
||||
if not rule:
|
||||
rule = FolderRule(directory=directory)
|
||||
session.add(rule)
|
||||
|
||||
rule.preferred_source = rule_data.get("preferred_source")
|
||||
rule.insertion_position = rule_data.get("insertion_position")
|
||||
rule.language = rule_data.get("language")
|
||||
rule.subtitle_title_bold = rule_data.get("subtitle_title_bold")
|
||||
rule.subtitle_plot_italic = rule_data.get("subtitle_plot_italic")
|
||||
rule.subtitle_show_director = rule_data.get("subtitle_show_director")
|
||||
rule.subtitle_show_actors = rule_data.get("subtitle_show_actors")
|
||||
rule.subtitle_show_released = rule_data.get("subtitle_show_released")
|
||||
rule.subtitle_show_genre = rule_data.get("subtitle_show_genre")
|
||||
|
||||
session.commit()
|
||||
return True
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
logger.error(f"Error saving folder rule: {e}")
|
||||
return False
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
def delete_folder_rule(self, directory):
|
||||
"""Delete a folder rule for a directory"""
|
||||
session = self.get_session()
|
||||
try:
|
||||
rule = session.query(FolderRule).filter_by(directory=directory).first()
|
||||
if rule:
|
||||
session.delete(rule)
|
||||
session.commit()
|
||||
return True
|
||||
return False
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
logger.error(f"Error deleting folder rule: {e}")
|
||||
return False
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
# ============ MAINTENANCE OPERATIONS ============
|
||||
|
||||
def clear_settings(self, keep_api_keys=False):
|
||||
|
||||
@@ -1106,11 +1106,12 @@ def build_intro_blocks(
|
||||
elif available_time_ms >= int(MIN_DURATION_SECONDS * 1000):
|
||||
block_end_ms = safe_end_time(first_subtitle_start_ms - min_safe_gap_ms)
|
||||
|
||||
# If we can fit at least a brief title, show it
|
||||
# If we can fit a brief header, include title + ratings/runtime
|
||||
brief_text = (
|
||||
f"{SUBLOGUE_SENTINEL}\n"
|
||||
f"{title} ({year})\n"
|
||||
f"— Generated by Sublogue"
|
||||
f"{title_line}\n"
|
||||
f"{info_line}\n"
|
||||
f"- Generated by Sublogue"
|
||||
)
|
||||
|
||||
blocks.append(SubtitleBlock(1, 0, block_end_ms, brief_text))
|
||||
@@ -1123,6 +1124,102 @@ def build_intro_blocks(
|
||||
|
||||
return blocks
|
||||
|
||||
|
||||
def build_outro_blocks(
    movie: dict,
    plot: str,
    last_subtitle_end_ms: int,
    min_safe_gap_ms: int = 500,
    format_options: SubtitleFormatOptions | None = None,
) -> List[SubtitleBlock]:
    """
    Build outro blocks that appear AFTER the last real subtitle.

    This avoids any overlap by placing new blocks after the final subtitle end time.

    Args:
        movie: Metadata dict; both snake_case and capitalised (OMDb-style)
            keys are accepted, e.g. "runtime" or "Runtime".
        plot: Plot text, split into display-sized chunks for readability.
        last_subtitle_end_ms: End time (ms) of the final real subtitle block.
        min_safe_gap_ms: Buffer inserted after the last subtitle before the
            outro starts.
        format_options: Formatting toggles; defaults to DEFAULT_FORMAT_OPTIONS.

    Returns:
        List of SubtitleBlock instances — a header block first, then one block
        per plot chunk — timed back-to-back after the last subtitle.
    """
    if format_options is None:
        format_options = DEFAULT_FORMAT_OPTIONS

    title = movie.get("title", "Unknown Title")
    year = movie.get("year", "")

    # Accept both key spellings for each field; blank/missing becomes "N/A".
    imdb_rating = movie.get("imdb_rating") or movie.get("imdbRating") or "N/A"
    if not imdb_rating or imdb_rating in ("", "N/A", None):
        imdb_rating = "N/A"

    rt_rating = movie.get("rotten_tomatoes") or movie.get("rottenTomatoes") or "N/A"
    if not rt_rating or rt_rating in ("", "N/A", None):
        rt_rating = "N/A"

    runtime_raw = movie.get("runtime") or movie.get("Runtime") or "N/A"
    if runtime_raw and runtime_raw != "N/A":
        # Normalise values like "142 min" (or any digits) to "<n> min".
        runtime_match = re.search(r'(\d+)', str(runtime_raw))
        runtime = f"{runtime_match.group(1)} min" if runtime_match else runtime_raw
    else:
        runtime = "N/A"

    director = movie.get("director") or movie.get("Director") or "N/A"
    actors = movie.get("actors") or movie.get("Actors") or "N/A"
    released = movie.get("released") or movie.get("Released") or "N/A"
    genre = movie.get("genre") or movie.get("Genre") or "N/A"

    title_display = f"<b>{title}</b>" if format_options.title_bold else title
    title_line = f"{title_display} ({year})"
    # NOTE(review): the "?" characters below look like mojibake from lost
    # emoji/pictograms — confirm the intended glyphs before changing them.
    info_line = f"? IMDb: {imdb_rating} ?? RT: {rt_rating} ? {runtime}"

    # Optional detail lines, gated by the format options and data availability.
    extra_lines = []
    if format_options.show_director and director != "N/A":
        extra_lines.append(f"?? Director: {director}")
    if format_options.show_actors and actors != "N/A":
        actor_list = actors.split(", ")
        # Cap the cast list at three names to keep the block readable on screen.
        if len(actor_list) > 3:
            actors_display = ", ".join(actor_list[:3]) + "..."
        else:
            actors_display = actors
        extra_lines.append(f"?? Cast: {actors_display}")
    if format_options.show_released and released != "N/A":
        extra_lines.append(f"?? Released: {released}")
    if format_options.show_genre and genre != "N/A":
        extra_lines.append(f"?? Genre: {genre}")

    # Sentinel first: it tags the block as Sublogue-generated.
    header_parts = [
        SUBLOGUE_SENTINEL,
        title_line,
        info_line,
    ]
    if extra_lines:
        header_parts.extend(extra_lines)
    header_parts.append("")
    header_parts.append("- Generated by Sublogue")

    header_text = "\n".join(header_parts)

    plot_chunks = _split_plot_into_display_chunks(plot)

    def format_plot_chunk(chunk_text: str, is_first_chunk: bool) -> str:
        # Wrap for TV-safe line lengths; only the first chunk gets "Plot: ".
        wrapped = wrap_for_tv(chunk_text)
        if format_options.plot_italic:
            wrapped = f"<i>{wrapped}</i>"
        prefix = "Plot: " if is_first_chunk else ""
        return f"{SUBLOGUE_SENTINEL}\n{prefix}{wrapped}"

    blocks = []
    # Start strictly after the last real subtitle plus the safety gap.
    current_ms = last_subtitle_end_ms + min_safe_gap_ms

    header_duration_ms = calculate_reading_duration_ms(header_text)
    header_end_ms = current_ms + header_duration_ms
    blocks.append(SubtitleBlock(1, current_ms, header_end_ms, header_text))
    current_ms = header_end_ms

    # Plot chunks run back-to-back. Note: reading duration is computed from
    # the raw chunk, not the formatted text with markup/sentinel added.
    for i, chunk in enumerate(plot_chunks):
        chunk_text = format_plot_chunk(chunk, is_first_chunk=(i == 0))
        chunk_duration_ms = calculate_reading_duration_ms(chunk)
        chunk_end_ms = current_ms + chunk_duration_ms
        blocks.append(SubtitleBlock(len(blocks) + 1, current_ms, chunk_end_ms, chunk_text))
        current_ms = chunk_end_ms

    return blocks
|
||||
|
||||
# ============================================================
|
||||
# Processor
|
||||
# ============================================================
|
||||
@@ -1169,6 +1266,9 @@ class SubtitleProcessor:
|
||||
format_options: SubtitleFormatOptions = None,
|
||||
strip_keywords: bool = True,
|
||||
clean_subtitle_content: bool = True,
|
||||
insertion_position: str = "start",
|
||||
preferred_source: str | None = None,
|
||||
language: str | None = None,
|
||||
) -> dict:
|
||||
"""
|
||||
Process a subtitle file to add plot information.
|
||||
@@ -1238,6 +1338,8 @@ class SubtitleProcessor:
|
||||
is_series=is_series,
|
||||
season=season,
|
||||
episode=episode,
|
||||
preferred_source=preferred_source,
|
||||
language=language,
|
||||
)
|
||||
if not movie:
|
||||
return self._fail("No metadata found")
|
||||
@@ -1343,13 +1445,22 @@ class SubtitleProcessor:
|
||||
# These will NEVER overlap with or shift existing subtitles
|
||||
# Returns EMPTY list if insufficient gap
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
intro_blocks = build_intro_blocks(
|
||||
movie,
|
||||
plot,
|
||||
first_subtitle_start_ms=first_subtitle_start_ms,
|
||||
min_safe_gap_ms=500, # 500ms safety buffer before first subtitle
|
||||
format_options=format_options,
|
||||
)
|
||||
if insertion_position == "end":
|
||||
intro_blocks = build_outro_blocks(
|
||||
movie,
|
||||
plot,
|
||||
last_subtitle_end_ms=last_original_timing,
|
||||
min_safe_gap_ms=500, # 500ms safety buffer after last subtitle
|
||||
format_options=format_options,
|
||||
)
|
||||
else:
|
||||
intro_blocks = build_intro_blocks(
|
||||
movie,
|
||||
plot,
|
||||
first_subtitle_start_ms=first_subtitle_start_ms,
|
||||
min_safe_gap_ms=500, # 500ms safety buffer before first subtitle
|
||||
format_options=format_options,
|
||||
)
|
||||
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
# PHASE 5: Combine intro + original subtitles
|
||||
@@ -1357,7 +1468,7 @@ class SubtitleProcessor:
|
||||
# NOTE: We're ONLY renumbering indices (1, 2, 3...), NOT timestamps!
|
||||
# The start_time and end_time of clean_subs are PRESERVED EXACTLY.
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
final = intro_blocks + clean_subs
|
||||
final = clean_subs + intro_blocks if insertion_position == "end" else intro_blocks + clean_subs
|
||||
|
||||
# Renumber all blocks sequentially (index only, timing unchanged)
|
||||
renumbered = [
|
||||
@@ -1368,7 +1479,7 @@ class SubtitleProcessor:
|
||||
# Verify timing preservation (sanity check)
|
||||
num_intro = len(intro_blocks)
|
||||
if len(renumbered) > num_intro:
|
||||
preserved_first = renumbered[num_intro]
|
||||
preserved_first = renumbered[0] if insertion_position == "end" else renumbered[num_intro]
|
||||
if preserved_first.start_time != first_subtitle_start_ms:
|
||||
logger.error(
|
||||
f"TIMING CORRUPTION DETECTED! Original first subtitle was at "
|
||||
@@ -1431,6 +1542,8 @@ class SubtitleProcessor:
|
||||
is_series: bool = False,
|
||||
season: Optional[int] = None,
|
||||
episode: Optional[int] = None,
|
||||
preferred_source: str | None = None,
|
||||
language: str | None = None,
|
||||
) -> Optional[dict]:
|
||||
"""
|
||||
Fetch metadata from configured sources with fallback.
|
||||
@@ -1442,14 +1555,15 @@ class SubtitleProcessor:
|
||||
Year validation ensures we don't match wrong movies (e.g., "Eternity 2025"
|
||||
shouldn't match "From Here to Eternity 1953").
|
||||
"""
|
||||
logger.info("Fetching metadata for '%s' (year=%s)", movie_name, year)
|
||||
source_preference = preferred_source or self.preferred_source
|
||||
logger.info("Fetching metadata for '%s' (year=%s, source=%s)", movie_name, year, source_preference)
|
||||
|
||||
result = None
|
||||
omdb_type = "series" if is_series else "movie"
|
||||
tmdb_type = "tv" if is_series else "movie"
|
||||
|
||||
# Try preferred source first
|
||||
if self.preferred_source == "tvmaze" and self.tvmaze_client and is_series:
|
||||
if source_preference == "tvmaze" and self.tvmaze_client and is_series:
|
||||
result = await self.tvmaze_client.fetch_summary(
|
||||
movie_name,
|
||||
year=year,
|
||||
@@ -1459,18 +1573,19 @@ class SubtitleProcessor:
|
||||
if result:
|
||||
logger.info("Found metadata via TVmaze: %s (%s)", result.get("title"), result.get("year"))
|
||||
return result
|
||||
elif self.preferred_source == "tmdb" and self.tmdb_client:
|
||||
elif source_preference == "tmdb" and self.tmdb_client:
|
||||
result = await self.tmdb_client.fetch_summary(
|
||||
movie_name,
|
||||
media_type=tmdb_type,
|
||||
year=year,
|
||||
season=season,
|
||||
episode=episode,
|
||||
language=language,
|
||||
)
|
||||
if result:
|
||||
logger.info("Found metadata via TMDb: %s (%s)", result.get("title"), result.get("year"))
|
||||
return result
|
||||
elif self.preferred_source == "omdb" and self.omdb_client:
|
||||
elif source_preference == "omdb" and self.omdb_client:
|
||||
result = await self.omdb_client.fetch_summary(
|
||||
movie_name,
|
||||
media_type=omdb_type,
|
||||
@@ -1483,7 +1598,7 @@ class SubtitleProcessor:
|
||||
return result
|
||||
|
||||
# Fallback to other source
|
||||
if not result and self.omdb_client and self.preferred_source != "omdb":
|
||||
if not result and self.omdb_client and source_preference != "omdb":
|
||||
result = await self.omdb_client.fetch_summary(
|
||||
movie_name,
|
||||
media_type=omdb_type,
|
||||
@@ -1495,19 +1610,20 @@ class SubtitleProcessor:
|
||||
logger.info("Found metadata via OMDb (fallback): %s (%s)", result.get("title"), result.get("year"))
|
||||
return result
|
||||
|
||||
if not result and self.tmdb_client and self.preferred_source != "tmdb":
|
||||
if not result and self.tmdb_client and source_preference != "tmdb":
|
||||
result = await self.tmdb_client.fetch_summary(
|
||||
movie_name,
|
||||
media_type=tmdb_type,
|
||||
year=year,
|
||||
season=season,
|
||||
episode=episode,
|
||||
language=language,
|
||||
)
|
||||
if result:
|
||||
logger.info("Found metadata via TMDb (fallback): %s (%s)", result.get("title"), result.get("year"))
|
||||
return result
|
||||
|
||||
if not result and self.tvmaze_client and self.preferred_source != "tvmaze" and is_series:
|
||||
if not result and self.tvmaze_client and source_preference != "tvmaze" and is_series:
|
||||
result = await self.tvmaze_client.fetch_summary(
|
||||
movie_name,
|
||||
year=year,
|
||||
|
||||
+21
-11
@@ -18,7 +18,7 @@ class TMDbClient:
|
||||
self.semaphore = asyncio.Semaphore(5) # Limit concurrent requests
|
||||
self.db_manager = db_manager
|
||||
|
||||
async def search_movie(self, title, year=None):
|
||||
async def search_movie(self, title, year=None, language=None):
|
||||
"""
|
||||
Search for a movie by title
|
||||
|
||||
@@ -37,6 +37,8 @@ class TMDbClient:
|
||||
}
|
||||
if year:
|
||||
params["year"] = year
|
||||
if language:
|
||||
params["language"] = language
|
||||
|
||||
try:
|
||||
start_time = time.time()
|
||||
@@ -84,7 +86,7 @@ class TMDbClient:
|
||||
logging.error(f"Error searching TMDb for movie '{title}': {e}")
|
||||
return None
|
||||
|
||||
async def search_tv(self, title, year=None):
|
||||
async def search_tv(self, title, year=None, language=None):
|
||||
"""
|
||||
Search for a TV series by title
|
||||
|
||||
@@ -103,6 +105,8 @@ class TMDbClient:
|
||||
}
|
||||
if year:
|
||||
params["first_air_date_year"] = year
|
||||
if language:
|
||||
params["language"] = language
|
||||
|
||||
try:
|
||||
start_time = time.time()
|
||||
@@ -150,7 +154,7 @@ class TMDbClient:
|
||||
logging.error(f"Error searching TMDb for TV '{title}': {e}")
|
||||
return None
|
||||
|
||||
async def get_movie_details(self, movie_id):
|
||||
async def get_movie_details(self, movie_id, language=None):
|
||||
"""
|
||||
Get detailed movie information
|
||||
|
||||
@@ -163,6 +167,8 @@ class TMDbClient:
|
||||
async with self.semaphore:
|
||||
url = f"{self.base_url}/movie/{movie_id}"
|
||||
params = {"api_key": self.api_key}
|
||||
if language:
|
||||
params["language"] = language
|
||||
|
||||
try:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
@@ -177,7 +183,7 @@ class TMDbClient:
|
||||
logging.error(f"Error getting TMDb movie details for ID {movie_id}: {e}")
|
||||
return None
|
||||
|
||||
async def get_tv_details(self, tv_id):
|
||||
async def get_tv_details(self, tv_id, language=None):
|
||||
"""
|
||||
Get detailed TV series information
|
||||
|
||||
@@ -190,6 +196,8 @@ class TMDbClient:
|
||||
async with self.semaphore:
|
||||
url = f"{self.base_url}/tv/{tv_id}"
|
||||
params = {"api_key": self.api_key}
|
||||
if language:
|
||||
params["language"] = language
|
||||
|
||||
try:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
@@ -204,7 +212,7 @@ class TMDbClient:
|
||||
logging.error(f"Error getting TMDb TV details for ID {tv_id}: {e}")
|
||||
return None
|
||||
|
||||
async def get_tv_season(self, tv_id, season_number):
|
||||
async def get_tv_season(self, tv_id, season_number, language=None):
|
||||
"""
|
||||
Get TV season information
|
||||
|
||||
@@ -218,6 +226,8 @@ class TMDbClient:
|
||||
async with self.semaphore:
|
||||
url = f"{self.base_url}/tv/{tv_id}/season/{season_number}"
|
||||
params = {"api_key": self.api_key}
|
||||
if language:
|
||||
params["language"] = language
|
||||
|
||||
try:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
@@ -232,7 +242,7 @@ class TMDbClient:
|
||||
logging.error(f"Error getting TMDb season data: {e}")
|
||||
return None
|
||||
|
||||
async def fetch_summary(self, title, media_type="movie", year=None, season=None, episode=None):
|
||||
async def fetch_summary(self, title, media_type="movie", year=None, season=None, episode=None, language=None):
|
||||
"""
|
||||
Fetch summary for movie or TV series
|
||||
|
||||
@@ -251,14 +261,14 @@ class TMDbClient:
|
||||
try:
|
||||
if media_type == "tv":
|
||||
# Search for TV series
|
||||
search_result = await self.search_tv(title, year)
|
||||
search_result = await self.search_tv(title, year, language=language)
|
||||
if not search_result:
|
||||
return None
|
||||
|
||||
tv_id = search_result["id"]
|
||||
|
||||
# Get detailed TV info
|
||||
tv_details = await self.get_tv_details(tv_id)
|
||||
tv_details = await self.get_tv_details(tv_id, language=language)
|
||||
if not tv_details:
|
||||
return None
|
||||
|
||||
@@ -266,7 +276,7 @@ class TMDbClient:
|
||||
|
||||
# If specific season/episode requested, try to get that plot
|
||||
if season is not None:
|
||||
season_data = await self.get_tv_season(tv_id, season)
|
||||
season_data = await self.get_tv_season(tv_id, season, language=language)
|
||||
if season_data and episode is not None:
|
||||
episodes = season_data.get("episodes", [])
|
||||
for ep in episodes:
|
||||
@@ -291,14 +301,14 @@ class TMDbClient:
|
||||
|
||||
else: # movie
|
||||
# Search for movie
|
||||
search_result = await self.search_movie(title, year)
|
||||
search_result = await self.search_movie(title, year, language=language)
|
||||
if not search_result:
|
||||
return None
|
||||
|
||||
movie_id = search_result["id"]
|
||||
|
||||
# Get detailed movie info
|
||||
movie_details = await self.get_movie_details(movie_id)
|
||||
movie_details = await self.get_movie_details(movie_id, language=language)
|
||||
if not movie_details:
|
||||
return None
|
||||
|
||||
|
||||
Reference in New Issue
Block a user