Add Ice Age: Continental Drift (2012) BDRip file to test filenames
BIN dist/renamer-0.5.10-py3-none-any.whl (vendored, new file)
Binary file not shown.
@@ -1,6 +1,6 @@
 [project]
 name = "renamer"
-version = "0.5.9"
+version = "0.5.10"
 description = "Terminal-based media file renamer and metadata viewer"
 readme = "README.md"
 requires-python = ">=3.11"

@@ -17,7 +17,6 @@ from .formatters.proposed_name_formatter import ProposedNameFormatter
 from .formatters.text_formatter import TextFormatter
 from .formatters.catalog_formatter import CatalogFormatter
 from .settings import Settings
-from .cache import Cache
 
 
 # Set up logging conditionally
@@ -57,7 +56,6 @@ class RenamerApp(App):
         self.scan_dir = Path(scan_dir) if scan_dir else None
         self.tree_expanded = False
         self.settings = Settings()
-        self.cache = Cache()
 
     def compose(self) -> ComposeResult:
         with Horizontal():
@@ -148,10 +146,9 @@ class RenamerApp(App):
         ).start()
 
     def _extract_and_show_details(self, file_path: Path):
-        time.sleep(1)  # Minimum delay to show loading
         try:
             # Initialize extractors and formatters
-            extractor = MediaExtractor.create(file_path, self.cache, self.settings.get("cache_ttl_extractors"))
+            extractor = MediaExtractor(file_path)
 
             mode = self.settings.get("mode")
             if mode == "technical":
@@ -205,11 +202,6 @@ class RenamerApp(App):
         tree = self.query_one("#file_tree", Tree)
         node = tree.cursor_node
         if node and node.data and isinstance(node.data, Path) and node.data.is_file():
-            # Clear cache for this file
-            cache_key_base = str(node.data)
-            # Invalidate all keys for this file (we can improve this later)
-            for key in ["title", "year", "source", "extension", "video_tracks", "audio_tracks", "subtitle_tracks"]:
-                self.cache.invalidate(f"{cache_key_base}_{key}")
             self._start_loading_animation()
             threading.Thread(
                 target=self._extract_and_show_details, args=(node.data,)
@@ -240,7 +232,7 @@ class RenamerApp(App):
         node = tree.cursor_node
         if node and node.data and isinstance(node.data, Path) and node.data.is_file():
             # Get the proposed name from the extractor
-            extractor = MediaExtractor.create(node.data, self.cache, self.settings.get("cache_ttl_extractors"))
+            extractor = MediaExtractor(node.data)
             proposed_formatter = ProposedNameFormatter(extractor)
             new_name = str(proposed_formatter)
             logging.info(f"Proposed new name: {new_name!r} for file: {node.data}")
@@ -273,11 +265,6 @@ class RenamerApp(App):
         """Update the tree node for a renamed file."""
         logging.info(f"update_renamed_file called with old_path={old_path}, new_path={new_path}")
 
-        # Clear cache for old file
-        cache_key_base = str(old_path)
-        for key in ["title", "year", "source", "extension", "video_tracks", "audio_tracks", "subtitle_tracks"]:
-            self.cache.invalidate(f"{cache_key_base}_{key}")
-
         tree = self.query_one("#file_tree", Tree)
         logging.info(f"Before update: cursor_node.data = {tree.cursor_node.data if tree.cursor_node else None}")
 
@@ -11,13 +11,16 @@ class Cache:
     """File-based cache with TTL support."""
 
     def __init__(self, cache_dir: Optional[Path] = None):
        if cache_dir is None:
             # Always use the default cache dir to avoid creating cache in scan dir
             cache_dir = Path.home() / ".cache" / "renamer"
         self.cache_dir = cache_dir
         self.cache_dir.mkdir(parents=True, exist_ok=True)
+        self._memory_cache = {}  # In-memory cache for faster access
 
     def _get_cache_file(self, key: str) -> Path:
         """Get cache file path with hashed filename and subdirs."""
+        import logging
+        logging.info(f"Cache _get_cache_file called with key: {key!r}")
         # Parse key format: ClassName.method_name.param_hash
         if '.' in key:
             parts = key.split('.')
@@ -26,12 +29,27 @@ class Cache:
             method_name = parts[1]
             param_hash = parts[2]
 
-            # Use class name as subdir
-            cache_subdir = self.cache_dir / class_name
+            # Use class name as subdir, but if it contains '/', use general to avoid creating nested dirs
+            if '/' in class_name or '\\' in class_name:
+                subdir = "general"
+                subkey = key
+                file_ext = "json"
+            else:
+                subdir = class_name
+                file_ext = "pkl"
+
+            cache_subdir = self.cache_dir / subdir
+            logging.info(f"Cache parsed key, class_name: {class_name!r}, cache_subdir: {cache_subdir!r}")
             cache_subdir.mkdir(parents=True, exist_ok=True)
 
-            # Use method_name.param_hash as filename
-            return cache_subdir / f"{method_name}.{param_hash}.pkl"
+            if file_ext == "pkl":
+                # Use method_name.param_hash as filename
+                return cache_subdir / f"{method_name}.{param_hash}.pkl"
+            else:
+                # Hash the subkey for filename
+                key_hash = hashlib.md5(subkey.encode('utf-8')).hexdigest()
+                return cache_subdir / f"{key_hash}.json"
 
         # Fallback for old keys (tmdb_, poster_, etc.)
         if key.startswith("tmdb_"):
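
A note on the resulting on-disk layout, as a sketch (the key, method name, and hashes below are invented for illustration; only the default ~/.cache/renamer location is taken from the diff):

    # Structured key -> pickle in a per-class subdir:
    #   "FilenameExtractor.title.3fa2..." -> ~/.cache/renamer/FilenameExtractor/title.3fa2....pkl
    # Class part containing a path separator -> md5-hashed JSON under "general":
    #   "some/path.title.3fa2..." -> ~/.cache/renamer/general/<md5(full key)>.json
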
@@ -40,12 +58,16 @@ class Cache:
         elif key.startswith("poster_"):
             subdir = "posters"
             subkey = key[7:]  # Remove "poster_" prefix
+        elif key.startswith("extractor_"):
+            subdir = "extractors"
+            subkey = key[10:]  # Remove "extractor_" prefix
         else:
             subdir = "general"
             subkey = key
 
         # Create subdir
         cache_subdir = self.cache_dir / subdir
+        logging.info(f"Cache fallback, subdir: {subdir!r}, cache_subdir: {cache_subdir!r}")
         cache_subdir.mkdir(parents=True, exist_ok=True)
 
         # Hash the subkey for filename
@@ -54,6 +76,14 @@ class Cache:
 
     def get(self, key: str) -> Optional[Any]:
         """Get cached value if not expired."""
+        # Check memory cache first
+        if key in self._memory_cache:
+            data = self._memory_cache[key]
+            if time.time() > data.get('expires', 0):
+                del self._memory_cache[key]
+                return None
+            return data.get('value')
+
         cache_file = self._get_cache_file(key)
         if not cache_file.exists():
             return None
@@ -67,6 +97,8 @@ class Cache:
                 cache_file.unlink(missing_ok=True)
                 return None
 
+            # Store in memory cache
+            self._memory_cache[key] = data
             return data.get('value')
         except (json.JSONDecodeError, IOError):
             # Corrupted, remove
@@ -75,11 +107,14 @@ class Cache:
 
     def set(self, key: str, value: Any, ttl_seconds: int) -> None:
         """Set cached value with TTL."""
-        cache_file = self._get_cache_file(key)
         data = {
             'value': value,
             'expires': time.time() + ttl_seconds
         }
+        # Store in memory cache
+        self._memory_cache[key] = data
+
+        cache_file = self._get_cache_file(key)
         try:
             with open(cache_file, 'w') as f:
                 json.dump(data, f)
@@ -154,6 +189,14 @@ class Cache:
 
     def get_object(self, key: str) -> Optional[Any]:
         """Get pickled object from cache if not expired."""
+        # Check memory cache first
+        if key in self._memory_cache:
+            data = self._memory_cache[key]
+            if time.time() > data.get('expires', 0):
+                del self._memory_cache[key]
+                return None
+            return data.get('value')
+
         cache_file = self._get_cache_file(key)
         if not cache_file.exists():
             return None
@@ -167,6 +210,8 @@ class Cache:
                 cache_file.unlink(missing_ok=True)
                 return None
 
+            # Store in memory cache
+            self._memory_cache[key] = data
             return data.get('value')
         except (pickle.PickleError, IOError):
             # Corrupted, remove
@@ -175,11 +220,14 @@ class Cache:
 
     def set_object(self, key: str, obj: Any, ttl_seconds: int) -> None:
         """Pickle and cache object with TTL."""
-        cache_file = self._get_cache_file(key)
         data = {
             'value': obj,
             'expires': time.time() + ttl_seconds
         }
+        # Store in memory cache
+        self._memory_cache[key] = data
+
+        cache_file = self._get_cache_file(key)
         try:
             with open(cache_file, 'wb') as f:
                 pickle.dump(data, f)
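
Taken together with the get/get_object changes above, _memory_cache now acts as a write-through layer in front of the JSON and pickle files; a minimal usage sketch (the key and TTL below are invented for illustration):

    cache = Cache()
    cache.set_object("TMDBExtractor.details.deadbeef", {"id": 8355}, ttl_seconds=21600)
    # Served from _memory_cache without touching disk; a fresh Cache()
    # instance would fall back to the pickle file until the TTL expires.
    details = cache.get_object("TMDBExtractor.details.deadbeef")
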
@@ -31,12 +31,17 @@ def cached_method(ttl_seconds: int = 3600) -> Callable:
 
             # Use instance identifier (file_path for extractors)
             instance_id = getattr(self, 'file_path', str(id(self)))
+            # If instance_id contains path separators, hash it to avoid creating subdirs
+            if '/' in str(instance_id) or '\\' in str(instance_id):
+                instance_id = hashlib.md5(str(instance_id).encode('utf-8')).hexdigest()
 
-            # Create hash from args and kwargs (excluding self)
-            param_str = json.dumps((args, kwargs), sort_keys=True, default=str)
-            param_hash = hashlib.md5(param_str.encode('utf-8')).hexdigest()
-
-            cache_key = f"{class_name}.{method_name}.{instance_id}.{param_hash}"
+            # Create hash from args and kwargs only if they exist (excluding self)
+            if args or kwargs:
+                param_str = json.dumps((args, kwargs), sort_keys=True, default=str)
+                param_hash = hashlib.md5(param_str.encode('utf-8')).hexdigest()
+                cache_key = f"{class_name}.{method_name}.{instance_id}.{param_hash}"
+            else:
+                cache_key = f"{class_name}.{method_name}.{instance_id}"
 
             # Try to get from cache
             cached_result = _cache.get_object(cache_key)
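
With this change the decorator emits instance-scoped keys and skips the parameter hash for zero-argument calls; hypothetical key shapes (the method names and path are invented for illustration):

    # extractor.title() on file_path "/films/movie.mkv" (separators -> md5 of the path):
    #   "FilenameExtractor.title.<md5('/films/movie.mkv')>"
    # extractor.track(1) (args present -> parameter hash appended):
    #   "FilenameExtractor.track.<md5('/films/movie.mkv')>.<md5(json of ((1,), {}))>"
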
@@ -10,38 +10,14 @@ from .default_extractor import DefaultExtractor
 class MediaExtractor:
     """Class to extract various metadata from media files using specialized extractors"""
 
-    @classmethod
-    def create(cls, file_path: Path, cache=None, ttl_seconds: int = 21600):
-        """Factory method that returns cached object if available, else creates new."""
-        if cache:
-            cache_key = f"extractor_{file_path}"
-            cached_obj = cache.get_object(cache_key)
-            if cached_obj:
-                print(f"Loaded MediaExtractor object from cache for {file_path.name}")
-                return cached_obj
-
-        # Create new instance
-        instance = cls(file_path, cache, ttl_seconds)
-
-        # Cache the object
-        if cache:
-            cache_key = f"extractor_{file_path}"
-            cache.set_object(cache_key, instance, ttl_seconds)
-            print(f"Cached MediaExtractor object for {file_path.name}")
-
-        return instance
-
-    def __init__(self, file_path: Path, cache=None, ttl_seconds: int = 21600):
+    def __init__(self, file_path: Path):
         self.file_path = file_path
-        self.cache = cache
-        self.ttl_seconds = ttl_seconds
-        self.cache_key = f"file_data_{file_path}"
 
         self.filename_extractor = FilenameExtractor(file_path)
         self.metadata_extractor = MetadataExtractor(file_path)
         self.mediainfo_extractor = MediaInfoExtractor(file_path)
         self.fileinfo_extractor = FileInfoExtractor(file_path)
-        self.tmdb_extractor = TMDBExtractor(file_path, cache, ttl_seconds)
+        self.tmdb_extractor = TMDBExtractor(file_path)
         self.default_extractor = DefaultExtractor()
 
         # Extractor mapping
@@ -191,15 +167,8 @@ class MediaExtractor:
             },
         }
 
-        # No caching logic here - handled in create() method
-
     def get(self, key: str, source: str | None = None):
         """Get extracted data by key, optionally from specific source"""
-        print(f"Extracting real data for key '{key}' in {self.file_path.name}")
-        return self._get_uncached(key, source)
-
-    def _get_uncached(self, key: str, source: str | None = None):
-        """Original get logic without caching"""
         if source:
             # Specific source requested - find the extractor and call the method directly
             for extractor_name, extractor in self._extractors.items():
@@ -3,30 +3,34 @@ import os
 import time
 import hashlib
 import requests
+import logging
 from pathlib import Path
 from typing import Dict, Optional, Tuple, Any
 from ..secrets import TMDB_API_KEY, TMDB_ACCESS_TOKEN
 
+from ..cache import Cache
+from ..settings import Settings
+
 class TMDBExtractor:
     """Class to extract TMDB movie information"""
 
-    def __init__(self, file_path: Path, cache=None, ttl_seconds: int = 21600):
+    def __init__(self, file_path: Path):
         self.file_path = file_path
-        self.cache = cache
-        self.ttl_seconds = ttl_seconds
+        self.cache = Cache()
+        self.ttl_seconds = Settings().get("cache_ttl_extractors", 21600)
         self._movie_db_info = None
 
     def _get_cached_data(self, cache_key: str) -> Optional[Dict[str, Any]]:
         """Get data from cache if valid"""
         if self.cache:
-            return self.cache.get(f"tmdb_{cache_key}")
+            return self.cache.get_object(f"tmdb_{cache_key}")
         return None
 
     def _set_cached_data(self, cache_key: str, data: Dict[str, Any]):
         """Store data in cache"""
         if self.cache:
-            self.cache.set(f"tmdb_{cache_key}", data, self.ttl_seconds)
+            self.cache.set_object(f"tmdb_{cache_key}", data, self.ttl_seconds)
 
 
     def _make_tmdb_request(self, endpoint: str, params: Optional[Dict[str, Any]] = None) -> Optional[Dict[str, Any]]:
         """Make a request to TMDB API"""
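
Since the extractor now resolves Cache and Settings itself, callers no longer thread cache arguments through; a sketch of the new construction (the filename is taken from the commit title and is illustrative):

    extractor = TMDBExtractor(Path("Ice Age: Continental Drift (2012) BDRip.mkv"))
    # cache and ttl_seconds are resolved internally; tmdb_* entries are
    # now pickled via get_object/set_object instead of the JSON get/set.
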
@@ -56,8 +60,10 @@ class TMDBExtractor:
         # Check cache first
         cached = self._get_cached_data(cache_key)
         if cached is not None:
+            logging.info(f"TMDB cache hit for search: {title} ({year})")
             return cached
 
+        logging.info(f"TMDB cache miss for search: {title} ({year}), making request")
         params = {'query': title}
         if year:
             params['year'] = year
@@ -95,8 +101,10 @@ class TMDBExtractor:
         # Check cache first
         cached = self._get_cached_data(cache_key)
         if cached is not None:
+            logging.info(f"TMDB cache hit for movie details: {movie_id}")
             return cached
 
+        logging.info(f"TMDB cache miss for movie details: {movie_id}, making request")
         result = self._make_tmdb_request(f'/movie/{movie_id}')
         if result:
             # Cache the result
@@ -74,18 +74,6 @@ class FormatterApplier:
         # Sort formatters according to the global order
         ordered_formatters = sorted(formatters, key=lambda f: FormatterApplier.FORMATTER_ORDER.index(f) if f in FormatterApplier.FORMATTER_ORDER else len(FormatterApplier.FORMATTER_ORDER))
 
-        # Get caller info
-        frame = inspect.currentframe()
-        if frame and frame.f_back:
-            caller = f"{frame.f_back.f_code.co_filename}:{frame.f_back.f_lineno} in {frame.f_back.f_code.co_name}"
-        else:
-            caller = "Unknown"
-
-        logging.info(f"Caller: {caller}")
-        logging.info(f"Original formatters: {[f.__name__ if hasattr(f, '__name__') else str(f) for f in formatters]}")
-        logging.info(f"Ordered formatters: {[f.__name__ if hasattr(f, '__name__') else str(f) for f in ordered_formatters]}")
-        logging.info(f"Input value: {repr(value)}")
-
         # Apply in the ordered sequence
         for formatter in ordered_formatters:
             try:
@@ -96,7 +84,6 @@ class FormatterApplier:
                 logging.error(f"Error applying {formatter.__name__ if hasattr(formatter, '__name__') else str(formatter)}: {e}")
                 value = "Unknown"
 
-        logging.info(f"Final value: {repr(value)}")
         return value
 
     @staticmethod
@@ -51,9 +51,9 @@ class Settings:
         except IOError as e:
             print(f"Error: Could not save settings: {e}")
 
-    def get(self, key: str) -> Any:
+    def get(self, key: str, default: Any = None) -> Any:
         """Get a setting value."""
-        return self._settings.get(key, self.DEFAULTS.get(key))
+        return self._settings.get(key, self.DEFAULTS.get(key, default))
 
     def set(self, key: str, value: Any) -> None:
         """Set a setting value and save."""
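
The new default parameter gives call sites a final fallback when a key is in neither the saved settings nor DEFAULTS, which is exactly what TMDBExtractor relies on above:

    settings = Settings()
    ttl = settings.get("cache_ttl_extractors", 21600)  # 21600 only if the key is unset everywhere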