commit 2816b00787 ("done")

.gitignore (vendored, new file, 56 lines)
@@ -0,0 +1,56 @@
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
env/
venv/
ENV/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Virtual environments
.venv
env/
venv/
ENV/

# IDE
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store
Thumbs.db

# Cache and logs (runtime generated)
cache/
logs/

# Environment variables
.env
.env.local

# OS
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db

config.xml (new file, 29 lines)
@@ -0,0 +1,29 @@
<?xml version="1.0" encoding="UTF-8"?>
<config>

    <!-- =============================
         PATH MAPPINGS (Windows → Linux)
         Maps Windows paths to their Linux equivalents for API communication
         ============================= -->
    <path_mappings>
        <map from="P:\tv" to="/mnt/plex/tv" />
        <map from="P:\anime" to="/mnt/plex/anime" />
        <map from="P:\movies" to="/mnt/plex/movies" />
    </path_mappings>

    <!-- =============================
         SONARR / RADARR SETTINGS
         Configure your Sonarr/Radarr instances and release group naming
         ============================= -->
    <services>
        <sonarr>
            <url>http://10.0.0.10:8989</url>
            <api_key>a3458e2a095e4e1c892626c4a4f6959f</api_key>
        </sonarr>
        <radarr>
            <url>http://10.0.0.10:7878</url>
            <api_key>64680475a6b9425bb47bd7eed4ae92fe</api_key>
        </radarr>
    </services>

</config>
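
For illustration only (not part of the commit): a minimal sketch of how one of the path mappings above is applied. The prefix replacement mirrors what SonarrRadarrHelper._convert_to_linux_path does later in this commit; the function name and values here are hypothetical.

# sketch: apply the P:\tv mapping from config.xml
mapping = {"from": "P:\\tv", "to": "/mnt/plex/tv"}

def to_server_path(local_path: str, m: dict) -> str:
    # Normalize separators, then swap the Windows prefix for the Linux prefix.
    normalized = str(local_path).replace("\\", "/")
    prefix = m["from"].replace("\\", "/")
    if normalized.lower().startswith(prefix.lower()):
        return m["to"].rstrip("/") + "/" + normalized[len(prefix):].lstrip("/")
    return normalized

print(to_server_path(r"P:\tv\Supernatural\Season 13", mapping))
# -> /mnt/plex/tv/Supernatural/Season 13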

core/config_helper.py (new file, 69 lines)
@@ -0,0 +1,69 @@
import xml.etree.ElementTree as ET
from pathlib import Path

# Default XML content to write if missing
DEFAULT_XML = """<?xml version="1.0" encoding="UTF-8"?>
<config>
    <path_mappings>
        <map from="P:\\tv" to="/mnt/plex/tv" />
        <map from="P:\\anime" to="/mnt/plex/anime" />
    </path_mappings>
    <services>
        <sonarr>
            <url>http://localhost:8989</url>
            <api_key>YOUR_SONARR_API_KEY</api_key>
            <new_release_group>CONVERTED</new_release_group>
        </sonarr>
        <radarr>
            <url>http://localhost:7878</url>
            <api_key>YOUR_RADARR_API_KEY</api_key>
            <new_release_group>CONVERTED</new_release_group>
        </radarr>
    </services>
</config>
"""

def load_config_xml(path: Path) -> dict:
    if not path.exists():
        path.write_text(DEFAULT_XML, encoding="utf-8")
        print(f"ℹ️ Created default config.xml at {path}")

    tree = ET.parse(path)
    root = tree.getroot()

    # --- Path Mappings ---
    path_mappings = []
    for m in root.findall("path_mappings/map"):
        f = m.attrib.get("from")
        t = m.attrib.get("to")
        if f and t:
            path_mappings.append({"from": f, "to": t})

    # --- Services (Sonarr/Radarr) ---
    services = {"sonarr": {}, "radarr": {}}
    sonarr_elem = root.find("services/sonarr")
    if sonarr_elem is not None:
        url_elem = sonarr_elem.find("url")
        api_elem = sonarr_elem.find("api_key")
        rg_elem = sonarr_elem.find("new_release_group")
        services["sonarr"] = {
            "url": url_elem.text if url_elem is not None and url_elem.text else None,
            "api_key": api_elem.text if api_elem is not None and api_elem.text else None,
            "new_release_group": rg_elem.text if rg_elem is not None and rg_elem.text else "CONVERTED"
        }

    radarr_elem = root.find("services/radarr")
    if radarr_elem is not None:
        url_elem = radarr_elem.find("url")
        api_elem = radarr_elem.find("api_key")
        rg_elem = radarr_elem.find("new_release_group")
        services["radarr"] = {
            "url": url_elem.text if url_elem is not None and url_elem.text else None,
            "api_key": api_elem.text if api_elem is not None and api_elem.text else None,
            "new_release_group": rg_elem.text if rg_elem is not None and rg_elem.text else "CONVERTED"
        }

    return {
        "path_mappings": path_mappings,
        "services": services
    }
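
A hypothetical usage sketch (not part of the commit), showing the shape of the dict load_config_xml returns for the config.xml above:

from pathlib import Path

from core.config_helper import load_config_xml

config = load_config_xml(Path("config.xml"))
# Expected shape, based on the parser above:
# {
#     "path_mappings": [{"from": "P:\\tv", "to": "/mnt/plex/tv"}, ...],
#     "services": {
#         "sonarr": {"url": "...", "api_key": "...", "new_release_group": "CONVERTED"},
#         "radarr": {"url": "...", "api_key": "...", "new_release_group": "CONVERTED"},
#     },
# }
print(config["services"]["sonarr"]["url"])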

core/logger_helper.py (new file, 64 lines)
@@ -0,0 +1,64 @@
import logging
import json
from logging.handlers import RotatingFileHandler
from pathlib import Path
from datetime import datetime

class JsonFormatter(logging.Formatter):
    """
    Custom JSON log formatter for structured logging.
    """
    def format(self, record: logging.LogRecord) -> str:
        log_object = {
            "timestamp": datetime.utcfromtimestamp(record.created).strftime("%Y-%m-%dT%H:%M:%SZ"),
            "level": record.levelname,
            "message": record.getMessage(),
            "module": record.module,
            "funcName": record.funcName,
            "line": record.lineno,
        }

        # Include a nested dict passed via logger.info("msg", extra={"extra": {...}})
        if hasattr(record, "extra") and isinstance(record.extra, dict):
            log_object.update(record.extra)

        # Include exception info if present
        if record.exc_info:
            log_object["exception"] = self.formatException(record.exc_info)

        return json.dumps(log_object, ensure_ascii=False)

def setup_logger(log_folder: Path, log_file_name: str = "rolling_rename.log", level=logging.INFO) -> logging.Logger:
    """
    Sets up a logger that prints to console and writes to a rotating JSON log file.
    """
    log_folder.mkdir(parents=True, exist_ok=True)
    log_file = log_folder / log_file_name

    logger = logging.getLogger("rolling_rename")
    logger.setLevel(level)
    logger.propagate = False  # Prevent double logging

    # Formatters
    text_formatter = logging.Formatter(
        "%(asctime)s [%(levelname)s] %(message)s (%(module)s:%(lineno)d)",
        datefmt="%Y-%m-%d %H:%M:%S"
    )
    json_formatter = JsonFormatter()

    # Console handler (human-readable)
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(text_formatter)
    console_handler.setLevel(level)

    # File handler (JSON logs)
    file_handler = RotatingFileHandler(log_file, maxBytes=5 * 1024 * 1024, backupCount=3, encoding="utf-8")
    file_handler.setFormatter(json_formatter)
    file_handler.setLevel(level)

    # Add handlers only once
    if not logger.handlers:
        logger.addHandler(console_handler)
        logger.addHandler(file_handler)

    return logger
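
A hypothetical usage sketch (not part of the commit); the console line and JSON record shown in the comments are approximate, based on the formatters above:

from pathlib import Path

from core.logger_helper import setup_logger

logger = setup_logger(Path("logs"))
logger.info("Cache loaded", extra={"extra": {"series": 42}})
# Console (text formatter), roughly:
#   2024-01-01 12:00:00 [INFO] Cache loaded (example:7)
# logs/rolling_rename.log (JSON formatter), one object per line, roughly:
#   {"timestamp": "2024-01-01T12:00:00Z", "level": "INFO", "message": "Cache loaded", ..., "series": 42}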

core/sonarr_radarr_helper.py (new file, 320 lines)
@@ -0,0 +1,320 @@
"""
|
||||
Integration with Sonarr/Radarr for rolling rename functionality.
|
||||
Updates episode/movie release groups via API.
|
||||
"""
|
||||
|
||||
import requests
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict
|
||||
from core.logger_helper import setup_logger
|
||||
|
||||
logger = setup_logger(Path(__file__).parent.parent / "logs")
|
||||
|
||||
|
||||
class SonarrRadarrHelper:
|
||||
def __init__(self, sonarr_url: str = None, sonarr_api_key: str = None,
|
||||
radarr_url: str = None, radarr_api_key: str = None,
|
||||
path_mappings: list = None):
|
||||
"""Initialize Sonarr/Radarr API clients.
|
||||
|
||||
Args:
|
||||
sonarr_url: Base URL like http://10.0.0.10:8989 (without /api/v3)
|
||||
radarr_url: Base URL like http://10.0.0.10:7878 (without /api/v3)
|
||||
path_mappings: List of dicts with 'from' (Windows) and 'to' (Linux) keys
|
||||
"""
|
||||
        self.sonarr_url = f"{sonarr_url.rstrip('/')}/api/v3" if sonarr_url else None
        self.sonarr_api_key = sonarr_api_key
        self.radarr_url = f"{radarr_url.rstrip('/')}/api/v3" if radarr_url else None
        self.radarr_api_key = radarr_api_key
        self.path_mappings = path_mappings or []

        # Cache for series and movies
        self.sonarr_cache = None
        self.radarr_cache = None
        self.cache_file_sonarr = Path(__file__).parent.parent / "cache" / "sonarr_cache.json"
        self.cache_file_radarr = Path(__file__).parent.parent / "cache" / "radarr_cache.json"

    def _convert_to_linux_path(self, windows_path: str) -> str:
        """Convert Windows path to Linux path using path_mappings."""
        windows_path = str(windows_path).replace("\\", "/")

        # Ensure path_mappings is a list
        if not self.path_mappings:
            logger.debug(f"No path mappings configured, returning path as-is: {windows_path}")
            return windows_path

        # Try to find matching mapping
        for mapping in self.path_mappings:
            # Safely extract from and to values
            if isinstance(mapping, dict):
                from_path = str(mapping.get("from", "")).replace("\\", "/").lower()
                to_path = mapping.get("to", "")
            else:
                # Skip invalid mapping entries
                logger.warning(f"Invalid path mapping (not a dict): {mapping}")
                continue

            if not from_path or not to_path:
                continue

            if windows_path.lower().startswith(from_path):
                # Replace the Windows portion with Linux portion
                relative_path = windows_path[len(from_path):]
                linux_path = to_path.rstrip("/") + "/" + relative_path.lstrip("/")
                logger.debug(f"Path conversion: {windows_path} → {linux_path}")
                return linux_path

        # No mapping found, return as-is (already converted to /)
        logger.debug(f"No path mapping found for: {windows_path}")
        return windows_path

    def load_sonarr_cache(self) -> bool:
        """Load and cache all Sonarr series data.

        Returns:
            True if cache loaded successfully, False otherwise
        """
        if not self.sonarr_url or not self.sonarr_api_key:
            logger.warning("Sonarr API not configured")
            return False

        try:
            print("📡 Fetching Sonarr series cache...")
            headers = {"X-Api-Key": self.sonarr_api_key}
            series_url = f"{self.sonarr_url}/series"
            response = requests.get(series_url, headers=headers, timeout=10)
            response.raise_for_status()

            series_list = response.json()

            # Store series data
            cache_data = []
            for series in series_list:
                cache_data.append({
                    "type": "sonarr",
                    "id": series.get("id"),
                    "title": series.get("title", "Unknown"),
                    "path": series.get("path", ""),
                })

            self.sonarr_cache = cache_data

            # Save to file
            self.cache_file_sonarr.parent.mkdir(parents=True, exist_ok=True)
            with open(self.cache_file_sonarr, 'w') as f:
                json.dump(cache_data, f, indent=2)

            print(f"✓ Sonarr cache loaded: {len(cache_data)} series")
            logger.info(f"Sonarr cache loaded: {len(cache_data)} series")
            return True

        except Exception as e:
            logger.warning(f"Error loading Sonarr cache: {e}")
            return False

    def load_radarr_cache(self) -> bool:
        """Load and cache all Radarr movies data.

        Returns:
            True if cache loaded successfully, False otherwise
        """
        if not self.radarr_url or not self.radarr_api_key:
            logger.warning("Radarr API not configured")
            return False

        try:
            print("📡 Fetching Radarr movies cache...")
            headers = {"X-Api-Key": self.radarr_api_key}
            movie_url = f"{self.radarr_url}/movie"
            response = requests.get(movie_url, headers=headers, timeout=10)
            response.raise_for_status()

            movies = response.json()
            cache_data = []

            for movie in movies:
                if "movieFile" in movie and movie["movieFile"]:
                    movie_file_path = movie["movieFile"].get("path", "")
                    if movie_file_path:
                        movie_file_path = str(Path(movie_file_path).resolve()).replace("\\", "/")
                    cache_data.append({
                        "type": "radarr",
                        "movie_id": movie.get("id"),
                        "title": movie.get("title", "Unknown"),
                        "year": movie.get("year"),
                        "file_path": movie_file_path,
                        "quality_profile": movie.get("qualityProfileId"),
                    })

            self.radarr_cache = cache_data

            # Save to file
            self.cache_file_radarr.parent.mkdir(parents=True, exist_ok=True)
            with open(self.cache_file_radarr, 'w') as f:
                json.dump(cache_data, f, indent=2)

            print(f"✓ Radarr cache loaded: {len(cache_data)} movies")
            logger.info(f"Radarr cache loaded: {len(cache_data)} movies")
            return True

        except Exception as e:
            logger.warning(f"Error loading Radarr cache: {e}")
            return False




    def find_series_by_folder(self, folder_path: str) -> Optional[Dict]:
        """Find series in cache by folder path and fetch episodes.

        Args:
            folder_path: Windows folder path (e.g., P:\\tv\\Supernatural or P:\\tv\\Supernatural\\Season 13)

        Returns:
            Dict with series info and episode count if found, None otherwise
        """
        # Convert Windows folder path to Linux path
        windows_path = str(folder_path)
        linux_path = self._convert_to_linux_path(windows_path)
        # Just normalize separators, don't resolve
        linux_path = linux_path.replace("\\", "/").rstrip("/")

        logger.info(f"Input folder: {windows_path}")
        logger.info(f"Converted to: {linux_path}")

        # Remove Season subfolder if present (e.g., /path/Supernatural/Season 13 -> /path/Supernatural)
        # This handles cases where a season subfolder is passed instead of the series root
        path_parts = linux_path.split("/")
        if path_parts and path_parts[-1].lower().startswith("season"):
            linux_path = "/".join(path_parts[:-1])
            logger.info(f"Stripped season folder, searching for: {linux_path}")

        # Search Sonarr cache for matching series path
        if self.sonarr_cache:
            for series in self.sonarr_cache:
                series_path = series.get("path", "").rstrip("/")

                if linux_path.lower() == series_path.lower():
                    series_id = series.get("id")
                    series_title = series.get("title")
                    logger.info(f"✓ Found series: {series_title} (ID: {series_id})")

                    # Fetch episodes from API
                    episodes = []
                    try:
                        if self.sonarr_url and self.sonarr_api_key:
                            headers = {"X-Api-Key": self.sonarr_api_key}
                            episode_url = f"{self.sonarr_url}/episode?seriesId={series_id}"
                            ep_response = requests.get(episode_url, headers=headers, timeout=10)
                            ep_response.raise_for_status()
                            episodes = ep_response.json()

                            # For each episode with a file, fetch the file details to get the path
                            for episode in episodes:
                                if episode.get("hasFile") and episode.get("episodeFileId"):
                                    try:
                                        file_id = episode.get("episodeFileId")
                                        file_url = f"{self.sonarr_url}/episodefile/{file_id}"
                                        file_response = requests.get(file_url, headers=headers, timeout=10)
                                        file_response.raise_for_status()
                                        file_data = file_response.json()
                                        # Add file path to episode
                                        episode["episodeFile"] = file_data
                                    except Exception as e:
                                        logger.debug(f"Error fetching episode file {file_id}: {e}")

                            logger.info(f"Fetched {len(episodes)} episodes for {series_title}")
                            print(f"📡 Fetched {len(episodes)} episodes")

                            # Save to temp cache
                            temp_cache = {
                                "series_id": series_id,
                                "series_title": series_title,
                                "total_episodes": len(episodes),
                                "episodes": episodes
                            }
                            cache_file = Path(__file__).parent.parent / "cache" / "temp_episodes.json"
                            cache_file.parent.mkdir(parents=True, exist_ok=True)
                            with open(cache_file, 'w') as f:
                                json.dump(temp_cache, f, indent=2)
                            logger.info(f"Saved episodes to {cache_file}")

                    except Exception as e:
                        logger.error(f"Error fetching episodes: {e}")

                    return {
                        "type": "sonarr",
                        "id": series_id,
                        "title": series_title,
                        "path": series_path,
                        "episode_count": len(episodes),
                    }

        # Search Radarr cache
        if self.radarr_cache:
            for item in self.radarr_cache:
                item_path = item.get("file_path", "").rstrip("/")
                # For movies, check if folder matches the parent directory
                if linux_path.lower() == item_path.lower() or linux_path.lower() in item_path.lower():
                    logger.info(f"✓ Found movie: {item['title']} ({item['year']})")
                    return item

        logger.info(f"No series found for: {linux_path}")
        return None

    def trigger_sonarr_rename(self, series_id: int, episode_file_id: int) -> bool:
        """Trigger Sonarr to rename an episode file.

        Args:
            series_id: Sonarr series ID
            episode_file_id: Episode file ID to rename

        Returns:
            True if successful, False otherwise
        """
        if not self.sonarr_url or not self.sonarr_api_key:
            logger.warning("Sonarr not configured")
            return False

        try:
            headers = {"X-Api-Key": self.sonarr_api_key}
            cmd_url = f"{self.sonarr_url}/command"
            cmd_data = {
                "name": "RenameFiles",
                "seriesId": series_id,
                "files": [episode_file_id]
            }
            response = requests.post(cmd_url, headers=headers, json=cmd_data, timeout=10)
            response.raise_for_status()
            return True
        except Exception as e:
            logger.error(f"Error triggering Sonarr rename: {e}")
            return False

    def trigger_radarr_rename(self, movie_file_id: int) -> bool:
        """Trigger Radarr to rename a movie file.

        Args:
            movie_file_id: Movie file ID to rename

        Returns:
            True if successful, False otherwise
        """
        if not self.radarr_url or not self.radarr_api_key:
            logger.warning("Radarr not configured")
            return False

        try:
            headers = {"X-Api-Key": self.radarr_api_key}
            cmd_url = f"{self.radarr_url}/command"
            cmd_data = {
                "name": "RenameMovie",
                "movieFileIds": [movie_file_id]
            }
            response = requests.post(cmd_url, headers=headers, json=cmd_data, timeout=10)
            response.raise_for_status()
            return True
        except Exception as e:
            logger.error(f"Error triggering Radarr rename: {e}")
            return False
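
A hypothetical usage sketch (not part of the commit); the URL, API key, and the episode file ID are placeholder values:

from core.sonarr_radarr_helper import SonarrRadarrHelper

helper = SonarrRadarrHelper(
    sonarr_url="http://localhost:8989",          # placeholder
    sonarr_api_key="YOUR_SONARR_API_KEY",        # placeholder
    path_mappings=[{"from": "P:\\tv", "to": "/mnt/plex/tv"}],
)
helper.load_sonarr_cache()                        # GET /api/v3/series -> cache/sonarr_cache.json
info = helper.find_series_by_folder(r"P:\tv\Supernatural")
if info and info.get("type") == "sonarr":
    # Episode file IDs come from cache/temp_episodes.json, written by find_series_by_folder;
    # 123 is a made-up example ID.
    helper.trigger_sonarr_rename(info["id"], episode_file_id=123)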

main.py (new file, 269 lines)
@@ -0,0 +1,269 @@
"""
|
||||
Rolling rename script - Updates episode release groups one at a time with delays.
|
||||
Useful for staggering Sonarr/Radarr renames to avoid overwhelming the API or filesystem.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import time
|
||||
import json
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from core.config_helper import load_config_xml
|
||||
from core.sonarr_radarr_helper import SonarrRadarrHelper
|
||||
from core.logger_helper import setup_logger
|
||||
|
||||
logger = setup_logger(Path("logs"))
|
||||
|
||||
|
||||
def convert_server_path_to_local(server_path: str, path_mappings: list) -> str:
|
||||
"""Convert server path (Linux) back to local path (Windows/Mac/Linux).
|
||||
|
||||
Useful when user provides a path from Sonarr/Radarr server instead of local path.
|
||||
"""
|
||||
if not path_mappings:
|
||||
return server_path
|
||||
|
||||
# Normalize the input path
|
||||
server_path = str(server_path).replace("\\", "/")
|
||||
|
||||
# Try to find reverse mapping (server path -> local path)
|
||||
for mapping in path_mappings:
|
||||
if isinstance(mapping, dict):
|
||||
to_path = mapping.get("to", "").replace("\\", "/").rstrip("/")
|
||||
from_path = mapping.get("from", "")
|
||||
|
||||
# Check if server_path starts with the "to" (server) path
|
||||
if server_path.lower().startswith(to_path.lower()):
|
||||
relative = server_path[len(to_path):].lstrip("/")
|
||||
# Convert back to Windows path format
|
||||
result = (from_path + "\\" + relative).replace("/", "\\") if relative else from_path
|
||||
return result
|
||||
|
||||
return server_path
|
||||
|
||||
|
||||
def rolling_rename_series(folder_path: str, wait_seconds: int = 20, season: int = None, sr_helper: SonarrRadarrHelper = None):
|
||||
"""
|
||||
Rename episodes in a series one at a time with delays.
|
||||
|
||||
Args:
|
||||
folder_path: Path to series folder (e.g., P:\\tv\\Supernatural)
|
||||
        wait_seconds: Seconds to wait between renames (default: 20)
        season: Optional season number to target (default: None for all seasons)
        sr_helper: SonarrRadarrHelper instance (optional, created if not provided)
    """
    if sr_helper is None:
        config = load_config_xml(Path("config.xml"))
        sonarr_config = config.get("services", {}).get("sonarr", {})
        radarr_config = config.get("services", {}).get("radarr", {})
        path_mappings = config.get("path_mappings", [])

        sr_helper = SonarrRadarrHelper(
            sonarr_url=sonarr_config.get("url"),
            sonarr_api_key=sonarr_config.get("api_key"),
            radarr_url=radarr_config.get("url"),
            radarr_api_key=radarr_config.get("api_key"),
            path_mappings=path_mappings
        )

    folder = Path(folder_path)

    # If folder doesn't exist locally, try converting from server path
    if not folder.is_dir():
        config = load_config_xml(Path("config.xml"))
        path_mappings = config.get("path_mappings", [])
        converted_path = convert_server_path_to_local(folder_path, path_mappings)
        folder = Path(converted_path)

    if not folder.is_dir():
        logger.error(f"Folder not found: {folder}")
        return

    # Load caches first
    logger.info("Loading Sonarr/Radarr caches...")
    sr_helper.load_sonarr_cache()
    sr_helper.load_radarr_cache()

    # Find series
    logger.info(f"Finding series for: {folder}")
    series_info = sr_helper.find_series_by_folder(str(folder))

    if not series_info:
        logger.error(f"Series not found in Sonarr/Radarr")
        return

    series_type = series_info.get("type", "sonarr")
    series_id = series_info.get("id")
    series_title = series_info.get("title")
    episode_count = series_info.get("episode_count", 0)

    logger.info(f"✓ Found {series_type.upper()} series: {series_title} (ID: {series_id}) - {episode_count} episodes")
    logger.info(f"  Path: {folder}")
    logger.info(f"Will rename {episode_count} episodes with {wait_seconds} second(s) between each")

    # Load temp cache with episodes
    cache_file = Path("cache") / "temp_episodes.json"
    if not cache_file.exists():
        logger.error(f"Episode cache not found: {cache_file}")
        return

    try:
        with open(cache_file, 'r') as f:
            cache_data = json.load(f)
        episodes = cache_data.get("episodes", [])
    except Exception as e:
        logger.error(f"Error reading episode cache: {e}")
        return

    if not episodes:
        logger.warning("No episodes found in cache")
        return

    # Sort episodes by season and episode number
    episodes.sort(key=lambda x: (x.get("seasonNumber", 0), x.get("episodeNumber", 0)))

    # Filter: only episodes with actual files (hasFile: true) and skip season 0
    episodes = [ep for ep in episodes if ep.get("hasFile") and ep.get("seasonNumber", 0) > 0]

    # Filter by season if specified
    if season:
        episodes = [ep for ep in episodes if ep.get("seasonNumber") == season]
        logger.info(f"Filtering to season {season}")

    if not episodes:
        logger.warning("No episodes with files found (excluding specials)")
        return

    logger.info(f"Starting rolling rename of {len(episodes)} episodes...\n")

    # Create progress tracking file
    progress_file = Path("logs") / "rolling_rename_progress.json"
    progress_file.parent.mkdir(parents=True, exist_ok=True)

    # Load existing progress
    completed = {}
    if progress_file.exists():
        try:
            with open(progress_file, 'r') as f:
                progress_data = json.load(f)
            completed = progress_data.get("completed", {})
            logger.info(f"Found {len(completed)} previously completed episodes")
        except Exception as e:
            logger.warning(f"Could not load progress file: {e}")

    # Filter out already completed episodes
    remaining_episodes = [ep for ep in episodes if str(ep.get("id")) not in completed]

    if not remaining_episodes:
        logger.info("✓ All episodes have already been renamed!")
        return

    logger.info(f"Starting rolling rename of {len(remaining_episodes)} episodes...\n")

    for idx, episode in enumerate(remaining_episodes, 1):
        season = episode.get("seasonNumber")
        ep_num = episode.get("episodeNumber")
        title = episode.get("title", "Unknown")
        episode_id = episode.get("id")

        logger.info(f"[{idx}/{len(remaining_episodes)}] {series_title} - S{season:02d}E{ep_num:02d} - {title}")

        try:
            if series_type == "sonarr":
                # Get episode file ID
                headers = {"X-Api-Key": sr_helper.sonarr_api_key}
                episode_url = f"{sr_helper.sonarr_url}/episode/{episode_id}"
                response = requests.get(episode_url, headers=headers, timeout=10)
                response.raise_for_status()
                ep_data = response.json()

                # Trigger rename
                if sr_helper.trigger_sonarr_rename(series_id, ep_data.get("episodeFileId")):
                    logger.info(f" ✓ Rename triggered for {series_title} S{season:02d}E{ep_num:02d}")
                    completed[str(episode_id)] = {
                        "season": season,
                        "episode": ep_num,
                        "title": title,
                        "timestamp": time.time()
                    }
                else:
                    logger.warning(f" ✗ Failed to trigger rename")
            else:
                # Get movie file ID
                headers = {"X-Api-Key": sr_helper.radarr_api_key}
                movie_url = f"{sr_helper.radarr_url}/movie/{episode_id}"
                response = requests.get(movie_url, headers=headers, timeout=10)
                response.raise_for_status()
                movie_data = response.json()

                # Trigger rename
                movie_file_id = movie_data.get("movieFile", {}).get("id")
                if movie_file_id and sr_helper.trigger_radarr_rename(movie_file_id):
                    logger.info(f" ✓ Rename triggered for {series_title}")
                    completed[str(episode_id)] = {
                        "season": season,
                        "episode": ep_num,
                        "title": title,
                        "timestamp": time.time()
                    }
                else:
                    logger.warning(f" ✗ Failed to trigger rename")
        except Exception as e:
            logger.warning(f" ✗ Error triggering rename: {e}")

        # Save progress after each update
        try:
            with open(progress_file, 'w') as f:
                json.dump({
                    "series_id": series_id,
                    "series_title": series_title,
                    "total_episodes": len(episodes),
                    "completed_episodes": len(completed),
                    "completed": completed
                }, f, indent=2)
        except Exception as e:
            logger.warning(f"Could not save progress: {e}")

        # Wait before next update (except for last episode)
        if idx < len(remaining_episodes):
            logger.info(f" Waiting {wait_seconds} second(s) before next update...")
            time.sleep(wait_seconds)

    logger.info(f"\n✓ Rolling rename complete! {len(remaining_episodes)} episodes updated in {series_title}")


def main():
    parser = argparse.ArgumentParser(
        description="Rolling rename script - Updates episode release groups with delays",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python main.py "P:\\tv\\Supernatural"
  python main.py "P:\\tv\\Supernatural" --wait 300
  python main.py "P:\\tv\\Breaking Bad" -w 60
  python main.py "P:\\tv\\Supernatural" -s 5
  python main.py "P:\\tv\\Supernatural" --season 10 --wait 180
"""
    )

    parser.add_argument("folder", nargs="?", help="Path to series folder")
    parser.add_argument("-w", "--wait", type=int, default=20,
                        help="Seconds to wait between renames (default: 20)")
    parser.add_argument("-s", "--season", type=int, default=None,
                        help="Target specific season number (default: all seasons)")

    args = parser.parse_args()

    # If no folder provided, ask for it
    if not args.folder:
        args.folder = input("Enter series folder path: ").strip()

    if not args.folder:
        logger.error("No folder path provided")
        return

    rolling_rename_series(args.folder, args.wait, args.season)


if __name__ == "__main__":
    main()
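
For reference, a sketch of the resume state that rolling_rename_series() writes to logs/rolling_rename_progress.json after each episode. The field names match the json.dump call above; the values here are made-up examples. On a re-run, episodes whose IDs already appear under "completed" are skipped, so an interrupted rolling rename resumes where it left off.

# Hypothetical contents of logs/rolling_rename_progress.json, shown as a Python dict:
progress = {
    "series_id": 5,
    "series_title": "Supernatural",
    "total_episodes": 327,
    "completed_episodes": 2,
    "completed": {
        "1001": {"season": 1, "episode": 1, "title": "Pilot", "timestamp": 1700000000.0},
        "1002": {"season": 1, "episode": 2, "title": "Wendigo", "timestamp": 1700000120.0},
    },
}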