Used AI for some modifications

TylerCG 2025-12-24 12:01:40 -05:00
parent 14ef29845d
commit da19d5e4b9
4 changed files with 381 additions and 103 deletions

View File

@@ -13,6 +13,10 @@
<!-- Allowed input extensions -->
<extensions>.mkv,.mp4</extensions>
<!-- Reduction ratio threshold: if output >= this ratio of input, retry/fail -->
<!-- Default is 0.5 (50%); can be overridden with the --ratio CLI flag -->
<reduction_ratio_threshold>0.65</reduction_ratio_threshold>
</general>
<!-- =============================
@@ -48,6 +52,12 @@
<movie_1080>32</movie_1080>
<movie_720>34</movie_720>
</cq>
<crf>
<tv_1080>28</tv_1080>
<tv_720>32</tv_720>
<movie_1080>32</movie_1080>
<movie_720>34</movie_720>
</crf>
<!-- Fallback bitrate-based mode -->
<fallback>
@@ -72,17 +82,26 @@
============================= -->
<audio>
<stereo>
<low>64000</low>
<medium>96000</medium>
<high>128000</high>
<low>96000</low>
<medium>128000</medium>
<high>160000</high>
</stereo>
<multi_channel>
<low>192000</low>
<high>192000</high>
<low>384000</low>
<medium>512000</medium>
<high>640000</high>
</multi_channel>
<codec_rules>
<use_opus_below_kbps>128</use_opus_below_kbps>
</codec_rules>
</audio>
<!-- =============================
IGNORE LIST (filenames to skip)
============================= -->
<ignore_tags>
<tag>ehx</tag>
<tag>megusta</tag>
</ignore_tags>
</config>
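
The two new blocks above feed the skip and retry logic in main.py below: filenames containing an ignore tag are skipped outright, and an output still at or above the threshold ratio of its input triggers the bitrate retry path. A minimal sketch of that behaviour, with hypothetical helper names (should_skip, exceeds_threshold) and the values shown above:

from pathlib import Path

REDUCTION_RATIO_THRESHOLD = 0.65   # <reduction_ratio_threshold>
IGNORE_TAGS = ["ehx", "megusta"]   # <ignore_tags>

def should_skip(file: Path) -> bool:
    # Files whose names already carry an ignore tag are not re-encoded.
    return any(tag.lower() in file.name.lower() for tag in IGNORE_TAGS)

def exceeds_threshold(input_bytes: int, output_bytes: int) -> bool:
    # An output at or above 65% of the input size triggers the retry/failure path.
    return output_bytes / input_bytes >= REDUCTION_RATIO_THRESHOLD

print(should_skip(Path("Show - S01E01 -EHX.mkv")))        # True  ("ehx" tag present)
print(exceeds_threshold(10_000_000_000, 7_000_000_000))   # True  (70% >= 65%)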

View File

@@ -8,6 +8,7 @@ DEFAULT_XML = """<?xml version="1.0" encoding="UTF-8"?>
<processing_folder>processing</processing_folder>
<suffix> -EHX</suffix>
<extensions>.mkv,.mp4</extensions>
<reduction_ratio_threshold>0.5</reduction_ratio_threshold>
</general>
<path_mappings>
<map from="P:\\tv" to="/mnt/plex/tv" />
@@ -44,6 +45,10 @@ DEFAULT_XML = """<?xml version="1.0" encoding="UTF-8"?>
<high>192000</high>
</multi_channel>
</audio>
<ignore_tags>
<tag>ehx</tag>
<tag>megusta</tag>
</ignore_tags>
</config>
"""
@@ -66,6 +71,9 @@ def load_config_xml(path: Path) -> dict:
extensions_elem = general.find("extensions") if general is not None else None
extensions = extensions_elem.text.split(",") if extensions_elem is not None else [".mkv", ".mp4"]
reduction_ratio_elem = general.find("reduction_ratio_threshold") if general is not None else None
reduction_ratio_threshold = float(reduction_ratio_elem.text) if reduction_ratio_elem is not None else 0.5
# --- Path Mappings ---
path_mappings = {}
for m in root.findall("path_mappings/map"):
@@ -112,11 +120,19 @@ def load_config_xml(path: Path) -> dict:
if child.text:
audio["multi_channel"][child.tag] = int(child.text)
# --- Ignore Tags ---
ignore_tags = []
for tag_elem in root.findall("ignore_tags/tag"):
if tag_elem.text:
ignore_tags.append(tag_elem.text)
return {
"processing_folder": processing_folder,
"suffix": suffix,
"extensions": [ext.lower() for ext in extensions],
"path_mappings": path_mappings,
"encode": {"cq": cq, "fallback": fallback, "filters": filters},
"audio": audio
"audio": audio,
"ignore_tags": ignore_tags,
"reduction_ratio_threshold": reduction_ratio_threshold
}
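
A minimal usage sketch of the loader above, assuming config.xml sits next to the caller as it does for main.py:

from pathlib import Path
from core.config_helper import load_config_xml

config = load_config_xml(Path(__file__).parent / "config.xml")

# Keys added in this change, with the fallbacks used when the XML omits them:
print(config["reduction_ratio_threshold"])   # 0.5 unless set in <general>
print(config["ignore_tags"])                 # e.g. ["ehx", "megusta"]
print(config["audio"]["multi_channel"])      # bucket bitrates in bits per second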

View File

@@ -636,3 +636,20 @@
2025-10-05 12:47:11 [INFO] Skipping: Vikings - S03E07 - Paris x265 AAC Bluray-1080p Silence -EHX.mkv
2025-10-05 12:47:11 [INFO] Skipping: Vikings - S03E10 - The Dead x265 AAC Bluray-1080p Silence -EHX.mkv
2025-10-05 12:47:11 [INFO] Skipping: Vikings - S03E01 - Mercenary x265 AAC Bluray-1080p Silence -EHX.mkv
2025-10-05 18:40:24 [ERROR] Folder not found: /Volumes/plex/tv/Taskmaster\ \(NZ\)/Season\ 2
2025-10-05 18:41:24 [ERROR] Folder not found: /Volumes/plex/tv/Taskmaster (NZ\)/Season 2
2025-10-05 18:41:31 [INFO] Skipping: Taskmaster (NZ) - S02E06 - Eat Your Asses x264 AC3 HDTV-1080p WURUHI -EHX.mkv
2025-10-05 18:41:31 [INFO] Processing: Taskmaster (NZ) - S02E04 - Unbung x264 AC3 HDTV-1080p WURUHI.mkv
2025-10-05 18:42:18 [INFO] Copied Taskmaster (NZ) - S02E04 - Unbung x264 AC3 HDTV-1080p WURUHI.mkv → Taskmaster (NZ) - S02E04 - Unbung x264 AC3 HDTV-1080p WURUHI.mkv
2025-10-05 18:42:19 [INFO]
🧩 ENCODE SETTINGS
• Resolution: 1920x1080
• Scale Filter: bicubic
• CQ: 28
• Video Encoder: av1_nvenc (preset p1, pix_fmt p010le)
• Audio Streams:
2025-10-05 18:42:19 [INFO] - Stream #1: 6ch, src=und, avg_bitrate=128kbps, metadata=384kbps, bucket_target=192.0kbps
2025-10-05 18:42:19 [INFO] Running CQ encode: Taskmaster (NZ) - S02E04 - Unbung x264 AC3 HDTV-1080p WURUHI -EHX.mkv
2025-10-05 18:42:19 [WARNING] FFmpeg GPU encode failed, trying CPU fallback: Command '['ffmpeg', '-y', '-i', 'processing/Taskmaster (NZ) - S02E04 - Unbung x264 AC3 HDTV-1080p WURUHI.mkv', '-vf', 'scale=1920:1080:flags=bicubic:force_original_aspect_ratio=decrease', '-map', '0:v', '-map', '0:a', '-map', '0:s?', '-c:v', 'av1_nvenc', '-preset', 'p1', '-pix_fmt', 'p010le', '-cq', '28', '-c:a:0', 'aac', '-b:a:0', '192000', '-ac:0', '6', '-c:s', 'copy', 'processing/Taskmaster (NZ) - S02E04 - Unbung x264 AC3 HDTV-1080p WURUHI -EHX.mkv']' returned non-zero exit status 8.
2025-10-05 18:42:19 [ERROR] CPU fallback failed: Command '['ffmpeg', '-y', '-i', 'processing/Taskmaster (NZ) - S02E04 - Unbung x264 AC3 HDTV-1080p WURUHI.mkv', '-vf', 'scale=1920:1080:flags=bicubic:force_original_aspect_ratio=decrease', '-map', '0:v', '-map', '0:a', '-map', '0:s?', '-c:v', 'libsvtav1', '-preset', '6', '-pix_fmt', 'p010le', '-cq', '28', '-c:a:0', 'aac', '-b:a:0', '192000', '-ac:0', '6', '-c:s', 'copy', 'processing/Taskmaster (NZ) - S02E04 - Unbung x264 AC3 HDTV-1080p WURUHI -EHX.mkv']' returned non-zero exit status 8.
2025-10-05 18:42:19 [ERROR] FFmpeg failed: Command '['ffmpeg', '-y', '-i', 'processing/Taskmaster (NZ) - S02E04 - Unbung x264 AC3 HDTV-1080p WURUHI.mkv', '-vf', 'scale=1920:1080:flags=bicubic:force_original_aspect_ratio=decrease', '-map', '0:v', '-map', '0:a', '-map', '0:s?', '-c:v', 'libsvtav1', '-preset', '6', '-pix_fmt', 'p010le', '-cq', '28', '-c:a:0', 'aac', '-b:a:0', '192000', '-ac:0', '6', '-c:s', 'copy', 'processing/Taskmaster (NZ) - S02E04 - Unbung x264 AC3 HDTV-1080p WURUHI -EHX.mkv']' returned non-zero exit status 8.

main.py
View File

@@ -6,6 +6,8 @@ import os
import shutil
import subprocess
from pathlib import Path
from functools import lru_cache
from concurrent.futures import ThreadPoolExecutor, as_completed
from core.config_helper import load_config_xml
from core.logger_helper import setup_logger
@@ -27,20 +29,37 @@ if not TRACKER_FILE.exists():
"type","show","filename","original_size_MB","processed_size_MB","percentage","method"
])
# =============================
# FFPROBE CACHING
# =============================
@lru_cache(maxsize=256)
def get_audio_streams_cached(input_file_str: str):
"""Cached ffprobe call to avoid redundant queries"""
input_file = Path(input_file_str)
cmd = [
"ffprobe","-v","error","-select_streams","a",
"-show_entries","stream=index,channels,duration,bit_rate,tags=language",
"-of","json", str(input_file)
]
result = subprocess.run(cmd, capture_output=True, text=True)
return json.loads(result.stdout)
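
# Note (illustrative): the cache key is the plain path string, so probing the same
# file twice only spawns ffprobe once, e.g.
#   get_audio_streams_cached("processing/Episode.mkv")   # miss -> runs ffprobe
#   get_audio_streams_cached("processing/Episode.mkv")   # hit  -> cached JSON
#   get_audio_streams_cached.cache_info()                # CacheInfo(hits=1, misses=1, ...)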
# =============================
# AUDIO BUCKET LOGIC
# =============================
def choose_audio_bitrate(channels: int, bitrate_kbps: int, audio_config: dict) -> int:
if channels == 2:
if bitrate_kbps < 80:
if bitrate_kbps < 100:
return audio_config["stereo"]["low"]
elif bitrate_kbps < 112:
elif bitrate_kbps < 130:
return audio_config["stereo"]["medium"]
else:
return audio_config["stereo"]["high"]
return audio_config["stereo"]["high"]
else:
if bitrate_kbps < 176:
if bitrate_kbps < 390:
return audio_config["multi_channel"]["low"]
elif bitrate_kbps < 515:
return audio_config["multi_channel"]["medium"]
else:
return audio_config["multi_channel"]["high"]
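
# Worked example of the new buckets (illustrative; bitrates taken from the updated
# config.xml above, thresholds 100/130 kbps stereo and 390/515 kbps multi-channel
# from the updated branches):
_example_audio = {
    "stereo":        {"low": 96000,  "medium": 128000, "high": 160000},
    "multi_channel": {"low": 384000, "medium": 512000, "high": 640000},
}
assert choose_audio_bitrate(2, 128, _example_audio) == 128000  # 128 < 130 -> stereo "medium"
assert choose_audio_bitrate(6, 128, _example_audio) == 384000  # 128 < 390 -> multi_channel "low"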
@@ -93,46 +112,137 @@ def get_audio_streams(input_file: Path):
return streams
# =============================
# FFmpeg ENCODE
# OUTPUT VALIDATION
# =============================
def validate_output(input_file: Path, output_file: Path, expected_width: int, expected_height: int) -> bool:
"""Validate that output file has correct resolution and audio tracks"""
try:
cmd = [
"ffprobe", "-v", "error",
"-select_streams", "v:0",
"-show_entries", "stream=width,height",
"-of", "json", str(output_file)
]
result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
data = json.loads(result.stdout)
if not data.get("streams"):
logger.warning(f"❌ Validation failed: No video stream in {output_file.name}")
return False
width = data["streams"][0].get("width", 0)
height = data["streams"][0].get("height", 0)
# Allow small variance for scaling
if abs(width - expected_width) > 10 or abs(height - expected_height) > 10:
logger.warning(f"❌ Validation failed: Resolution {width}x{height}, expected ~{expected_width}x{expected_height}")
return False
logger.info(f"✅ Validation passed: {output_file.name} ({width}x{height})")
return True
except Exception as e:
logger.warning(f"⚠️ Validation skipped (probe error): {e}")
return True # Don't fail on validation errors
# =============================
# FFmpeg ENCODE (GPU + CPU fallback, per-resolution CPU preset)
# =============================
def run_ffmpeg(input_file: Path, output_file: Path, cq: int, scale_width: int, scale_height: int,
filter_flags: str, audio_config: dict, method: str):
filter_flags: str, audio_config: dict, method: str, crf_cpu: int, verbose: bool = False):
streams = get_audio_streams(input_file)
header = f"\n🧩 ENCODE SETTINGS\n • Resolution: {scale_width}x{scale_height}\n • Scale Filter: {filter_flags}\n • CQ: {cq if method=='CQ' else 'N/A'}\n • Video Encoder: av1_nvenc (preset p1, pix_fmt p010le)\n • Audio Streams:"
encoder_name = "av1_nvenc"
pix_fmt = "p010le"
header = (
f"\n🧩 ENCODE SETTINGS\n"
f" • Resolution: {scale_width}x{scale_height}\n"
f" • Scale Filter: {filter_flags}\n"
f" • CQ: {cq if method=='CQ' else 'N/A'}\n"
f" • CPU CRF: {crf_cpu}\n"
f" • Video Encoder: {encoder_name} (preset p1, pix_fmt {pix_fmt})\n"
f" • Audio Streams:"
)
logger.info(header)
print(" ")
# print(header)
print(header)
for (index, channels, avg_bitrate, src_lang, meta_bitrate) in streams:
br = choose_audio_bitrate(channels, avg_bitrate, audio_config)
line = f" - Stream #{index}: {channels}ch, src={src_lang}, avg_bitrate={avg_bitrate}kbps, metadata={meta_bitrate}kbps, bucket_target={br/1000:.1f}kbps"
output_channels = 2 if scale_height <= 720 else (6 if channels >= 6 else 2)
br = choose_audio_bitrate(output_channels, avg_bitrate, audio_config)
line = (
f" - Stream #{index}: {channels}ch→{output_channels}ch, src={src_lang}, "
f"avg_bitrate={avg_bitrate}kbps, metadata={meta_bitrate}kbps, bucket_target={br/1000:.1f}kbps"
)
print(line)
logger.info(line)
cmd = ["ffmpeg","-y","-i",str(input_file),
"-vf",f"scale={scale_width}:{scale_height}:flags={filter_flags}:force_original_aspect_ratio=decrease",
"-map","0:v","-map","0:a","-map","0:s?",
"-c:v","av1_nvenc","-preset","p1","-pix_fmt","p010le"]
cmd = [
"ffmpeg", "-y", "-i", str(input_file),
"-vf", f"scale={scale_width}:{scale_height}:flags={filter_flags}:force_original_aspect_ratio=decrease",
"-map", "0:v", "-map", "0:a", "-map", "0:s?",
"-c:v", encoder_name, "-preset", "p1", "-pix_fmt", pix_fmt
]
if method=="CQ":
# Video quality
if method == "CQ":
cmd += ["-cq", str(cq)]
else:
if scale_height>=1080:
vb, maxrate, bufsize = "1500k","1750k","2250k"
if scale_height >= 1080:
vb, maxrate, bufsize = "1500k", "1750k", "2250k"
else:
vb, maxrate, bufsize = "900k","1250k","1600k"
cmd += ["-b:v",vb,"-maxrate",maxrate,"-bufsize",bufsize]
vb, maxrate, bufsize = "900k", "1250k", "1600k"
cmd += ["-b:v", vb, "-maxrate", maxrate, "-bufsize", bufsize]
# Audio streams
for i, (index, channels, avg_bitrate, src_lang, meta_bitrate) in enumerate(streams):
br = choose_audio_bitrate(channels, avg_bitrate, audio_config)
cmd += [f"-c:a:{i}","aac",f"-b:a:{i}",str(br),f"-ac:{i}",str(channels)]
# Determine output channels: 720p -> 2ch, 1080p -> 6ch if input>=6 else 2ch
output_channels = 2 if scale_height <= 720 else (6 if channels >= 6 else 2)
# Choose bitrate based on OUTPUT channels, not input
br = choose_audio_bitrate(output_channels, avg_bitrate, audio_config)
cmd += [f"-c:a:{i}", "aac", f"-b:a:{i}", str(br), f"-ac:{i}", str(output_channels)]
cmd += ["-c:s","copy",str(output_file)]
cmd += ["-c:s", "copy", str(output_file)]
print(f"\n🎬 Running {method} encode: {output_file.name}")
logger.info(f"Running {method} encode: {output_file.name}")
if verbose:
logger.info(f"FFmpeg command: {' '.join(cmd)}")
subprocess.run(cmd, check=True)
# Try GPU encoder first
try:
if verbose:
subprocess.run(cmd, check=True)
else:
subprocess.run(cmd, check=True, capture_output=True)
except subprocess.CalledProcessError as e:
print(f"❌ FFmpeg failed with GPU encoder on {input_file.name}: {e}")
logger.error(f"GPU encode failed for {input_file.name}. Command: {' '.join(cmd)}")
# CPU fallback
cmd_cpu = cmd.copy()
idx = cmd_cpu.index(encoder_name)
cmd_cpu[idx] = "libsvtav1"
# CPU preset based on resolution
cpu_preset = "8" if scale_height <= 720 else "6" # faster for 720p, slower for 1080p
preset_idx = cmd_cpu.index("p1")
cmd_cpu[preset_idx] = cpu_preset
# Replace -cq with -crf
if "-cq" in cmd_cpu:
cq_idx = cmd_cpu.index("-cq")
cmd_cpu[cq_idx] = "-crf"
cmd_cpu[cq_idx + 1] = str(crf_cpu)
try:
if verbose:
subprocess.run(cmd_cpu, check=True)
else:
subprocess.run(cmd_cpu, check=True, capture_output=True)
print("✅ CPU fallback succeeded")
logger.info("CPU fallback succeeded")
except subprocess.CalledProcessError as e_cpu:
print(f"❌ CPU fallback also failed for {input_file.name}: {e_cpu}")
logger.error(f"CPU fallback failed for {input_file.name}. Command: {' '.join(cmd_cpu)}")
raise e_cpu
orig_size = input_file.stat().st_size
out_size = output_file.stat().st_size
@@ -143,10 +253,12 @@ def run_ffmpeg(input_file: Path, output_file: Path, cq: int, scale_width: int, s
return orig_size, out_size, reduction_ratio
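
# Illustration of the in-place CPU-fallback rewrite above, on a trimmed-down command
# list (the real one also carries the scale, map, audio and subtitle arguments):
_gpu_cmd = ["ffmpeg", "-y", "-i", "in.mkv", "-c:v", "av1_nvenc", "-preset", "p1", "-cq", "28", "out.mkv"]
_cpu_cmd = _gpu_cmd.copy()
_cpu_cmd[_cpu_cmd.index("av1_nvenc")] = "libsvtav1"   # swap encoder
_cpu_cmd[_cpu_cmd.index("p1")] = "6"                  # 6 for 1080p, 8 for 720p
_cq_idx = _cpu_cmd.index("-cq")
_cpu_cmd[_cq_idx:_cq_idx + 2] = ["-crf", "32"]        # libsvtav1 takes -crf, not -cq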
# =============================
# PROCESS FOLDER
# =============================
def process_folder(folder: Path, cq: int, resolution: str, config: dict):
def process_folder(folder: Path, cq: int, resolution: str, config: dict, dry_run: bool = False,
verbose: bool = False, backup: bool = False, cleanup: bool = False, parallel: int = 1):
if not folder.exists():
print(f"❌ Folder not found: {folder}")
logger.error(f"Folder not found: {folder}")
@@ -156,110 +268,212 @@ def process_folder(folder: Path, cq: int, resolution: str, config: dict):
filters_config = config["encode"]["filters"]
suffix = config["suffix"]
extensions = config["extensions"]
res_height = 1080 if resolution=="1080" else 720
res_width = 1920 if resolution=="1080" else 1280
filter_flags = filters_config.get("default","lanczos")
ignore_tags = config["ignore_tags"]
reduction_ratio_threshold = config["reduction_ratio_threshold"]
res_height = 1080 if resolution == "1080" else 720
res_width = 1920 if resolution == "1080" else 1280
# Determine type and resolution keys
folder_lower = str(folder).lower()
if "\\tv\\" in folder_lower or "/tv/" in folder_lower:
filter_flags = filters_config.get("tv","bicubic")
cq_default = config["encode"]["cq"].get(f"tv_{resolution}",32)
type_key = "tv"
filter_flags = filters_config.get("tv", "bicubic")
else:
cq_default = config["encode"]["cq"].get(f"movie_{resolution}",32)
type_key = "movie"
filter_flags = filters_config.get("default", "lanczos")
res_key = "1080" if resolution == "1080" else "720"
# Get CQ and CRF from config
cq_default = config["encode"]["cq"].get(f"{type_key}_{res_key}", 32)
crf_cpu = config["encode"]["crf"].get(f"{type_key}_{res_key}", 32)
if cq is None:
cq = cq_default
processing_folder = Path(config["processing_folder"])
processing_folder.mkdir(parents=True, exist_ok=True)
# Cleanup old processing folder if requested
if cleanup and processing_folder.exists():
print(f"🧹 Cleaning up old processing folder: {processing_folder}")
logger.info(f"Cleaning up old processing folder: {processing_folder}")
shutil.rmtree(processing_folder, ignore_errors=True)
processing_folder.mkdir(parents=True, exist_ok=True)
# Backup folder setup
backup_folder = None
if backup:
backup_folder = folder.parent / f"{folder.name}_backup"
backup_folder.mkdir(parents=True, exist_ok=True)
print(f"💾 Backup enabled: {backup_folder}")
logger.info(f"Backup folder: {backup_folder}")
# Dry-run message
if dry_run:
print("🔍 DRY-RUN MODE: No files will be encoded or deleted")
logger.info("DRY-RUN MODE: No files will be encoded or deleted")
# Track if we switch to bitrate mode
use_bitrate = False
# Collect all files to process first
files_to_process = []
for file in folder.rglob("*"):
if file.suffix.lower() not in extensions:
continue
if any(tag.lower() in file.name.lower() for tag in ["ehx","megusta"]):
if any(tag.lower() in file.name.lower() for tag in ignore_tags):
print(f"⏭️ Skipping: {file.name}")
logger.info(f"Skipping: {file.name}")
continue
files_to_process.append(file)
print("="*60)
logger.info(f"Processing: {file.name}")
print(f"📁 Processing: {file.name}")
if not files_to_process:
print("❌ No files found to process")
logger.info("No files found to process")
return
temp_input = processing_folder / file.name
shutil.copy2(file, temp_input)
logger.info(f"Copied {file.name}{temp_input.name}")
temp_output = processing_folder / f"{file.stem}{suffix}{file.suffix}"
print(f"📋 Found {len(files_to_process)} file(s) to process")
method = "Bitrate" if use_bitrate else "CQ"
# Define the encoding task
def encode_file(file: Path):
"""Encodes a single file - used for parallel processing"""
try:
orig_size, out_size, reduction_ratio = run_ffmpeg(temp_input, temp_output, cq, res_width, res_height, filter_flags, audio_config, method)
except subprocess.CalledProcessError as e:
print(f"❌ FFmpeg failed: {e}")
logger.error(f"FFmpeg failed: {e}")
temp_input.unlink(missing_ok=True)
break
print("="*60)
logger.info(f"Processing: {file.name}")
print(f"📁 Processing: {file.name}")
temp_input = processing_folder / file.name
shutil.copy2(file, temp_input)
logger.info(f"Copied {file.name}{temp_input.name}")
temp_output = processing_folder / f"{file.stem}{suffix}{file.suffix}"
method = "Bitrate" if use_bitrate else "CQ"
if dry_run:
print(f"🔍 [DRY-RUN] Would encode: {temp_output}")
logger.info(f"[DRY-RUN] Would encode: {temp_output}")
return None
if method=="CQ" and reduction_ratio>=0.5:
print(f"⚠️ CQ encode did not achieve target size. Switching all remaining files to Bitrate.")
logger.warning("CQ encode failed target. Switching to Bitrate for remaining files.")
use_bitrate = True
try:
# Retry current file using bitrate
temp_output.unlink(missing_ok=True)
orig_size, out_size, reduction_ratio = run_ffmpeg(temp_input, temp_output, cq, res_width, res_height, filter_flags, audio_config, "Bitrate")
if reduction_ratio>=0.5:
print("❌ Bitrate encode also failed target. Stopping process.")
logger.error("Bitrate encode failed target. Stopping process.")
temp_input.unlink(missing_ok=True)
break
orig_size, out_size, reduction_ratio = run_ffmpeg(
temp_input, temp_output, cq, res_width, res_height, filter_flags,
audio_config, method, crf_cpu, verbose
)
except subprocess.CalledProcessError as e:
print(f"Bitrate retry failed: {e}")
logger.error(f"Bitrate retry failed: {e}")
print(f"❌ FFmpeg failed: {e}")
logger.error(f"FFmpeg failed: {e}")
temp_input.unlink(missing_ok=True)
break
elif method=="Bitrate" and reduction_ratio>=0.5:
print("❌ Bitrate encode failed target. Stopping process.")
logger.error("Bitrate encode failed target. Stopping process.")
temp_input.unlink(missing_ok=True)
break
temp_output.unlink(missing_ok=True)
return None
dest_file = file.parent / temp_output.name
shutil.move(temp_output, dest_file)
print(f"🚚 Moved {temp_output.name}{dest_file.name}")
logger.info(f"Moved {temp_output.name}{dest_file.name}")
# Validate output
if not validate_output(temp_input, temp_output, res_width, res_height):
print(f"⚠️ Validation failed for {temp_output.name}, keeping original")
logger.warning(f"Validation failed for {temp_output.name}")
temp_input.unlink(missing_ok=True)
temp_output.unlink(missing_ok=True)
return None
folder_parts = [p.lower() for p in folder.parts]
if "tv" in folder_parts:
f_type = "tv"
tv_index = folder_parts.index("tv")
show = folder.parts[tv_index + 1] if len(folder.parts) > tv_index + 1 else "Unknown"
elif "anime" in folder_parts:
f_type = "anime"
anime_index = folder_parts.index("anime")
show = folder.parts[anime_index + 1] if len(folder.parts) > anime_index + 1 else "Unknown"
else:
f_type = "movie"
show = "N/A"
# Handle fallback if CQ/Bitrate didn't reach target
if method == "CQ" and reduction_ratio >= reduction_ratio_threshold:
print(f"⚠️ CQ encode did not achieve target size ({reduction_ratio:.1%} >= {reduction_ratio_threshold:.1%}). Retrying with Bitrate.")
logger.warning(f"CQ encode failed target ({reduction_ratio:.1%}). Retrying with Bitrate.")
try:
temp_output.unlink(missing_ok=True)
orig_size, out_size, reduction_ratio = run_ffmpeg(
temp_input, temp_output, cq, res_width, res_height, filter_flags,
audio_config, "Bitrate", crf_cpu, verbose
)
if reduction_ratio >= reduction_ratio_threshold:
print("❌ Bitrate encode also failed target.")
logger.error("Bitrate encode failed target.")
temp_input.unlink(missing_ok=True)
temp_output.unlink(missing_ok=True)
return None
except subprocess.CalledProcessError as e:
print(f"❌ Bitrate retry failed: {e}")
logger.error(f"Bitrate retry failed: {e}")
temp_input.unlink(missing_ok=True)
temp_output.unlink(missing_ok=True)
return None
elif reduction_ratio >= reduction_ratio_threshold:
print("❌ Encode failed target. Stopping.")
logger.error("Encode failed target.")
temp_input.unlink(missing_ok=True)
temp_output.unlink(missing_ok=True)
return None
orig_size_mb = round(orig_size / 1e6, 2)
proc_size_mb = round(out_size / 1e6, 2)
percentage = round(proc_size_mb / orig_size_mb * 100, 1)
# Move final file back to original folder
dest_file = file.parent / temp_output.name
if not dry_run:
shutil.move(temp_output, dest_file)
print(f"🚚 Moved {temp_output.name}{dest_file.name}")
logger.info(f"Moved {temp_output.name}{dest_file.name}")
with open(TRACKER_FILE, "a", newline="", encoding="utf-8") as f:
writer = csv.writer(f)
writer.writerow([f_type, show, dest_file.name, orig_size_mb, proc_size_mb, percentage, method])
# Backup original if requested
if backup and not dry_run:
backup_dest = backup_folder / file.name
shutil.copy2(file, backup_dest)
logger.info(f"Backed up original to {backup_dest}")
logger.info(f"Tracked conversion: {dest_file.name}, {orig_size_mb}MB → {proc_size_mb}MB ({percentage}%), method={method}")
print(f"📝 Logged conversion: {dest_file.name} ({percentage}%), method={method}")
# Determine folder type and show
folder_parts = [p.lower() for p in folder.parts]
if "tv" in folder_parts:
f_type = "tv"
tv_index = folder_parts.index("tv")
show = folder.parts[tv_index + 1] if len(folder.parts) > tv_index + 1 else "Unknown"
elif "anime" in folder_parts:
f_type = "anime"
anime_index = folder_parts.index("anime")
show = folder.parts[anime_index + 1] if len(folder.parts) > anime_index + 1 else "Unknown"
else:
f_type = "movie"
show = "N/A"
orig_size_mb = round(orig_size / 1e6, 2)
proc_size_mb = round(out_size / 1e6, 2)
percentage = round(proc_size_mb / orig_size_mb * 100, 1)
# Log conversion in tracker CSV (skip in dry-run)
if not dry_run:
with open(TRACKER_FILE, "a", newline="", encoding="utf-8") as f:
writer = csv.writer(f)
writer.writerow([f_type, show, dest_file.name, orig_size_mb, proc_size_mb, percentage, method])
logger.info(f"Tracked conversion: {dest_file.name}, {orig_size_mb}MB → {proc_size_mb}MB ({percentage}%), method={method}")
print(f"📝 Logged conversion: {dest_file.name} ({percentage}%), method={method}")
# Delete temporary and original files
if not dry_run:
try:
temp_input.unlink()
file.unlink()
logger.info(f"Deleted original and processing copy for {file.name}")
except Exception as e:
print(f"⚠️ Could not delete files: {e}")
logger.warning(f"Could not delete files: {e}")
return {"file": file.name, "orig": orig_size_mb, "proc": proc_size_mb, "pct": percentage}
try:
temp_input.unlink()
file.unlink()
logger.info(f"Deleted original and processing copy for {file.name}")
except Exception as e:
print(f"⚠️ Could not delete files: {e}")
logger.warning(f"Could not delete files: {e}")
logger.error(f"Unexpected error processing {file.name}: {e}", exc_info=True)
return None
# Process files sequentially or in parallel
if parallel > 1:
with ThreadPoolExecutor(max_workers=parallel) as executor:
futures = [executor.submit(encode_file, f) for f in files_to_process]
for future in as_completed(futures):
result = future.result()
else:
for file in files_to_process:
encode_file(file)
if dry_run:
print("🔍 DRY-RUN COMPLETE: No actual changes made")
else:
print("✅ Processing complete!")
# =============================
# MAIN
@@ -269,12 +483,24 @@ def main():
parser.add_argument("folder", help="Path to folder containing videos")
parser.add_argument("--cq", type=int, help="Override default CQ")
parser.add_argument("--r", "--resolution", dest="resolution", default="1080", choices=["720","1080"], help="Target resolution")
parser.add_argument("--dry-run", action="store_true", help="Preview files without encoding")
parser.add_argument("--verbose", "-v", action="store_true", help="Show FFmpeg output")
parser.add_argument("--backup", action="store_true", help="Backup original files before encoding")
parser.add_argument("--cleanup", action="store_true", help="Clean old processing folder on startup")
parser.add_argument("--parallel", type=int, default=1, metavar="N", help="Encode N files in parallel (experimental)")
parser.add_argument("--ratio", type=float, help="Reduction ratio threshold (default 0.5 from config)")
args = parser.parse_args()
config_path = Path(__file__).parent / "config.xml"
config = load_config_xml(config_path)
process_folder(Path(args.folder), args.cq, args.resolution, config)
# Override reduction ratio if provided
if args.ratio:
config["reduction_ratio_threshold"] = args.ratio
process_folder(Path(args.folder), args.cq, args.resolution, config,
dry_run=args.dry_run, verbose=args.verbose, backup=args.backup,
cleanup=args.cleanup, parallel=args.parallel)
if __name__ == "__main__":