#!/usr/bin/env python3
|
|
|
|
import os
|
|
import csv
|
|
import re
|
|
import shutil
|
|
import hashlib
|
|
from pathlib import Path
|
|
|
|
import bencodepy
|
|
|
|
|
|
# Input/output locations, all relative to the working directory.
BT_BACKUP_DIR = Path("BT_backup")          # qBittorrent's *.fastresume backup dir
FIXED_TORRENTS_DIR = Path("qbit1")         # repaired .torrent files to re-distribute
OUTPUT_DIR = Path("grouped_torrents")      # destination tree grouped by save path
def decode_value(v):
    """Return *v* decoded to str if it is bytes, otherwise unchanged.

    Bytes are decoded as UTF-8; undecodable sequences become U+FFFD
    replacement characters rather than raising.
    """
    return v.decode("utf-8", errors="replace") if isinstance(v, bytes) else v
def get_field(data, key):
    """Look up *key* in a bencode-decoded dict, trying the bytes key first.

    bencodepy yields dicts keyed by bytes, but callers may also hold dicts
    with str keys, so both forms are tried.  Returns the value passed
    through decode_value(), or None when the key is absent in both forms.

    Fix: the original used ``data.get(key.encode()) or data.get(key)``,
    which treated falsy-but-present values (b"", 0) as missing and fell
    through to the str key / None.
    """
    for candidate in (key.encode(), key):
        if candidate in data:
            return decode_value(data[candidate])
    return None
def safe_rel_path(path_str):
    """Convert an arbitrary save-path string into a safe *relative* Path.

    - Backslashes are normalized to forward slashes.
    - A Windows drive prefix ("C:/...") becomes a top-level directory ("C/...").
    - Leading slashes are stripped so the result is always relative.
    - Characters invalid on Windows filenames are replaced with "_".
    - "." segments are dropped and ".." segments are neutralized to "_",
      so the result can never climb out of the directory it is joined onto.
      (Fix: the original let ".." pass through the sanitizer regex,
      allowing path traversal above OUTPUT_DIR.)

    Returns Path("_unknown") for empty/None input or when nothing usable
    remains after sanitizing.
    """
    if not path_str:
        return Path("_unknown")

    s = path_str.strip().replace("\\", "/")

    drive = re.match(r"^([A-Za-z]):/(.*)$", s)
    if drive:
        # Keep the drive letter as an ordinary directory name.
        s = f"{drive.group(1)}/{drive.group(2)}"
    else:
        s = s.lstrip("/")

    parts = []
    for part in s.split("/"):
        part = part.strip()
        if not part or part == ".":
            continue
        if part == "..":
            # Neutralize parent references instead of dropping them, so the
            # directory depth stays predictable and nothing escapes upward.
            part = "_"
        else:
            part = re.sub(r'[<>:"|?*\x00-\x1F]', "_", part)
        parts.append(part)

    return Path(*parts) if parts else Path("_unknown")
def read_bencode(path):
    """Read the file at *path* and return its bencode-decoded contents."""
    raw = Path(path).read_bytes()
    return bencodepy.decode(raw)
def torrent_infohash_v1(torrent_path):
    """Compute the classic v1 infohash of a .torrent file.

    The infohash is the SHA-1 digest of the re-bencoded 'info' dictionary —
    the identifier qBittorrent uses for ordinary (v1) torrents.  Returned
    as uppercase hex.

    Raises ValueError when the file has no 'info' dictionary.
    """
    meta = read_bencode(torrent_path)
    info = meta.get(b"info") or meta.get("info")
    if info is None:
        raise ValueError("Brak pola 'info' w torrent")
    return hashlib.sha1(bencodepy.encode(info)).hexdigest().upper()
def build_fixed_torrent_index():
    """Index the repaired .torrent files in FIXED_TORRENTS_DIR.

    Returns a dict mapping uppercase infohash -> Path of the repaired
    .torrent.  Unreadable files are skipped with a warning; when several
    files share an infohash, the first one (in sorted filename order) wins
    and the rest are only counted.
    """
    mapping = {}
    dupe_count = 0

    for path in sorted(FIXED_TORRENTS_DIR.glob("*.torrent")):
        try:
            ih = torrent_infohash_v1(path)
        except Exception as e:
            print(f"[WARN] Nie moge odczytac {path}: {e}")
            continue

        if ih in mapping:
            dupe_count += 1
        else:
            mapping[ih] = path

    if dupe_count:
        print(f"[WARN] Zduplikowane infohash w qbit1: {dupe_count}")

    print(f"Zindeksowano poprawione torrenty: {len(mapping)}")
    return mapping
def _extract_fastresume_fields(data):
    """Pull (save_path, torrent_name) out of a decoded fastresume dict.

    qBittorrent-specific keys ("qBt-savePath", "qBt-name") take precedence
    over the generic libtorrent ones; missing fields become "".
    """
    save_path = (
        get_field(data, "qBt-savePath")
        or get_field(data, "save_path")
        or ""
    )
    torrent_name = (
        get_field(data, "qBt-name")
        or get_field(data, "name")
        or ""
    )
    return save_path, torrent_name


def _write_mapping_csv(csv_path, rows):
    """Write the per-torrent outcome report to *csv_path*."""
    with open(csv_path, "w", newline="", encoding="utf-8") as f:
        writer = csv.DictWriter(
            f,
            fieldnames=["hash", "torrent_name", "save_path", "output_dir", "status"],
        )
        writer.writeheader()
        writer.writerows(rows)


def main():
    """Group repaired .torrent files by their original qBittorrent save path.

    For every BT_backup/*.fastresume file: read its save path, create the
    matching subdirectory under OUTPUT_DIR, copy the repaired .torrent
    (matched by infohash) into it, and record every outcome in
    OUTPUT_DIR/mapping.csv.  Exits via SystemExit when an input directory
    is missing.
    """
    if not BT_BACKUP_DIR.is_dir():
        raise SystemExit(f"Brak katalogu: {BT_BACKUP_DIR}")
    if not FIXED_TORRENTS_DIR.is_dir():
        raise SystemExit(f"Brak katalogu: {FIXED_TORRENTS_DIR}")

    OUTPUT_DIR.mkdir(parents=True, exist_ok=True)

    fixed_index = build_fixed_torrent_index()

    rows = []
    processed = 0
    copied = 0
    missing_fixed_torrent = 0
    broken_fastresume = 0

    for fastresume_path in sorted(BT_BACKUP_DIR.glob("*.fastresume")):
        # fastresume files are named <infohash>.fastresume; the index keys
        # are uppercase hex, so normalize before the lookup.
        torrent_hash = fastresume_path.stem.upper()
        processed += 1

        try:
            data = read_bencode(fastresume_path)
        except Exception as e:
            broken_fastresume += 1
            rows.append({
                "hash": torrent_hash,
                "torrent_name": "",
                "save_path": "",
                "output_dir": "",
                "status": f"broken_fastresume: {e}",
            })
            continue

        save_path, torrent_name = _extract_fastresume_fields(data)

        target_dir = OUTPUT_DIR / safe_rel_path(save_path)
        target_dir.mkdir(parents=True, exist_ok=True)

        fixed_torrent_path = fixed_index.get(torrent_hash)

        if fixed_torrent_path and fixed_torrent_path.exists():
            # copy2 keeps the original file's timestamps/metadata.
            shutil.copy2(fixed_torrent_path, target_dir / fixed_torrent_path.name)
            status = "ok"
            copied += 1
        else:
            status = "missing_fixed_torrent"
            missing_fixed_torrent += 1

        rows.append({
            "hash": torrent_hash,
            "torrent_name": torrent_name,
            "save_path": save_path,
            "output_dir": str(target_dir),
            "status": status,
        })

    csv_path = OUTPUT_DIR / "mapping.csv"
    _write_mapping_csv(csv_path, rows)

    print(f"Przetworzono fastresume: {processed}")
    print(f"Skopiowano poprawione torrent: {copied}")
    print(f"Brak poprawionych torrentow: {missing_fixed_torrent}")
    print(f"Uszkodzone fastresume: {broken_fastresume}")
    print(f"CSV: {csv_path}")
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()