Files
pyTorrent/pytorrent/routes/api.py
Mateusz Gruszczyński 440b187c39 automatyzacje-comit1
2026-05-07 07:24:16 +02:00

1100 lines
43 KiB
Python

from __future__ import annotations
import base64
import os
import platform
import sys
import time
import re
from datetime import datetime, timezone
import urllib.request
import urllib.parse
import socket
import json
import psutil
import xml.etree.ElementTree as ET
from flask import Blueprint, jsonify, request, abort
from ..config import DB_PATH, JOBS_RETENTION_DAYS, SMART_QUEUE_HISTORY_RETENTION_DAYS, WORKERS
from ..db import connect, utcnow
from ..services.auth import current_user_id as default_user_id, current_user, list_users, save_user, delete_user, login_user, logout_user, enabled as auth_enabled, require_profile_write
from ..services import preferences, rtorrent, torrent_stats
from ..services.torrent_cache import torrent_cache
from ..services.torrent_summary import cached_summary
from ..services.workers import enqueue, list_jobs, cancel_job, retry_job, clear_jobs, emergency_clear_jobs
from ..services.geoip import lookup_ip
# Blueprint for all JSON API routes, mounted under /api.
bp = Blueprint("api", __name__, url_prefix="/api")
# Upper bound on hashes per queued move/remove job; larger selections are chunked.
MOVE_BULK_MAX_HASHES = 100
@bp.post("/auth/login")
def auth_login():
    """Authenticate a user from the JSON body; hidden (404) when auth is disabled."""
    if not auth_enabled():
        abort(404)
    body = request.get_json(silent=True) or {}
    username = str(body.get("username") or "")
    password = str(body.get("password") or "")
    user = login_user(username, password)
    if user:
        return ok({"user": user, "auth_enabled": auth_enabled()})
    return jsonify({"ok": False, "error": "Invalid username or password"}), 401
@bp.get("/auth/me")
def auth_me():
    """Return the current session user; hidden (404) when auth is disabled."""
    if auth_enabled():
        return ok({"user": current_user(), "auth_enabled": auth_enabled()})
    abort(404)
@bp.post("/auth/logout")
def auth_logout():
    """End the current session; hidden (404) when auth is disabled."""
    if auth_enabled():
        logout_user()
        return ok()
    abort(404)
@bp.get("/auth/users")
def auth_users_list():
    """List all user accounts; hidden (404) when auth is disabled."""
    if auth_enabled():
        return ok({"users": list_users()})
    abort(404)
@bp.post("/auth/users")
def auth_users_create():
    """Create a user from the JSON body; 400 with the error message on failure."""
    if not auth_enabled():
        abort(404)
    body = request.get_json(silent=True) or {}
    try:
        created = save_user(body)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"user": created})
@bp.put("/auth/users/<int:user_id>")
def auth_users_update(user_id: int):
    """Update an existing user from the JSON body; 400 with the error on failure."""
    if not auth_enabled():
        abort(404)
    body = request.get_json(silent=True) or {}
    try:
        updated = save_user(body, user_id)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"user": updated})
@bp.delete("/auth/users/<int:user_id>")
def auth_users_delete(user_id: int):
    """Delete a user and return the refreshed user list; 400 on failure."""
    if not auth_enabled():
        abort(404)
    try:
        delete_user(user_id)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"users": list_users()})
def _job_profile_id(job_id: str) -> int | None:
    """Look up the owning profile id for a job, or None when the job is unknown."""
    with connect() as conn:
        row = conn.execute("SELECT profile_id FROM jobs WHERE id=?", (job_id,)).fetchone()
    if row is None:
        return None
    return int(row.get("profile_id") or 0)
def ok(payload=None):
    """Wrap *payload* in a JSON success envelope ({"ok": True, ...})."""
    return jsonify({"ok": True, **payload} if payload else {"ok": True})
# Port-check results are reused for six hours before a fresh external probe.
PORT_CHECK_CACHE_SECONDS = 6 * 60 * 60
def _app_setting_get(key: str):
    """Fetch one value from the app_settings table, or None when absent."""
    with connect() as conn:
        row = conn.execute("SELECT value FROM app_settings WHERE key=?", (key,)).fetchone()
    return row.get("value") if row else None
def _app_setting_set(key: str, value: str) -> None:
    """Upsert a single key/value pair into the app_settings table."""
    with connect() as conn:
        conn.execute("INSERT OR REPLACE INTO app_settings(key,value) VALUES(?,?)", (key, value))
def _iso_from_epoch(value) -> str | None:
try:
return datetime.fromtimestamp(float(value), timezone.utc).isoformat(timespec="seconds")
except Exception:
return None
def _public_ip(profile: dict | None = None, force: bool = False) -> str:
    """Resolve the public IP — via the remote host for remote profiles, ipify otherwise."""
    if profile and bool(profile.get("is_remote")):
        return rtorrent.remote_public_ip(profile, force=force)
    probe = urllib.request.Request("https://api.ipify.org", headers={"User-Agent": "pyTorrent/port-check"})
    with urllib.request.urlopen(probe, timeout=8) as response:
        return response.read(64).decode("utf-8", "replace").strip()
MAX_PORT_CHECK_CANDIDATES = 256
def _parse_port_candidates(value: str, limit: int = MAX_PORT_CHECK_CANDIDATES) -> tuple[list[int], bool]:
"""Return valid incoming port candidates from rTorrent network.port_range.
Note: rTorrent may keep a range/list and pick a random port on start.
The old checker used only the first number, which produced false "closed"
results when another configured port was actually active.
"""
ports: list[int] = []
seen: set[int] = set()
truncated = False
def add(port: int) -> None:
nonlocal truncated
if not 1 <= port <= 65535 or port in seen:
return
if len(ports) >= limit:
truncated = True
return
seen.add(port)
ports.append(port)
for start, end in re.findall(r"(\d{1,5})\s*-\s*(\d{1,5})", value or ""):
a, b = int(start), int(end)
if a > b:
a, b = b, a
for port in range(a, b + 1):
add(port)
if truncated:
break
without_ranges = re.sub(r"\d{1,5}\s*-\s*\d{1,5}", " ", value or "")
for item in re.findall(r"\d{1,5}", without_ranges):
add(int(item))
return ports, truncated
def _incoming_ports(profile: dict) -> dict:
    """Read rTorrent's network.port_range and parse it into candidate ports.

    Returns {"ports": [...], "raw": original string, "truncated": bool};
    XMLRPC failures degrade to an empty raw value instead of raising.
    """
    try:
        raw_value = str(rtorrent.client_for(profile).call("network.port_range") or "")
    except Exception:
        raw_value = ""
    candidates, truncated = _parse_port_candidates(raw_value)
    return {"ports": candidates, "raw": raw_value, "truncated": truncated}
def _yougetsignal_check(public_ip: str, port: int) -> dict:
    """Ask ports.yougetsignal.com whether *port* on *public_ip* is reachable.

    Returns {"status": "open"|"closed"|"unknown", "source", "raw"} where "raw"
    is the (truncated) response body kept for debugging.
    """
    form = urllib.parse.urlencode({"remoteAddress": public_ip, "portNumber": str(port)}).encode("utf-8")
    probe = urllib.request.Request(
        "https://ports.yougetsignal.com/check-port.php",
        data=form,
        headers={
            "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
            "User-Agent": "pyTorrent/port-check",
            "Accept": "text/html,application/json,*/*",
        },
        method="POST",
    )
    with urllib.request.urlopen(probe, timeout=12) as response:
        text = response.read(8192).decode("utf-8", "replace")
    lowered = text.lower()
    if "is open" in lowered:
        status = "open"
    elif "is closed" in lowered:
        status = "closed"
    else:
        status = "unknown"
    return {"status": status, "source": "yougetsignal", "raw": text[:500]}
def _local_port_fallback(public_ip: str, port: int) -> dict:
    """Best-effort direct TCP connect from this host; only a success is conclusive."""
    try:
        with socket.create_connection((public_ip, port), timeout=3):
            pass
    except Exception as exc:
        # A failed connect may be NAT/firewall on *our* side, so never say "closed".
        return {"status": "unknown", "source": "local-fallback", "error": f"Local fallback inconclusive: {exc}"}
    return {"status": "open", "source": "local-fallback"}
def _check_ports(public_ip: str, ports: list[int], checker) -> dict:
checked: list[int] = []
first_closed: dict | None = None
last_result: dict = {"status": "unknown"}
for port in ports:
checked.append(port)
current = checker(public_ip, port)
last_result = current
if current.get("status") == "open":
current.update({"port": port, "open_port": port, "checked_ports": checked})
return current
if current.get("status") == "closed" and first_closed is None:
first_closed = current
result = first_closed or last_result
result.update({"port": ports[0] if ports else None, "open_port": None, "checked_ports": checked})
return result
def port_check_status(force: bool = False) -> dict:
    """Check whether one of rTorrent's incoming ports is reachable from outside.

    Results are cached in app_settings for PORT_CHECK_CACHE_SECONDS per
    (profile, port set); pass ``force=True`` to bypass and refresh the cache.
    Always returns a status dict with at least "status" and "enabled".
    """
    profile = preferences.active_profile()
    prefs = preferences.get_preferences()
    enabled = bool((prefs or {}).get("port_check_enabled"))
    if not profile:
        return {"status": "unknown", "enabled": enabled, "error": "No profile"}
    port_info = _incoming_ports(profile)
    ports = port_info["ports"]
    if not ports:
        return {"status": "unknown", "enabled": enabled, "error": "Cannot read rTorrent network.port_range"}
    # Cache key includes the port list, so a port-range config change invalidates old results.
    ports_key = ",".join(str(port) for port in ports)
    cache_key = f"port_check:{profile['id']}:{ports_key}:{int(bool(port_info['truncated']))}"
    if not force:
        cached = _app_setting_get(cache_key)
        if cached:
            try:
                data = json.loads(cached)
                if time.time() - float(data.get("checked_at_epoch") or 0) < PORT_CHECK_CACHE_SECONDS:
                    data["cached"] = True
                    data["enabled"] = enabled  # reflect the *current* preference, not the cached one
                    if not data.get("checked_at"):
                        # Older cache entries may predate the human-readable timestamp field.
                        data["checked_at"] = _iso_from_epoch(data.get("checked_at_epoch"))
                    return data
            except Exception:
                pass  # unreadable cache entry: fall through and re-check
    checked_at_epoch = time.time()
    result = {
        "status": "unknown",
        "enabled": enabled,
        "port": ports[0],
        "ports": ports,
        "port_range": port_info["raw"],
        "ports_truncated": port_info["truncated"],
        "checked_at_epoch": checked_at_epoch,
        "checked_at": _iso_from_epoch(checked_at_epoch),
        "cached": False,
    }
    try:
        # Primary path: an external service observes the host from the outside.
        public_ip = _public_ip(profile, force=force)
        result["public_ip"] = public_ip
        result["remote"] = bool(profile.get("is_remote"))
        result.update(_check_ports(public_ip, ports, _yougetsignal_check))
    except Exception as exc:
        result["error"] = f"YouGetSignal failed: {exc}"
        try:
            # Fallback: direct TCP connect; inconclusive results stay "unknown".
            public_ip = result.get("public_ip") or _public_ip(profile, force=force)
            result["public_ip"] = public_ip
            result["remote"] = bool(profile.get("is_remote"))
            result.update(_check_ports(public_ip, ports, _local_port_fallback))
        except Exception as fallback_exc:
            result["fallback_error"] = str(fallback_exc)
            result["source"] = "none"
    # Failed checks are cached too, so a broken external checker is not hammered.
    _app_setting_set(cache_key, json.dumps(result))
    return result
def _safe_len(callable_obj) -> int | None:
try:
return len(callable_obj())
except Exception:
return None
def _table_count(table: str, where: str = "", params: tuple = ()) -> int:
    """Count rows in *table* (optionally filtered); 0 when the table is missing.

    *table* and *where* are interpolated into the SQL, so callers must pass
    trusted literals only — user data belongs in *params*.
    """
    with connect() as conn:
        if not conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name=?", (table,)).fetchone():
            return 0
        row = conn.execute(f"SELECT COUNT(*) AS n FROM {table} {where}", params).fetchone()
    return int((row or {}).get("n") or 0)
def _db_size() -> dict:
    """Describe the SQLite file size (bytes plus human-readable); errors inlined."""
    try:
        byte_count = DB_PATH.stat().st_size if DB_PATH.exists() else 0
        return {"path": str(DB_PATH), "size": byte_count, "size_h": rtorrent.human_size(byte_count)}
    except Exception as exc:
        return {"path": str(DB_PATH), "size": 0, "size_h": "0 B", "error": str(exc)}
def cleanup_summary() -> dict:
    """Gather row counts, retention settings and DB size for the cleanup panel."""
    retention = {
        "jobs": JOBS_RETENTION_DAYS,
        "smart_queue_history": SMART_QUEUE_HISTORY_RETENTION_DAYS,
        "automation_history": SMART_QUEUE_HISTORY_RETENTION_DAYS,
    }
    return {
        "jobs_total": _table_count("jobs"),
        "jobs_clearable": _table_count("jobs", "WHERE status NOT IN ('pending', 'running')"),
        "smart_queue_history_total": _table_count("smart_queue_history"),
        "automation_history_total": _table_count("automation_history"),
        "retention_days": retention,
        "database": _db_size(),
    }
def active_default_download_path(profile: dict | None) -> str:
    """Return rTorrent's default download directory, or "" when unavailable."""
    if profile:
        try:
            return rtorrent.default_download_path(profile)
        except Exception:
            pass
    return ""
def enrich_bulk_payload(profile: dict, action_name: str, data: dict) -> dict:
    """Normalize an action payload and attach a job_context audit block.

    Coerces "hashes" to a list of non-empty strings, records request metadata,
    and (best effort) resolves each hash to its cached name/path so job views
    can show what was touched. Move/remove actions additionally record their
    destructive options.
    """
    payload = dict(data or {})
    raw_hashes = payload.get("hashes") or []
    if isinstance(raw_hashes, str):
        raw_hashes = [raw_hashes]
    hashes = [str(item) for item in raw_hashes if item]
    payload["hashes"] = hashes
    context = {
        "source": "api",
        "action": action_name,
        "bulk": len(hashes) > 1,
        "hash_count": len(hashes),
        "requested_at": utcnow(),
    }
    payload["job_context"] = context
    if hashes:
        try:
            cached = {str(t.get("hash")): t for t in torrent_cache.snapshot(profile["id"])}
            context["items"] = [
                {
                    "hash": h,
                    "name": str((cached.get(h) or {}).get("name") or ""),
                    "path": str((cached.get(h) or {}).get("path") or ""),
                }
                for h in hashes
            ]
        except Exception as exc:
            # Metadata is a nice-to-have; never fail the action over it.
            context["items_error"] = str(exc)
    if action_name == "move":
        context["target_path"] = str(payload.get("path") or "")
        context["move_data"] = bool(payload.get("move_data"))
    if action_name == "remove":
        context["remove_data"] = bool(payload.get("remove_data"))
    return payload
def _chunk_hashes(hashes: list[str], size: int = MOVE_BULK_MAX_HASHES) -> list[list[str]]:
    """Split *hashes* into consecutive chunks of at most *size* (>= 1) entries.

    Keeps each queued bulk job small and recoverable; a falsy *size* falls
    back to the module default.
    """
    step = max(1, int(size or MOVE_BULK_MAX_HASHES))
    return [hashes[start:start + step] for start in range(0, len(hashes), step)]
def enqueue_bulk_parts(profile: dict, action_name: str, data: dict) -> list[dict]:
    """Queue *action_name* for the payload's hashes, split into bounded parts.

    One shared helper splits large move/remove operations into small ordered
    parts without changing other actions. Returns one descriptor per queued
    job: job_id, label, part/parts and the hashes that part covers.
    """
    base_payload = enrich_bulk_payload(profile, action_name, data)
    hashes = base_payload.get("hashes") or []
    chunks = _chunk_hashes(hashes)
    if len(chunks) <= 1:
        # Small selections keep the old single-job shape (label "bulk-1").
        job_id = enqueue(action_name, profile["id"], base_payload)
        return [{"job_id": job_id, "label": "bulk-1", "part": 1, "parts": 1, "hashes": hashes, "hash_count": len(hashes)}]
    jobs = []
    # Re-key the enriched per-torrent metadata so each part carries only its own items.
    items_by_hash = {str(item.get("hash")): item for item in (base_payload.get("job_context") or {}).get("items") or []}
    for index, chunk in enumerate(chunks, start=1):
        payload = dict(base_payload)
        payload["hashes"] = chunk
        context = dict(base_payload.get("job_context") or {})
        context.update({
            "bulk": True,
            "bulk_label": f"bulk-{index}",
            "bulk_part": index,
            "bulk_parts": len(chunks),
            "hash_count": len(chunk),
            "parent_hash_count": len(hashes),
            "items": [items_by_hash[h] for h in chunk if h in items_by_hash],
        })
        payload["job_context"] = context
        job_id = enqueue(action_name, profile["id"], payload)
        jobs.append({"job_id": job_id, "label": context["bulk_label"], "part": index, "parts": len(chunks), "hashes": chunk, "hash_count": len(chunk)})
    return jobs
def enqueue_move_bulk_parts(profile: dict, data: dict) -> list[dict]:
    """Backward-compatible wrapper: queue a move using the shared chunking logic."""
    return enqueue_bulk_parts(profile, "move", data)
def enqueue_remove_bulk_parts(profile: dict, data: dict) -> list[dict]:
    """Queue a remove with the same chunking as move, keeping rTorrent load low."""
    return enqueue_bulk_parts(profile, "remove", data)
@bp.get("/profiles")
def profiles_list():
    """Return every rTorrent profile together with the active one."""
    return ok({"profiles": preferences.list_profiles(), "active": preferences.active_profile()})
@bp.post("/profiles")
def profiles_create():
    """Create a profile from the JSON body; 400 with the error on failure."""
    try:
        created = preferences.save_profile(request.json or {})
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"profile": created})
@bp.put("/profiles/<int:profile_id>")
def profiles_update(profile_id: int):
    """Update an existing profile; 400 with the error on failure."""
    try:
        updated = preferences.update_profile(profile_id, request.json or {})
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"profile": updated})
@bp.delete("/profiles/<int:profile_id>")
def profiles_delete(profile_id: int):
    """Delete a profile and return the refreshed list plus the active profile."""
    preferences.delete_profile(profile_id)
    return ok({"profiles": preferences.list_profiles(), "active": preferences.active_profile()})
@bp.post("/profiles/<int:profile_id>/activate")
def profiles_activate(profile_id: int):
    """Switch the active profile; 404 when activation fails."""
    try:
        activated = preferences.activate_profile(profile_id)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 404
    return ok({"profile": activated})
@bp.get("/preferences")
def prefs_get():
    """Return the stored user preferences."""
    return ok({"preferences": preferences.get_preferences()})
@bp.post("/preferences")
def prefs_save():
    """Persist the posted preferences and echo the saved state."""
    return ok({"preferences": preferences.save_preferences(request.json or {})})
@bp.get("/torrents")
def torrents():
    """Serve the cached torrent list and summary for the active profile."""
    profile = preferences.active_profile()
    if not profile:
        return ok({"torrents": [], "summary": cached_summary(0, []), "error": "No rTorrent profile"})
    profile_id = profile["id"]
    rows = torrent_cache.snapshot(profile_id)
    return ok({
        "profile_id": profile_id,
        "torrents": rows,
        "summary": cached_summary(profile_id, rows),
        "error": torrent_cache.error(profile_id),
    })
@bp.get("/torrent-stats")
def torrent_stats_get():
    """Return cached torrent file statistics; ?force=1 refreshes the cache."""
    profile = preferences.active_profile()
    if not profile:
        return ok({"stats": {}, "error": "No profile"})
    force = str(request.args.get("force") or "").lower() in {"1", "true", "yes"}
    try:
        # Heavy file metadata is served from a 15-minute DB cache unless forced.
        stats = torrent_stats.get(profile, force=force)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 500
    return ok({"stats": stats})
@bp.get("/torrents/<torrent_hash>/files")
def torrent_files(torrent_hash: str):
    """List the files belonging to one torrent."""
    profile = preferences.active_profile()
    if profile:
        return ok({"files": rtorrent.torrent_files(profile, torrent_hash)})
    return jsonify({"ok": False, "error": "No profile"}), 400
@bp.post("/torrents/<torrent_hash>/files/priority")
def torrent_file_priority(torrent_hash: str):
    """Apply per-file priorities; 207 when some files failed, 200 otherwise."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    files = body.get("files") or []
    if not isinstance(files, list) or not files:
        return jsonify({"ok": False, "error": "No files selected"}), 400
    result = rtorrent.set_file_priorities(profile, torrent_hash, files)
    return ok(result), (207 if result.get("errors") else 200)
@bp.get("/torrents/<torrent_hash>/peers")
def torrent_peers(torrent_hash: str):
    """List peers for one torrent, each enriched with GeoIP lookup data."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    peers = rtorrent.torrent_peers(profile, torrent_hash)
    for entry in peers:
        entry.update(lookup_ip(entry.get("ip", "")))
    return ok({"peers": peers})
@bp.get("/torrents/<torrent_hash>/trackers")
def torrent_trackers(torrent_hash: str):
    """List the trackers configured on one torrent."""
    profile = preferences.active_profile()
    if profile:
        return ok({"trackers": rtorrent.torrent_trackers(profile, torrent_hash)})
    return jsonify({"ok": False, "error": "No profile"}), 400
@bp.post("/torrents/<torrent_hash>/trackers/<action_name>")
def torrent_tracker_action(torrent_hash: str, action_name: str):
    """Run a tracker action on one torrent; 400 with the error on failure."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    try:
        result = rtorrent.tracker_action(profile, torrent_hash, action_name, body)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"result": result, "message": f"Tracker {action_name} via {result.get('method', 'XMLRPC')}"})
@bp.post("/torrents/<action_name>")
def torrent_action(action_name: str):
    """Dispatch a torrent action; move/remove are queued as chunked bulk jobs."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    allowed = {"start", "pause", "unpause", "stop", "resume", "recheck", "reannounce", "remove", "move", "set_label", "set_ratio_group"}
    if action_name not in allowed:
        return jsonify({"ok": False, "error": "Unknown action"}), 400
    if action_name not in {"move", "remove"}:
        payload = enrich_bulk_payload(profile, action_name, body)
        count = len(payload.get("hashes") or [])
        job_id = enqueue(action_name, profile["id"], payload)
        return ok({"job_id": job_id, "hash_count": count, "bulk": count > 1})
    # Large move/remove requests are split into ordered bulk parts; smaller
    # requests keep the old single-job response shape.
    jobs = enqueue_bulk_parts(profile, action_name, body)
    total = sum(int(part.get("hash_count") or 0) for part in jobs)
    return ok({
        "job_id": jobs[0]["job_id"] if jobs else None,
        "job_ids": [part["job_id"] for part in jobs],
        "jobs": jobs,
        "hash_count": total,
        "bulk": total > 1,
        "bulk_parts": len(jobs),
        "chunk_size": MOVE_BULK_MAX_HASHES,
    })
@bp.post("/torrents/add")
def torrent_add():
    """Add torrents from magnet URIs and/or uploaded .torrent files.

    Accepts multipart/form-data (uris textarea plus file uploads) or a JSON
    body with "uris". Each item becomes its own background job; returns the
    list of queued job ids.
    """
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    job_ids = []
    if request.content_type and request.content_type.startswith("multipart/form-data"):
        start = request.form.get("start", "1") in {"1", "true", "on", "yes"}
        # An empty directory falls back to rTorrent's configured default path.
        directory = request.form.get("directory", "") or active_default_download_path(profile)
        label = request.form.get("label", "")
        uris = [x.strip() for x in request.form.get("uris", "").splitlines() if x.strip()]
        for uri in uris:
            job_ids.append(enqueue("add_magnet", profile["id"], {"uri": uri, "start": start, "directory": directory, "label": label}))
        for uploaded in request.files.getlist("files"):
            # .torrent payloads are base64-encoded so the job row stays JSON-serializable.
            data_b64 = base64.b64encode(uploaded.read()).decode("ascii")
            job_ids.append(enqueue("add_torrent_raw", profile["id"], {"filename": uploaded.filename, "data_b64": data_b64, "start": start, "directory": directory, "label": label}))
        return ok({"job_ids": job_ids})
    data = request.get_json(silent=True) or {}
    uris = data.get("uris") or []
    if isinstance(uris, str):
        # JSON clients may send one newline-separated string instead of a list.
        uris = [x.strip() for x in uris.splitlines() if x.strip()]
    for uri in uris:
        job_ids.append(enqueue("add_magnet", profile["id"], {"uri": uri, "start": data.get("start", True), "directory": data.get("directory", "") or active_default_download_path(profile), "label": data.get("label", "")}))
    return ok({"job_ids": job_ids})
@bp.post("/speed/limits")
def speed_limits():
    """Queue a global down/up speed limit change."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    queued = enqueue("set_limits", profile["id"], {"down": body.get("down"), "up": body.get("up")})
    return ok({"job_id": queued})
@bp.get("/system/status")
def system_status():
    """Report rTorrent system status plus host CPU/RAM for local profiles."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"})
    try:
        status = rtorrent.system_status(profile)
        if bool(profile.get("is_remote")):
            # Local psutil numbers would describe this host, not the remote box.
            status["usage_source"] = "remote-hidden"
            status["usage_available"] = False
        else:
            status["cpu"] = psutil.cpu_percent(interval=None)
            status["ram"] = psutil.virtual_memory().percent
            status["usage_source"] = "local"
            status["usage_available"] = True
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)})
    return ok({"status": status})
@bp.get("/app/status")
def app_status():
    """Report pyTorrent process health, SCGI diagnostics and cleanup counters.

    Always returns 200; individual sections degrade to error markers instead
    of failing the whole endpoint.
    """
    started = time.perf_counter()
    profile = preferences.active_profile()
    proc = psutil.Process(os.getpid())
    try:
        jobs = list_jobs(10, 0)
        jobs_total = jobs.get("total", 0)
    except Exception:
        jobs_total = 0
    # Hoisted: memory_info() was previously sampled twice for one field pair,
    # which could even report inconsistent raw/human values.
    rss = proc.memory_info().rss
    status = {
        "pytorrent": {
            "ok": True,
            "pid": os.getpid(),
            "uptime_seconds": round(time.time() - proc.create_time(), 1),
            "memory_rss": rss,
            "memory_rss_h": rtorrent.human_size(rss),
            "threads": proc.num_threads(),
            "cpu_percent": proc.cpu_percent(interval=None),
            "jobs_total": jobs_total,
            "python": platform.python_version(),
            "platform": platform.platform(),
            "executable": sys.executable,
            "worker_threads": WORKERS,
            # hasattr guards keep this working across psutil versions/platforms.
            "open_files": _safe_len(proc.open_files) if hasattr(proc, "open_files") else None,
            "connections": _safe_len(lambda: proc.net_connections(kind="inet")) if hasattr(proc, "net_connections") else None,
        },
        "cleanup": cleanup_summary(),
        "profile": profile,
        "scgi": None,
    }
    if profile:
        try:
            status["scgi"] = rtorrent.scgi_diagnostics(profile)
        except Exception as exc:
            status["scgi"] = {"ok": False, "error": str(exc), "url": profile.get("scgi_url")}
    try:
        prefs = preferences.get_preferences()
        status["port_check"] = {"status": "disabled", "enabled": False} if not bool((prefs or {}).get("port_check_enabled")) else port_check_status(force=False)
    except Exception as exc:
        status["port_check"] = {"status": "error", "error": str(exc)}
    status["api_ms"] = round((time.perf_counter() - started) * 1000, 2)
    return ok({"status": status})
@bp.get("/port-check")
def port_check_get():
    """Return the (possibly cached) port-check result, or "disabled"."""
    prefs = preferences.get_preferences()
    if bool((prefs or {}).get("port_check_enabled")):
        return ok({"port_check": port_check_status(force=False)})
    return ok({"port_check": {"status": "disabled", "enabled": False}})
@bp.post("/port-check")
def port_check_post():
    """Force a fresh port check, bypassing the cache (and the enabled flag)."""
    return ok({"port_check": port_check_status(force=True)})
@bp.get("/jobs")
def jobs_list():
    """List background jobs with pagination (?limit=&offset=).

    Fix: non-numeric limit/offset no longer raise an unhandled ValueError
    (HTTP 500); Flask's ``type=int`` coercion falls back to the defaults.
    """
    limit = request.args.get("limit", 50, type=int)
    offset = request.args.get("offset", 0, type=int)
    data = list_jobs(limit, offset)
    return ok({"jobs": data["rows"], "total": data["total"], "limit": data["limit"], "offset": data["offset"]})
@bp.post("/jobs/clear")
def jobs_clear():
    """Delete finished jobs; ?force=1 switches to emergency rescue cleanup."""
    force = str(request.args.get("force") or "").lower() in {"1", "true", "yes"}
    if force:
        # Rescue mode delegates to emergency_clear_jobs; response shape is unchanged.
        return ok({"deleted": emergency_clear_jobs(), "emergency": True})
    return ok({"deleted": clear_jobs(), "emergency": False})
@bp.get("/cleanup/summary")
def cleanup_status():
    """Expose the cleanup panel counters."""
    return ok({"cleanup": cleanup_summary()})
@bp.post("/cleanup/jobs")
def cleanup_jobs():
    """Clear finished jobs and return the refreshed cleanup counters."""
    removed = clear_jobs()
    return ok({"deleted": removed, "cleanup": cleanup_summary()})
@bp.post("/cleanup/smart-queue")
def cleanup_smart_queue():
    """Empty the smart-queue history table (tolerates the table not existing)."""
    deleted = 0
    with connect() as conn:
        exists = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='smart_queue_history'").fetchone()
        if exists:
            deleted = int(conn.execute("DELETE FROM smart_queue_history").rowcount or 0)
    return ok({"deleted": deleted, "cleanup": cleanup_summary()})
@bp.post("/cleanup/automations")
def cleanup_automations():
    """Empty the automation history log; saved automation rules are untouched."""
    deleted = 0
    with connect() as conn:
        exists = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='automation_history'").fetchone()
        if exists:
            deleted = int(conn.execute("DELETE FROM automation_history").rowcount or 0)
    return ok({"deleted": deleted, "cleanup": cleanup_summary()})
@bp.post("/cleanup/all")
def cleanup_all():
    """Purge finished jobs plus smart-queue and automation history in one call."""
    deleted_jobs = clear_jobs()
    with connect() as conn:
        # Both history tables may be missing on fresh databases; treat as zero rows.
        has_smart = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='smart_queue_history'").fetchone()
        deleted_smart = int(conn.execute("DELETE FROM smart_queue_history").rowcount or 0) if has_smart else 0
        has_auto = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='automation_history'").fetchone()
        deleted_auto = int(conn.execute("DELETE FROM automation_history").rowcount or 0) if has_auto else 0
    return ok({"deleted": {"jobs": deleted_jobs, "smart_queue_history": deleted_smart, "automation_history": deleted_auto}, "cleanup": cleanup_summary()})
@bp.post("/jobs/<job_id>/cancel")
def jobs_cancel(job_id: str):
    """Cancel an unfinished job after checking profile write access."""
    require_profile_write(_job_profile_id(job_id))
    if not cancel_job(job_id):
        return jsonify({"ok": False, "error": "Only unfinished jobs can be cancelled"}), 400
    # NOTE(review): "emergency": True looks like a copy/paste leftover from the
    # force-clear endpoint — cancelling one job is not an emergency action.
    # Confirm no client reads this flag before removing it.
    return ok({"emergency": True})
@bp.post("/jobs/<job_id>/retry")
def jobs_retry(job_id: str):
    """Re-queue a failed or cancelled job after checking profile write access."""
    require_profile_write(_job_profile_id(job_id))
    if retry_job(job_id):
        return ok()
    return jsonify({"ok": False, "error": "Only failed or cancelled jobs can be retried"}), 400
@bp.get("/path/default")
def path_default():
    """Return rTorrent's default download path for the active profile."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    try:
        path = rtorrent.default_download_path(profile)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"path": path})
@bp.get("/path/browse")
def path_browse():
    """Browse a directory (?path=) on the host rTorrent runs on."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    target = request.args.get("path") or ""
    try:
        listing = rtorrent.browse_path(profile, target)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok(listing)
@bp.get("/labels")
def labels_list():
    """List the current user's labels (profile-specific plus global ones)."""
    profile = preferences.active_profile()
    profile_id = profile["id"] if profile else None
    with connect() as conn:
        rows = conn.execute("SELECT * FROM labels WHERE user_id=? AND (profile_id=? OR profile_id IS NULL) ORDER BY name COLLATE NOCASE", (default_user_id(), profile_id)).fetchall()
    return ok({"labels": rows})
@bp.post("/labels")
def labels_save():
    """Create a label for the active profile (duplicates are silently ignored)."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    name = str(body.get("name") or "").strip()
    if not name:
        return jsonify({"ok": False, "error": "Missing label name"}), 400
    now = utcnow()
    color = body.get("color") or "#64748b"
    with connect() as conn:
        conn.execute("INSERT OR IGNORE INTO labels(user_id,profile_id,name,color,created_at,updated_at) VALUES(?,?,?,?,?,?)", (default_user_id(), profile["id"], name, color, now, now))
    return labels_list()
@bp.delete("/labels/<int:label_id>")
def labels_delete(label_id: int):
    """Delete one of the current user's labels and return the refreshed list."""
    profile = preferences.active_profile()
    profile_id = profile["id"] if profile else None
    with connect() as conn:
        conn.execute("DELETE FROM labels WHERE id=? AND user_id=? AND (profile_id=? OR profile_id IS NULL)", (label_id, default_user_id(), profile_id))
    return labels_list()
@bp.get("/ratio-groups")
def ratio_groups_list():
    """List the current user's ratio groups (profile-specific plus global)."""
    profile = preferences.active_profile()
    profile_id = profile["id"] if profile else None
    with connect() as conn:
        rows = conn.execute("SELECT * FROM ratio_groups WHERE user_id=? AND (profile_id=? OR profile_id IS NULL) ORDER BY name COLLATE NOCASE", (default_user_id(), profile_id)).fetchall()
    return ok({"groups": rows})
@bp.post("/ratio-groups")
def ratio_groups_save():
    """Create or replace a ratio group for the active profile."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    name = str(body.get("name") or "").strip()
    if not name:
        return jsonify({"ok": False, "error": "Missing group name"}), 400
    now = utcnow()
    values = (
        default_user_id(),
        profile["id"],
        name,
        float(body.get("min_ratio") or 1),
        float(body.get("max_ratio") or 2),
        int(body.get("seed_time_minutes") or 0),
        body.get("action") or "stop",
        1 if body.get("enabled", True) else 0,
        now,
        now,
    )
    with connect() as conn:
        conn.execute("INSERT OR REPLACE INTO ratio_groups(user_id,profile_id,name,min_ratio,max_ratio,seed_time_minutes,action,enabled,created_at,updated_at) VALUES(?,?,?,?,?,?,?,?,?,?)", values)
    return ratio_groups_list()
@bp.get("/rss")
def rss_list():
    """List RSS feeds and rules for the current user and active profile."""
    profile = preferences.active_profile()
    profile_id = profile["id"] if profile else None
    params = (default_user_id(), profile_id)
    with connect() as conn:
        feeds = conn.execute("SELECT * FROM rss_feeds WHERE user_id=? AND (profile_id=? OR profile_id IS NULL) ORDER BY name", params).fetchall()
        rules = conn.execute("SELECT * FROM rss_rules WHERE user_id=? AND (profile_id=? OR profile_id IS NULL) ORDER BY name", params).fetchall()
    return ok({"feeds": feeds, "rules": rules})
@bp.post("/rss/feeds")
def rss_feed_save():
    """Create an enabled RSS feed from the JSON body and return the full list."""
    profile = preferences.active_profile()
    body = request.get_json(silent=True) or {}
    now = utcnow()
    values = (default_user_id(), profile["id"] if profile else None, body.get("name") or "RSS", body.get("url") or "", 1, now, now)
    with connect() as conn:
        conn.execute("INSERT INTO rss_feeds(user_id,profile_id,name,url,enabled,created_at,updated_at) VALUES(?,?,?,?,?,?,?)", values)
    return rss_list()
@bp.post("/rss/rules")
def rss_rule_save():
    """Create an enabled RSS rule from the JSON body and return the full list."""
    profile = preferences.active_profile()
    body = request.get_json(silent=True) or {}
    now = utcnow()
    values = (
        default_user_id(),
        profile["id"] if profile else None,
        body.get("name") or "Rule",
        body.get("pattern") or ".*",
        body.get("save_path") or active_default_download_path(profile),
        body.get("label") or "",
        1 if body.get("start", True) else 0,
        1,
        now,
        now,
    )
    with connect() as conn:
        conn.execute("INSERT INTO rss_rules(user_id,profile_id,name,pattern,save_path,label,start,enabled,created_at,updated_at) VALUES(?,?,?,?,?,?,?,?,?,?)", values)
    return rss_list()
@bp.post("/rss/check")
def rss_check():
    """Fetch every enabled RSS feed and queue items whose title matches a rule.

    Returns {"queued": n}. Per-feed failures are recorded on the feed row
    (last_error / last_checked_at) instead of failing the request.
    NOTE(review): only the error path writes last_checked_at — successful
    checks never touch it; confirm whether that is intended.
    """
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    queued = 0
    with connect() as conn:
        feeds = conn.execute("SELECT * FROM rss_feeds WHERE user_id=? AND profile_id=? AND enabled=1", (default_user_id(), profile["id"])).fetchall()
        rules = conn.execute("SELECT * FROM rss_rules WHERE user_id=? AND profile_id=? AND enabled=1", (default_user_id(), profile["id"])).fetchall()
    for feed in feeds:
        try:
            # Cap the download at ~2 MB to bound memory on huge/hostile feeds.
            raw = urllib.request.urlopen(feed["url"], timeout=10).read(2_000_000)
            root = ET.fromstring(raw)
            # Only the first 100 items per feed are considered.
            for item in root.findall('.//item')[:100]:
                title = item.findtext('title') or ''
                link = item.findtext('link') or ''
                enc = item.find('enclosure')
                # Prefer the enclosure URL (typically the .torrent) over the page link.
                if enc is not None and enc.get('url'):
                    link = enc.get('url') or link
                for rule in rules:
                    # A title matching several rules is queued once per matching rule.
                    if re.search(rule["pattern"], title, re.I) and link:
                        enqueue("add_magnet", profile["id"], {"uri": link, "start": bool(rule["start"]), "directory": rule.get("save_path") or active_default_download_path(profile), "label": rule.get("label") or ""})
                        queued += 1
        except Exception as exc:
            # Best effort: store the failure on the feed row for the UI,
            # using a fresh connection for the write.
            with connect() as conn:
                conn.execute("UPDATE rss_feeds SET last_error=?, last_checked_at=?, updated_at=? WHERE id=?", (str(exc), utcnow(), utcnow(), feed["id"]))
    return ok({"queued": queued})
@bp.get('/rtorrent-config')
def rtorrent_config_get():
    """Fetch the live rTorrent configuration for the active profile."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        config = rtorrent.get_config(profile)
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
    return ok({'config': config})
@bp.post('/rtorrent-config')
def rtorrent_config_save():
    """Apply configuration values to rTorrent; 400 when some keys were rejected."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        body = request.get_json(silent=True) or {}
        result = rtorrent.set_config(profile, body.get('values') or {}, bool(body.get('apply_now', True)), bool(body.get('apply_on_start')), body.get('clear_keys') or [])
        if result.get('ok'):
            return ok({'result': result})
        return jsonify({'ok': False, 'error': 'Some settings were not saved', 'result': result}), 400
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
@bp.post('/rtorrent-config/generate')
def rtorrent_config_generate():
    """Render an rtorrent.rc preview from the posted values without saving."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        payload = request.get_json(silent=True) or {}
        text = rtorrent.generate_config_text(payload.get('values') or {})
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
    return ok({'config_text': text})
@bp.get('/smart-queue')
def smart_queue_get():
    """Return smart-queue settings, exclusions and a page of run history.

    `history_limit` query param is clamped to 1..100 (default 10).
    """
    from ..services import smart_queue
    profile = preferences.active_profile()
    if not profile:
        return ok({'settings': {}, 'exclusions': [], 'error': 'No profile'})
    try:
        raw_limit = request.args.get('history_limit', 10) or 10
        limit = min(max(int(raw_limit), 1), 100)
        pid = profile['id']
        return ok({
            'settings': smart_queue.get_settings(pid),
            'exclusions': smart_queue.list_exclusions(pid),
            'history': smart_queue.list_history(pid, limit=limit),
            'history_total': smart_queue.count_history(pid),
        })
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc), 'settings': {}, 'exclusions': []})
@bp.post('/smart-queue')
def smart_queue_save():
    """Persist smart-queue settings from the request body and return them."""
    from ..services import smart_queue
    profile = preferences.active_profile()
    if not profile:
        return ok({'settings': {}, 'error': 'No profile'})
    try:
        body = request.get_json(silent=True) or {}
        saved = smart_queue.save_settings(profile['id'], body)
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)})
    return ok({'settings': saved})
@bp.post('/smart-queue/check')
def smart_queue_check():
    """Force a smart-queue pass and return a fresh torrent snapshot patch."""
    from ..services import smart_queue
    profile = preferences.active_profile()
    if not profile:
        return ok({'result': {'ok': False, 'error': 'No profile'}})
    try:
        outcome = smart_queue.check(profile, force=True)
        # Refresh the cache right away so the UI shows the real Downloading
        # count immediately after the manual action.
        diff = torrent_cache.refresh(profile)
        rows = torrent_cache.snapshot(profile['id'])
        patch = dict(diff)
        patch['summary'] = cached_summary(profile['id'], rows, force=True)
        return ok({'result': outcome, 'torrent_patch': patch})
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
@bp.post('/smart-queue/exclusion')
def smart_queue_exclusion():
    """Toggle a manual smart-queue exclusion for one torrent by info-hash."""
    from ..services import smart_queue
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    body = request.get_json(silent=True) or {}
    info_hash = str(body.get('hash') or '').strip()
    if not info_hash:
        return jsonify({'ok': False, 'error': 'Missing torrent hash'}), 400
    excluded = bool(body.get('excluded', True))
    reason = str(body.get('reason') or 'manual')
    smart_queue.set_exclusion(profile['id'], info_hash, excluded, reason)
    return ok({'exclusions': smart_queue.list_exclusions(profile['id'])})
@bp.get('/traffic/history')
def traffic_history_get():
    """Return bucketed traffic history for the requested range.

    Before serving, a fresh live sample is recorded on a best-effort basis so
    the newest data point is current. Unknown `range` values fall back to 7d.
    """
    from ..services import traffic_history
    profile = preferences.active_profile()
    if not profile:
        return ok({'history': {'range': request.args.get('range') or '7d', 'bucket': 'day', 'rows': []}})
    range_name = request.args.get('range') or '7d'
    if range_name not in {'15m', '1h', '3h', '6h', '24h', '7d', '30d', '90d'}:
        range_name = '7d'
    try:
        try:
            # `rtorrent` is already imported at module level; the previous
            # redundant function-local re-import has been removed.
            status = rtorrent.system_status(profile)
            traffic_history.record(profile['id'], status.get('down_rate', 0), status.get('up_rate', 0), status.get('total_down', 0), status.get('total_up', 0), force=True)
        except Exception:
            # Sampling failure must not block serving the stored history.
            pass
        return ok({'history': traffic_history.history(profile['id'], range_name)})
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc), 'history': {'range': range_name, 'rows': []}})
@bp.get('/automations')
def automations_get():
    """List automation rules and their execution history for the active profile."""
    from ..services import automation_rules
    profile = preferences.active_profile()
    if not profile:
        return ok({'rules': [], 'history': [], 'error': 'No profile'})
    try:
        pid = profile['id']
        rules = automation_rules.list_rules(pid)
        history = automation_rules.list_history(pid)
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc), 'rules': [], 'history': []}), 500
    return ok({'rules': rules, 'history': history})
@bp.post('/automations')
def automations_save():
    """Create or update an automation rule from the request body."""
    from ..services import automation_rules
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        body = request.get_json(silent=True) or {}
        saved = automation_rules.save_rule(profile['id'], body)
        rules = automation_rules.list_rules(profile['id'])
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 400
    return ok({'rule': saved, 'rules': rules})
@bp.delete('/automations/<int:rule_id>')
def automations_delete(rule_id: int):
    """Delete one automation rule and return the remaining rules."""
    from ..services import automation_rules
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        automation_rules.delete_rule(rule_id, profile['id'])
        remaining = automation_rules.list_rules(profile['id'])
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 400
    return ok({'rules': remaining})
@bp.post('/automations/check')
def automations_check():
    """Run all automation rules immediately; return the outcome plus history."""
    from ..services import automation_rules
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        outcome = automation_rules.check(profile, force=True)
        history = automation_rules.list_history(profile['id'])
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
    return ok({'result': outcome, 'history': history})
@bp.delete('/automations/history')
def automations_history_clear():
    """Purge automation execution logs.

    Only the log rows are removed; rules and cooldown state stay unchanged.
    """
    from ..services import automation_rules
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        removed = automation_rules.clear_history(profile['id'])
        payload = {
            'deleted': removed,
            'history': automation_rules.list_history(profile['id']),
            'cleanup': cleanup_summary(),
        }
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
    return ok(payload)