first commit
This commit is contained in:
848
pytorrent/routes/api.py
Normal file
848
pytorrent/routes/api.py
Normal file
@@ -0,0 +1,848 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
import time
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
import urllib.request
|
||||
import urllib.parse
|
||||
import socket
|
||||
import json
|
||||
import psutil
|
||||
import xml.etree.ElementTree as ET
|
||||
from flask import Blueprint, jsonify, request
|
||||
from ..config import DB_PATH, JOBS_RETENTION_DAYS, SMART_QUEUE_HISTORY_RETENTION_DAYS, WORKERS
|
||||
from ..db import default_user_id, connect, utcnow
|
||||
from ..services import preferences, rtorrent
|
||||
from ..services.torrent_cache import torrent_cache
|
||||
from ..services.torrent_summary import cached_summary
|
||||
from ..services.workers import enqueue, list_jobs, cancel_job, retry_job, clear_jobs
|
||||
from ..services.geoip import lookup_ip
|
||||
|
||||
bp = Blueprint("api", __name__, url_prefix="/api")
|
||||
|
||||
|
||||
def ok(payload=None):
    """Build the standard success JSON response, merging *payload* if given."""
    body = {"ok": True}
    if payload:
        body.update(payload)
    return jsonify(body)
|
||||
|
||||
|
||||
|
||||
PORT_CHECK_CACHE_SECONDS = 6 * 60 * 60
|
||||
|
||||
|
||||
def _app_setting_get(key: str):
    """Fetch one value from app_settings, or None when the key is absent."""
    with connect() as conn:
        row = conn.execute("SELECT value FROM app_settings WHERE key=?", (key,)).fetchone()
    if not row:
        return None
    return row.get("value")
|
||||
|
||||
|
||||
def _app_setting_set(key: str, value: str):
    """Upsert a key/value pair into app_settings."""
    sql = "INSERT OR REPLACE INTO app_settings(key,value) VALUES(?,?)"
    with connect() as conn:
        conn.execute(sql, (key, value))
|
||||
|
||||
|
||||
def _iso_from_epoch(value) -> str | None:
|
||||
try:
|
||||
return datetime.fromtimestamp(float(value), timezone.utc).isoformat(timespec="seconds")
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def _public_ip(profile: dict | None = None, force: bool = False) -> str:
    """Resolve the public IP — via the remote host for remote profiles, else ipify."""
    if profile and bool(profile.get("is_remote")):
        return rtorrent.remote_public_ip(profile, force=force)
    lookup = urllib.request.Request(
        "https://api.ipify.org",
        headers={"User-Agent": "pyTorrent/port-check"},
    )
    with urllib.request.urlopen(lookup, timeout=8) as res:
        return res.read(64).decode("utf-8", "replace").strip()
|
||||
|
||||
|
||||
def _incoming_port(profile: dict) -> int | None:
    """Read rTorrent's incoming port from network.port_range; None if unavailable."""
    try:
        raw = str(rtorrent.client_for(profile).call("network.port_range") or "")
    except Exception:
        raw = ""
    found = re.search(r"(\d{2,5})", raw)
    if found is None:
        return None
    port = int(found.group(1))
    if 1 <= port <= 65535:
        return port
    return None
|
||||
|
||||
|
||||
def _yougetsignal_check(public_ip: str, port: int) -> dict:
    """Ask yougetsignal.com whether *port* on *public_ip* is reachable."""
    form = {"remoteAddress": public_ip, "portNumber": str(port)}
    headers = {
        "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
        "User-Agent": "pyTorrent/port-check",
        "Accept": "text/html,application/json,*/*",
    }
    req = urllib.request.Request(
        "https://ports.yougetsignal.com/check-port.php",
        data=urllib.parse.urlencode(form).encode("utf-8"),
        headers=headers,
        method="POST",
    )
    with urllib.request.urlopen(req, timeout=12) as res:
        text = res.read(8192).decode("utf-8", "replace")
    lowered = text.lower()
    if "is open" in lowered:
        status = "open"
    elif "is closed" in lowered:
        status = "closed"
    else:
        status = "unknown"
    return {"status": status, "source": "yougetsignal", "raw": text[:500]}
|
||||
|
||||
|
||||
def _local_port_fallback(public_ip: str, port: int) -> dict:
    """Best-effort direct TCP connect, used when the external checker fails."""
    outcome = None
    try:
        with socket.create_connection((public_ip, port), timeout=3):
            outcome = {"status": "open", "source": "local-fallback"}
    except Exception as exc:
        return {"status": "unknown", "source": "local-fallback", "error": f"Local fallback inconclusive: {exc}"}
    return outcome
|
||||
|
||||
|
||||
def port_check_status(force: bool = False) -> dict:
    """Check whether rTorrent's incoming port is reachable from the internet.

    The verdict is cached per profile+port in app_settings for
    PORT_CHECK_CACHE_SECONDS; pass force=True to bypass the cache. Uses
    YouGetSignal as the primary checker with a direct TCP connect fallback.
    """
    profile = preferences.active_profile()
    prefs = preferences.get_preferences()
    enabled = bool((prefs or {}).get("port_check_enabled"))
    if not profile:
        return {"status": "unknown", "enabled": enabled, "error": "No profile"}
    port = _incoming_port(profile)
    if not port:
        return {"status": "unknown", "enabled": enabled, "error": "Cannot read rTorrent network.port_range"}
    cache_key = f"port_check:{profile['id']}:{port}"
    if not force:
        cached = _app_setting_get(cache_key)
        if cached:
            try:
                data = json.loads(cached)
                # Serve the cached verdict while it is still fresh.
                if time.time() - float(data.get("checked_at_epoch") or 0) < PORT_CHECK_CACHE_SECONDS:
                    data["cached"] = True
                    # Reflect the *current* preference toggle, not the cached one.
                    data["enabled"] = enabled
                    if not data.get("checked_at"):
                        # Older cache entries may predate the readable timestamp field.
                        data["checked_at"] = _iso_from_epoch(data.get("checked_at_epoch"))
                    return data
            except Exception:
                # Corrupt cache entry: fall through and re-check.
                pass
    checked_at_epoch = time.time()
    result = {"status": "unknown", "enabled": enabled, "port": port, "checked_at_epoch": checked_at_epoch, "checked_at": _iso_from_epoch(checked_at_epoch), "cached": False}
    try:
        public_ip = _public_ip(profile, force=force)
        result["public_ip"] = public_ip
        result["remote"] = bool(profile.get("is_remote"))
        result.update(_yougetsignal_check(public_ip, port))
    except Exception as exc:
        # Primary checker failed — record why, then try a direct TCP connect.
        result["error"] = f"YouGetSignal failed: {exc}"
        try:
            public_ip = result.get("public_ip") or _public_ip(profile, force=force)
            result["public_ip"] = public_ip
            result["remote"] = bool(profile.get("is_remote"))
            result.update(_local_port_fallback(public_ip, port))
        except Exception as fallback_exc:
            result["fallback_error"] = str(fallback_exc)
            result["source"] = "none"
    # Cache even "unknown" outcomes so failing external services are rate-limited.
    _app_setting_set(cache_key, json.dumps(result))
    return result
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def _safe_len(callable_obj) -> int | None:
|
||||
try:
|
||||
return len(callable_obj())
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _table_count(table: str, where: str = "", params: tuple = ()) -> int:
    """Count rows in *table* (optionally filtered by *where*); 0 if the table is absent."""
    probe_sql = "SELECT name FROM sqlite_master WHERE type='table' AND name=?"
    with connect() as conn:
        if not conn.execute(probe_sql, (table,)).fetchone():
            return 0
        row = conn.execute(f"SELECT COUNT(*) AS n FROM {table} {where}", params).fetchone()
    return int((row or {}).get("n") or 0)
|
||||
|
||||
|
||||
def _db_size() -> dict:
    """Report the SQLite database file's size, human-readable form included."""
    location = str(DB_PATH)
    try:
        raw = DB_PATH.stat().st_size if DB_PATH.exists() else 0
        return {"path": location, "size": raw, "size_h": rtorrent.human_size(raw)}
    except Exception as exc:
        return {"path": location, "size": 0, "size_h": "0 B", "error": str(exc)}
|
||||
|
||||
|
||||
def cleanup_summary() -> dict:
    """Summarise cleanup state: job counts, history rows, retention and DB size."""
    retention = {
        "jobs": JOBS_RETENTION_DAYS,
        "smart_queue_history": SMART_QUEUE_HISTORY_RETENTION_DAYS,
    }
    return {
        "jobs_total": _table_count("jobs"),
        "jobs_clearable": _table_count("jobs", "WHERE status NOT IN ('pending', 'running')"),
        "smart_queue_history_total": _table_count("smart_queue_history"),
        "retention_days": retention,
        "database": _db_size(),
    }
|
||||
|
||||
def active_default_download_path(profile: dict | None) -> str:
    """Profile's default download directory, or "" when unknown or unavailable."""
    if profile:
        try:
            return rtorrent.default_download_path(profile)
        except Exception:
            return ""
    return ""
|
||||
|
||||
|
||||
def enrich_bulk_payload(profile: dict, action_name: str, data: dict) -> dict:
    """Normalise a bulk-action payload and attach job_context metadata.

    Hashes are coerced to a list of non-empty strings; when the torrent cache
    knows about them, per-item name/path details are embedded too.
    """
    payload = dict(data or {})
    raw_hashes = payload.get("hashes") or []
    if isinstance(raw_hashes, str):
        raw_hashes = [raw_hashes]
    hashes = [str(h) for h in raw_hashes if h]
    payload["hashes"] = hashes
    context = {
        "source": "api",
        "action": action_name,
        "bulk": len(hashes) > 1,
        "hash_count": len(hashes),
        "requested_at": utcnow(),
    }
    payload["job_context"] = context
    if hashes:
        try:
            lookup = {str(t.get("hash")): t for t in torrent_cache.snapshot(profile["id"])}
            context["items"] = [
                {
                    "hash": h,
                    "name": str((lookup.get(h) or {}).get("name") or ""),
                    "path": str((lookup.get(h) or {}).get("path") or ""),
                }
                for h in hashes
            ]
        except Exception as exc:
            # Item enrichment is best-effort; record the failure instead of raising.
            context["items_error"] = str(exc)
    if action_name == "move":
        context["target_path"] = str(payload.get("path") or "")
        context["move_data"] = bool(payload.get("move_data"))
    if action_name == "remove":
        context["remove_data"] = bool(payload.get("remove_data"))
    return payload
|
||||
|
||||
|
||||
@bp.get("/profiles")
def profiles_list():
    """List all rTorrent profiles plus the currently active one."""
    return ok({
        "profiles": preferences.list_profiles(),
        "active": preferences.active_profile(),
    })
|
||||
|
||||
|
||||
@bp.post("/profiles")
def profiles_create():
    """Create a profile from the JSON body; 400 with the error message on failure."""
    try:
        saved = preferences.save_profile(request.json or {})
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"profile": saved})
|
||||
|
||||
|
||||
@bp.put("/profiles/<int:profile_id>")
def profiles_update(profile_id: int):
    """Update an existing profile; 400 with the error message on failure."""
    try:
        saved = preferences.update_profile(profile_id, request.json or {})
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"profile": saved})
|
||||
|
||||
|
||||
@bp.delete("/profiles/<int:profile_id>")
def profiles_delete(profile_id: int):
    """Delete a profile and return the refreshed profile list."""
    preferences.delete_profile(profile_id)
    return ok({
        "profiles": preferences.list_profiles(),
        "active": preferences.active_profile(),
    })
|
||||
|
||||
|
||||
@bp.post("/profiles/<int:profile_id>/activate")
def profiles_activate(profile_id: int):
    """Make the given profile active; 404 when it does not exist."""
    try:
        activated = preferences.activate_profile(profile_id)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 404
    return ok({"profile": activated})
|
||||
|
||||
|
||||
@bp.get("/preferences")
def prefs_get():
    """Return the stored user preferences."""
    current = preferences.get_preferences()
    return ok({"preferences": current})
|
||||
|
||||
|
||||
@bp.post("/preferences")
def prefs_save():
    """Persist preferences from the JSON body and echo the saved state."""
    saved = preferences.save_preferences(request.json or {})
    return ok({"preferences": saved})
|
||||
|
||||
|
||||
@bp.get("/torrents")
def torrents():
    """Return the cached torrent list and summary for the active profile."""
    profile = preferences.active_profile()
    if not profile:
        return ok({"torrents": [], "summary": cached_summary(0, []), "error": "No rTorrent profile"})
    pid = profile["id"]
    rows = torrent_cache.snapshot(pid)
    payload = {
        "profile_id": pid,
        "torrents": rows,
        "summary": cached_summary(pid, rows),
        "error": torrent_cache.error(pid),
    }
    return ok(payload)
|
||||
|
||||
|
||||
@bp.get("/torrents/<torrent_hash>/files")
def torrent_files(torrent_hash: str):
    """List the files belonging to one torrent."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    files = rtorrent.torrent_files(profile, torrent_hash)
    return ok({"files": files})
|
||||
|
||||
|
||||
@bp.post("/torrents/<torrent_hash>/files/priority")
def torrent_file_priority(torrent_hash: str):
    """Apply per-file priorities; 207 when some files failed to update."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    files = body.get("files") or []
    if not isinstance(files, list) or not files:
        return jsonify({"ok": False, "error": "No files selected"}), 400
    result = rtorrent.set_file_priorities(profile, torrent_hash, files)
    http_status = 207 if result.get("errors") else 200
    return ok(result), http_status
|
||||
|
||||
|
||||
@bp.get("/torrents/<torrent_hash>/peers")
def torrent_peers(torrent_hash: str):
    """List peers for a torrent, enriched in place with GeoIP details."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    enriched = rtorrent.torrent_peers(profile, torrent_hash)
    for entry in enriched:
        entry.update(lookup_ip(entry.get("ip", "")))
    return ok({"peers": enriched})
|
||||
|
||||
|
||||
@bp.post("/torrents/<torrent_hash>/peers/action")
def torrent_peer_action(torrent_hash: str):
    """Run a per-peer action; 400 with detail on any failure."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    try:
        outcome = rtorrent.peer_action(profile, torrent_hash, int(body.get("peer_index")), str(body.get("action") or ""))
        message = f"Peer {outcome['action']} via {outcome['method']}"
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"result": outcome, "message": message})
|
||||
|
||||
|
||||
@bp.get("/torrents/<torrent_hash>/trackers")
def torrent_trackers(torrent_hash: str):
    """List trackers for one torrent."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    trackers = rtorrent.torrent_trackers(profile, torrent_hash)
    return ok({"trackers": trackers})
|
||||
|
||||
|
||||
@bp.post("/torrents/<torrent_hash>/trackers/<action_name>")
def torrent_tracker_action(torrent_hash: str, action_name: str):
    """Run a tracker action; 400 with detail on any failure."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    try:
        outcome = rtorrent.tracker_action(profile, torrent_hash, action_name, request.get_json(silent=True) or {})
        message = f"Tracker {action_name} via {outcome.get('method', 'XMLRPC')}"
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"result": outcome, "message": message})
|
||||
|
||||
|
||||
@bp.post("/torrents/<action_name>")
def torrent_action(action_name: str):
    """Queue a (possibly bulk) torrent action as a background job."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    valid_actions = {"start", "pause", "stop", "resume", "recheck", "reannounce", "remove", "move", "set_label", "set_ratio_group"}
    if action_name not in valid_actions:
        return jsonify({"ok": False, "error": "Unknown action"}), 400
    payload = enrich_bulk_payload(profile, action_name, body)
    hashes = payload.get("hashes") or []
    job_id = enqueue(action_name, profile["id"], payload)
    return ok({"job_id": job_id, "hash_count": len(hashes), "bulk": len(hashes) > 1})
|
||||
|
||||
|
||||
@bp.post("/torrents/add")
def torrent_add():
    """Queue magnet URIs and/or uploaded .torrent files for the active profile.

    Accepts multipart/form-data (uris + files fields) or a JSON body with a
    "uris" string/list; each URI or file becomes one background job.
    """
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    pid = profile["id"]
    job_ids = []
    content_type = request.content_type or ""
    if content_type.startswith("multipart/form-data"):
        start = request.form.get("start", "1") in {"1", "true", "on", "yes"}
        directory = request.form.get("directory", "") or active_default_download_path(profile)
        label = request.form.get("label", "")
        for line in request.form.get("uris", "").splitlines():
            uri = line.strip()
            if not uri:
                continue
            job_ids.append(enqueue("add_magnet", pid, {"uri": uri, "start": start, "directory": directory, "label": label}))
        for uploaded in request.files.getlist("files"):
            encoded = base64.b64encode(uploaded.read()).decode("ascii")
            job_ids.append(enqueue("add_torrent_raw", pid, {"filename": uploaded.filename, "data_b64": encoded, "start": start, "directory": directory, "label": label}))
        return ok({"job_ids": job_ids})
    body = request.get_json(silent=True) or {}
    uris = body.get("uris") or []
    if isinstance(uris, str):
        uris = [x.strip() for x in uris.splitlines() if x.strip()]
    for uri in uris:
        job_ids.append(enqueue("add_magnet", pid, {"uri": uri, "start": body.get("start", True), "directory": body.get("directory", "") or active_default_download_path(profile), "label": body.get("label", "")}))
    return ok({"job_ids": job_ids})
|
||||
|
||||
|
||||
@bp.post("/speed/limits")
def speed_limits():
    """Queue a job that applies global down/up speed limits."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    limits = {"down": body.get("down"), "up": body.get("up")}
    return ok({"job_id": enqueue("set_limits", profile["id"], limits)})
|
||||
|
||||
|
||||
@bp.get("/system/status")
def system_status():
    """rTorrent system status; local CPU/RAM usage is added for local profiles only.

    NOTE(review): error responses here return HTTP 200 (no status code),
    unlike most handlers in this module — confirm clients rely on that.
    """
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"})
    try:
        status = rtorrent.system_status(profile)
        if profile.get("is_remote"):
            status["usage_source"] = "remote-hidden"
            status["usage_available"] = False
        else:
            status["cpu"] = psutil.cpu_percent(interval=None)
            status["ram"] = psutil.virtual_memory().percent
            status["usage_source"] = "local"
            status["usage_available"] = True
        return ok({"status": status})
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)})
|
||||
|
||||
|
||||
@bp.get("/app/status")
def app_status():
    """Aggregate pyTorrent process health, cleanup stats, SCGI and port-check state.

    Returns a JSON blob with process metrics (memory, CPU, threads, uptime),
    cleanup counters, the active profile, SCGI diagnostics (when a profile
    exists) and the port-check state; api_ms records handler latency.
    """
    started = time.perf_counter()
    profile = preferences.active_profile()
    proc = psutil.Process(os.getpid())
    try:
        jobs = list_jobs(10, 0)
        jobs_total = jobs.get("total", 0)
    except Exception:
        jobs_total = 0
    # Read memory once: the original called proc.memory_info() twice per request.
    mem_rss = proc.memory_info().rss
    status = {
        "pytorrent": {
            "ok": True,
            "pid": os.getpid(),
            "uptime_seconds": round(time.time() - proc.create_time(), 1),
            "memory_rss": mem_rss,
            "memory_rss_h": rtorrent.human_size(mem_rss),
            "threads": proc.num_threads(),
            "cpu_percent": proc.cpu_percent(interval=None),
            "jobs_total": jobs_total,
            "python": platform.python_version(),
            "platform": platform.platform(),
            "executable": sys.executable,
            "worker_threads": WORKERS,
            # hasattr guards keep this working across psutil versions.
            "open_files": _safe_len(proc.open_files) if hasattr(proc, "open_files") else None,
            "connections": _safe_len(lambda: proc.net_connections(kind="inet")) if hasattr(proc, "net_connections") else None,
        },
        "cleanup": cleanup_summary(),
        "profile": profile,
        "scgi": None,
    }
    if profile:
        try:
            status["scgi"] = rtorrent.scgi_diagnostics(profile)
        except Exception as exc:
            status["scgi"] = {"ok": False, "error": str(exc), "url": profile.get("scgi_url")}
    try:
        prefs = preferences.get_preferences()
        if bool((prefs or {}).get("port_check_enabled")):
            status["port_check"] = port_check_status(force=False)
        else:
            status["port_check"] = {"status": "disabled", "enabled": False}
    except Exception as exc:
        status["port_check"] = {"status": "error", "error": str(exc)}
    status["api_ms"] = round((time.perf_counter() - started) * 1000, 2)
    return ok({"status": status})
|
||||
|
||||
|
||||
@bp.get("/port-check")
def port_check_get():
    """Cached port-check result; short-circuits when the feature is disabled."""
    prefs = preferences.get_preferences()
    enabled = bool((prefs or {}).get("port_check_enabled"))
    if not enabled:
        return ok({"port_check": {"status": "disabled", "enabled": False}})
    return ok({"port_check": port_check_status(force=False)})
|
||||
|
||||
|
||||
@bp.post("/port-check")
def port_check_post():
    """Force a fresh port check, bypassing the cached result."""
    result = port_check_status(force=True)
    return ok({"port_check": result})
|
||||
|
||||
|
||||
@bp.get("/jobs")
def jobs_list():
    """Paginated job listing.

    Query params ``limit`` (default 50) and ``offset`` (default 0). Non-numeric
    values now fall back to the defaults instead of raising an unhandled
    ValueError (a malformed query string previously produced a 500).
    """
    def _int_arg(name: str, default: int) -> int:
        # Tolerate garbage in the query string.
        try:
            return int(request.args.get(name, default))
        except (TypeError, ValueError):
            return default

    limit = _int_arg("limit", 50)
    offset = _int_arg("offset", 0)
    data = list_jobs(limit, offset)
    return ok({"jobs": data["rows"], "total": data["total"], "limit": data["limit"], "offset": data["offset"]})
|
||||
|
||||
|
||||
@bp.post("/jobs/clear")
def jobs_clear():
    """Delete all clearable jobs and report how many were removed."""
    return ok({"deleted": clear_jobs()})
|
||||
|
||||
|
||||
@bp.get("/cleanup/summary")
def cleanup_status():
    """Expose the cleanup counters without changing anything."""
    summary = cleanup_summary()
    return ok({"cleanup": summary})
|
||||
|
||||
|
||||
@bp.post("/cleanup/jobs")
def cleanup_jobs():
    """Clear finished jobs, then return updated cleanup counters."""
    removed = clear_jobs()
    return ok({"deleted": removed, "cleanup": cleanup_summary()})
|
||||
|
||||
|
||||
@bp.post("/cleanup/smart-queue")
def cleanup_smart_queue():
    """Wipe the smart-queue history table (if it exists)."""
    removed = 0
    with connect() as conn:
        table = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='smart_queue_history'").fetchone()
        if table:
            removed = int(conn.execute("DELETE FROM smart_queue_history").rowcount or 0)
    return ok({"deleted": removed, "cleanup": cleanup_summary()})
|
||||
|
||||
|
||||
@bp.post("/cleanup/all")
def cleanup_all():
    """Clear finished jobs and the smart-queue history in a single call."""
    deleted_jobs = clear_jobs()
    deleted_smart = 0
    with connect() as conn:
        table = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='smart_queue_history'").fetchone()
        if table:
            deleted_smart = int(conn.execute("DELETE FROM smart_queue_history").rowcount or 0)
    return ok({"deleted": {"jobs": deleted_jobs, "smart_queue_history": deleted_smart}, "cleanup": cleanup_summary()})
|
||||
|
||||
|
||||
@bp.post("/jobs/<job_id>/cancel")
def jobs_cancel(job_id: str):
    """Cancel a job; only pending or failed jobs are eligible."""
    if cancel_job(job_id):
        return ok()
    return jsonify({"ok": False, "error": "Only pending or failed jobs can be cancelled"}), 400
|
||||
|
||||
|
||||
@bp.post("/jobs/<job_id>/retry")
def jobs_retry(job_id: str):
    """Re-queue a job; only failed or cancelled jobs are eligible."""
    if retry_job(job_id):
        return ok()
    return jsonify({"ok": False, "error": "Only failed or cancelled jobs can be retried"}), 400
|
||||
|
||||
|
||||
@bp.get("/path/default")
def path_default():
    """Default download path for the active profile."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    try:
        path = rtorrent.default_download_path(profile)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok({"path": path})
|
||||
|
||||
|
||||
@bp.get("/path/browse")
def path_browse():
    """Browse a directory on the rTorrent host (?path=...)."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    start_dir = request.args.get("path") or ""
    try:
        listing = rtorrent.browse_path(profile, start_dir)
    except Exception as exc:
        return jsonify({"ok": False, "error": str(exc)}), 400
    return ok(listing)
|
||||
|
||||
|
||||
@bp.get("/labels")
def labels_list():
    """Labels for the active profile plus profile-agnostic ones."""
    profile = preferences.active_profile()
    pid = profile["id"] if profile else None
    sql = "SELECT * FROM labels WHERE user_id=? AND (profile_id=? OR profile_id IS NULL) ORDER BY name COLLATE NOCASE"
    with connect() as conn:
        rows = conn.execute(sql, (default_user_id(), pid)).fetchall()
    return ok({"labels": rows})
|
||||
|
||||
|
||||
@bp.post("/labels")
def labels_save():
    """Create a label (idempotent insert) and return the refreshed list."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    name = str(body.get("name") or "").strip()
    if not name:
        return jsonify({"ok": False, "error": "Missing label name"}), 400
    now = utcnow()
    values = (default_user_id(), profile["id"], name, body.get("color") or "#64748b", now, now)
    with connect() as conn:
        conn.execute("INSERT OR IGNORE INTO labels(user_id,profile_id,name,color,created_at,updated_at) VALUES(?,?,?,?,?,?)", values)
    return labels_list()
|
||||
|
||||
|
||||
@bp.delete("/labels/<int:label_id>")
def labels_delete(label_id: int):
    """Delete a user-owned label (profile-scoped or global) and return the list."""
    profile = preferences.active_profile()
    pid = profile["id"] if profile else None
    with connect() as conn:
        conn.execute("DELETE FROM labels WHERE id=? AND user_id=? AND (profile_id=? OR profile_id IS NULL)", (label_id, default_user_id(), pid))
    return labels_list()
|
||||
|
||||
|
||||
@bp.get("/ratio-groups")
def ratio_groups_list():
    """Ratio groups for the active profile plus profile-agnostic ones."""
    profile = preferences.active_profile()
    pid = profile["id"] if profile else None
    sql = "SELECT * FROM ratio_groups WHERE user_id=? AND (profile_id=? OR profile_id IS NULL) ORDER BY name COLLATE NOCASE"
    with connect() as conn:
        rows = conn.execute(sql, (default_user_id(), pid)).fetchall()
    return ok({"groups": rows})
|
||||
|
||||
|
||||
@bp.post("/ratio-groups")
def ratio_groups_save():
    """Create or replace a ratio group and return the refreshed list."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    body = request.get_json(silent=True) or {}
    name = str(body.get("name") or "").strip()
    if not name:
        return jsonify({"ok": False, "error": "Missing group name"}), 400
    now = utcnow()
    values = (
        default_user_id(),
        profile["id"],
        name,
        float(body.get("min_ratio") or 1),
        float(body.get("max_ratio") or 2),
        int(body.get("seed_time_minutes") or 0),
        body.get("action") or "stop",
        1 if body.get("enabled", True) else 0,
        now,
        now,
    )
    with connect() as conn:
        conn.execute("INSERT OR REPLACE INTO ratio_groups(user_id,profile_id,name,min_ratio,max_ratio,seed_time_minutes,action,enabled,created_at,updated_at) VALUES(?,?,?,?,?,?,?,?,?,?)", values)
    return ratio_groups_list()
|
||||
|
||||
|
||||
@bp.get("/rss")
def rss_list():
    """RSS feeds and rules for the active profile (plus profile-agnostic entries)."""
    profile = preferences.active_profile()
    pid = profile["id"] if profile else None
    params = (default_user_id(), pid)
    with connect() as conn:
        feeds = conn.execute("SELECT * FROM rss_feeds WHERE user_id=? AND (profile_id=? OR profile_id IS NULL) ORDER BY name", params).fetchall()
        rules = conn.execute("SELECT * FROM rss_rules WHERE user_id=? AND (profile_id=? OR profile_id IS NULL) ORDER BY name", params).fetchall()
    return ok({"feeds": feeds, "rules": rules})
|
||||
|
||||
|
||||
@bp.post("/rss/feeds")
def rss_feed_save():
    """Insert an RSS feed record and return the refreshed RSS state."""
    profile = preferences.active_profile()
    body = request.get_json(silent=True) or {}
    now = utcnow()
    values = (default_user_id(), profile["id"] if profile else None, body.get("name") or "RSS", body.get("url") or "", 1, now, now)
    with connect() as conn:
        conn.execute("INSERT INTO rss_feeds(user_id,profile_id,name,url,enabled,created_at,updated_at) VALUES(?,?,?,?,?,?,?)", values)
    return rss_list()
|
||||
|
||||
|
||||
@bp.post("/rss/rules")
def rss_rule_save():
    """Insert an RSS auto-download rule and return the refreshed RSS state."""
    profile = preferences.active_profile()
    body = request.get_json(silent=True) or {}
    now = utcnow()
    values = (
        default_user_id(),
        profile["id"] if profile else None,
        body.get("name") or "Rule",
        body.get("pattern") or ".*",
        body.get("save_path") or active_default_download_path(profile),
        body.get("label") or "",
        1 if body.get("start", True) else 0,
        1,
        now,
        now,
    )
    with connect() as conn:
        conn.execute("INSERT INTO rss_rules(user_id,profile_id,name,pattern,save_path,label,start,enabled,created_at,updated_at) VALUES(?,?,?,?,?,?,?,?,?,?)", values)
    return rss_list()
|
||||
|
||||
|
||||
@bp.post("/rss/check")
def rss_check():
    """Fetch every enabled RSS feed and enqueue items matching enabled rules.

    Returns the number of downloads queued. A feed failure is recorded on the
    feed row (last_error / last_checked_at) and does not abort other feeds.

    Fixes: the HTTP response from urlopen was never closed (socket leak), and
    the error path opened a second DB connection (rebinding ``conn``) while
    the first one was still inside its ``with`` block.
    """
    profile = preferences.active_profile()
    if not profile:
        return jsonify({"ok": False, "error": "No profile"}), 400
    pid = profile["id"]
    # Read feeds/rules first and close the connection before doing any
    # network I/O, so the per-feed error bookkeeping never nests connections.
    with connect() as conn:
        feeds = conn.execute("SELECT * FROM rss_feeds WHERE user_id=? AND profile_id=? AND enabled=1", (default_user_id(), pid)).fetchall()
        rules = conn.execute("SELECT * FROM rss_rules WHERE user_id=? AND profile_id=? AND enabled=1", (default_user_id(), pid)).fetchall()
    queued = 0
    for feed in feeds:
        try:
            # Close the HTTP response deterministically (previously leaked).
            with urllib.request.urlopen(feed["url"], timeout=10) as res:
                raw = res.read(2_000_000)
            root = ET.fromstring(raw)
            for item in root.findall('.//item')[:100]:
                title = item.findtext('title') or ''
                link = item.findtext('link') or ''
                enc = item.find('enclosure')
                # Prefer the enclosure URL (usually the .torrent) over <link>.
                if enc is not None and enc.get('url'):
                    link = enc.get('url') or link
                for rule in rules:
                    if re.search(rule["pattern"], title, re.I) and link:
                        enqueue("add_magnet", pid, {"uri": link, "start": bool(rule["start"]), "directory": rule.get("save_path") or active_default_download_path(profile), "label": rule.get("label") or ""})
                        queued += 1
        except Exception as exc:
            with connect() as err_conn:
                err_conn.execute("UPDATE rss_feeds SET last_error=?, last_checked_at=?, updated_at=? WHERE id=?", (str(exc), utcnow(), utcnow(), feed["id"]))
    return ok({"queued": queued})
|
||||
|
||||
|
||||
@bp.get('/rtorrent-config')
def rtorrent_config_get():
    """Current rTorrent configuration for the active profile."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        config = rtorrent.get_config(profile)
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
    return ok({'config': config})
|
||||
|
||||
@bp.post('/rtorrent-config')
def rtorrent_config_save():
    """Apply/persist rTorrent settings; 400 when any setting was rejected."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        body = request.get_json(silent=True) or {}
        outcome = rtorrent.set_config(
            profile,
            body.get('values') or {},
            bool(body.get('apply_now', True)),
            bool(body.get('apply_on_start')),
            body.get('clear_keys') or [],
        )
        if not outcome.get('ok'):
            return jsonify({'ok': False, 'error': 'Some settings were not saved', 'result': outcome}), 400
        return ok({'result': outcome})
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
|
||||
|
||||
|
||||
@bp.post('/rtorrent-config/generate')
def rtorrent_config_generate():
    """Render an rTorrent config snippet from the posted values."""
    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        body = request.get_json(silent=True) or {}
        text = rtorrent.generate_config_text(body.get('values') or {})
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
    return ok({'config_text': text})
|
||||
|
||||
@bp.get('/smart-queue')
def smart_queue_get():
    """Smart-queue settings, exclusions and a slice of run history."""
    from ..services import smart_queue
    profile = preferences.active_profile()
    if not profile:
        return ok({'settings': {}, 'exclusions': [], 'error': 'No profile'})
    try:
        # history_limit is clamped to [1, 100].
        limit = int(request.args.get('history_limit', 10) or 10)
        limit = max(1, min(limit, 100))
        pid = profile['id']
        return ok({
            'settings': smart_queue.get_settings(pid),
            'exclusions': smart_queue.list_exclusions(pid),
            'history': smart_queue.list_history(pid, limit=limit),
            'history_total': smart_queue.count_history(pid),
        })
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc), 'settings': {}, 'exclusions': []})
|
||||
|
||||
|
||||
@bp.post('/smart-queue')
def smart_queue_save():
    """Persist smart-queue settings for the active profile."""
    from ..services import smart_queue
    profile = preferences.active_profile()
    if not profile:
        return ok({'settings': {}, 'error': 'No profile'})
    try:
        body = request.get_json(silent=True) or {}
        saved = smart_queue.save_settings(profile['id'], body)
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)})
    return ok({'settings': saved})
|
||||
|
||||
|
||||
@bp.post('/smart-queue/check')
def smart_queue_check():
    """Force an immediate smart-queue evaluation pass."""
    from ..services import smart_queue
    profile = preferences.active_profile()
    if not profile:
        return ok({'result': {'ok': False, 'error': 'No profile'}})
    try:
        outcome = smart_queue.check(profile, force=True)
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
    return ok({'result': outcome})
|
||||
|
||||
|
||||
@bp.post('/smart-queue/exclusion')
def smart_queue_exclusion():
    """Add or remove a per-torrent Smart Queue exclusion.

    Expects JSON with ``hash`` (required), ``excluded`` (default true) and an
    optional ``reason`` (default ``manual``). Responds with the updated list.
    """
    from ..services import smart_queue

    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    body = request.get_json(silent=True) or {}
    torrent_hash = str(body.get('hash') or '').strip()
    if not torrent_hash:
        return jsonify({'ok': False, 'error': 'Missing torrent hash'}), 400
    excluded = bool(body.get('excluded', True))
    reason = str(body.get('reason') or 'manual')
    smart_queue.set_exclusion(profile['id'], torrent_hash, excluded, reason)
    return ok({'exclusions': smart_queue.list_exclusions(profile['id'])})
|
||||
|
||||
@bp.get('/traffic/history')
def traffic_history_get():
    """Return aggregated transfer history for charts.

    Query parameter ``range`` selects the window (one of 15m/1h/3h/6h/24h/
    7d/30d/90d); unknown values fall back to ``7d``. Before reading history
    a fresh rate sample is recorded from rTorrent so the newest point is
    current; sampling is best effort and failures are ignored.
    """
    from ..services import traffic_history

    allowed_ranges = {'15m', '1h', '3h', '6h', '24h', '7d', '30d', '90d'}
    # Validate the range up front so every response (including the
    # no-profile fallback) echoes a valid range name.
    range_name = request.args.get('range') or '7d'
    if range_name not in allowed_ranges:
        range_name = '7d'
    profile = preferences.active_profile()
    if not profile:
        return ok({'history': {'range': range_name, 'bucket': 'day', 'rows': []}})
    try:
        try:
            # Record a fresh sample so the chart includes the current moment.
            # NOTE: rtorrent is already imported at module level; the previous
            # redundant function-local import has been removed.
            status = rtorrent.system_status(profile)
            traffic_history.record(
                profile['id'],
                status.get('down_rate', 0),
                status.get('up_rate', 0),
                status.get('total_down', 0),
                status.get('total_up', 0),
                force=True,
            )
        except Exception:
            # Best effort: a failed sample must never block reading history.
            pass
        return ok({'history': traffic_history.history(profile['id'], range_name)})
    except Exception as exc:
        # Keep the error fallback shape consistent with the no-profile
        # response (previously it omitted the 'bucket' key).
        return jsonify({'ok': False, 'error': str(exc), 'history': {'range': range_name, 'bucket': 'day', 'rows': []}})
|
||||
|
||||
@bp.get('/automations')
def automations_get():
    """List automation rules and their execution history for the active profile."""
    from ..services import automation_rules

    profile = preferences.active_profile()
    if not profile:
        return ok({'rules': [], 'history': [], 'error': 'No profile'})
    try:
        profile_id = profile['id']
        rules = automation_rules.list_rules(profile_id)
        history = automation_rules.list_history(profile_id)
        return ok({'rules': rules, 'history': history})
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc), 'rules': [], 'history': []}), 500
|
||||
|
||||
|
||||
@bp.post('/automations')
def automations_save():
    """Create or update an automation rule from the JSON request body."""
    from ..services import automation_rules

    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        payload = request.get_json(silent=True) or {}
        saved = automation_rules.save_rule(profile['id'], payload)
        return ok({'rule': saved, 'rules': automation_rules.list_rules(profile['id'])})
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 400
|
||||
|
||||
|
||||
@bp.delete('/automations/<int:rule_id>')
def automations_delete(rule_id: int):
    """Delete one automation rule and return the remaining rule list."""
    from ..services import automation_rules

    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        automation_rules.delete_rule(rule_id, profile['id'])
        remaining = automation_rules.list_rules(profile['id'])
        return ok({'rules': remaining})
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 400
|
||||
|
||||
|
||||
@bp.post('/automations/check')
def automations_check():
    """Evaluate all automation rules immediately and return the outcome plus history."""
    from ..services import automation_rules

    profile = preferences.active_profile()
    if not profile:
        return jsonify({'ok': False, 'error': 'No profile'}), 400
    try:
        outcome = automation_rules.check(profile, force=True)
        history = automation_rules.list_history(profile['id'])
        return ok({'result': outcome, 'history': history})
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
|
||||
281
pytorrent/routes/main.py
Normal file
281
pytorrent/routes/main.py
Normal file
@@ -0,0 +1,281 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from flask import Blueprint, render_template, jsonify, Response
|
||||
from ..services.preferences import get_preferences, list_profiles, active_profile, BOOTSTRAP_THEMES, FONT_FAMILIES, bootstrap_css_url
|
||||
|
||||
bp = Blueprint("main", __name__)
|
||||
|
||||
|
||||
@bp.get("/")
|
||||
def index():
|
||||
prefs = get_preferences()
|
||||
return render_template(
|
||||
"index.html",
|
||||
prefs=prefs,
|
||||
profiles=list_profiles(),
|
||||
active_profile=active_profile(),
|
||||
bootstrap_themes=BOOTSTRAP_THEMES,
|
||||
font_families=FONT_FAMILIES,
|
||||
bootstrap_css_url=bootstrap_css_url((prefs or {}).get("bootstrap_theme")),
|
||||
)
|
||||
|
||||
|
||||
@bp.get("/docs")
|
||||
def docs():
|
||||
html = """<!doctype html><html lang=\"en\"><head><meta charset=\"utf-8\"><meta name=\"viewport\" content=\"width=device-width, initial-scale=1\"><title>pyTorrent API Docs</title><link rel=\"stylesheet\" href=\"https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui.css\"></head><body><div id=\"swagger-ui\"></div><script src=\"https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui-bundle.js\"></script><script>window.onload=()=>SwaggerUIBundle({url:'/api/openapi.json',dom_id:'#swagger-ui',deepLinking:true,persistAuthorization:true});</script></body></html>"""
|
||||
return Response(html, mimetype="text/html")
|
||||
|
||||
|
||||
@bp.get("/api/openapi.json")
|
||||
def openapi():
|
||||
paths = {
|
||||
"/api/profiles": {
|
||||
"get": {"summary": "List rTorrent profiles", "responses": {"200": {"description": "Profiles"}}},
|
||||
"post": {"summary": "Create rTorrent profile", "requestBody": {"required": True, "content": {"application/json": {"schema": {"type": "object", "properties": {"name": {"type": "string"}, "scgi_url": {"type": "string"}, "timeout_seconds": {"type": "integer"}, "max_parallel_jobs": {"type": "integer", "default": 5, "description": "Maximum queued jobs that may run at once for this rTorrent. Move/remove jobs keep request order."}, "is_remote": {"type": "boolean", "description": "When true, CPU/RAM host usage is hidden; public IP checks try remote rTorrent commands when supported."}}}}}}, "responses": {"200": {"description": "Created"}}}
|
||||
},
|
||||
|
||||
"/api/profiles/{profile_id}": {
|
||||
"put": {"summary": "Update rTorrent profile", "parameters": [{"name": "profile_id", "in": "path", "required": True, "schema": {"type": "integer"}}], "requestBody": {"required": True, "content": {"application/json": {"schema": {"type": "object", "properties": {"name": {"type": "string"}, "scgi_url": {"type": "string"}, "timeout_seconds": {"type": "integer"}, "max_parallel_jobs": {"type": "integer", "default": 5, "description": "Maximum queued jobs that may run at once for this rTorrent. Move/remove jobs keep request order."}, "is_remote": {"type": "boolean", "description": "When true, CPU/RAM host usage is hidden; public IP checks try remote rTorrent commands when supported."}}}}}}, "responses": {"200": {"description": "Updated"}}},
|
||||
"delete": {"summary": "Delete rTorrent profile", "parameters": [{"name": "profile_id", "in": "path", "required": True, "schema": {"type": "integer"}}], "responses": {"200": {"description": "Deleted"}}}
|
||||
},
|
||||
"/api/profiles/{profile_id}/activate": {"post": {"summary": "Activate profile", "parameters": [{"name": "profile_id", "in": "path", "required": True, "schema": {"type": "integer"}}], "responses": {"200": {"description": "Activated"}}}},
|
||||
"/api/preferences": {
|
||||
"get": {"summary": "Get preferences", "responses": {"200": {"description": "Preferences including theme, bootstrap_theme and font_family"}}},
|
||||
"post": {
|
||||
"summary": "Save preferences",
|
||||
"requestBody": {"content": {"application/json": {"schema": {"type": "object", "properties": {
|
||||
"theme": {"type": "string", "enum": ["light", "dark"]},
|
||||
"bootstrap_theme": {"type": "string", "enum": list(BOOTSTRAP_THEMES.keys())},
|
||||
"font_family": {"type": "string", "enum": list(FONT_FAMILIES.keys())},
|
||||
"table_columns_json": {"type": "string"},
|
||||
"peers_refresh_seconds": {"type": "integer", "enum": [0, 10, 15, 30, 60]},
|
||||
"port_check_enabled": {"type": "boolean"},
|
||||
}}}}},
|
||||
"responses": {"200": {"description": "Saved"}},
|
||||
},
|
||||
},
|
||||
"/api/torrents": {"get": {"summary": "Get cached torrent snapshot", "responses": {"200": {"description": "Torrent list"}}}},
|
||||
"/api/torrents/{action_name}": {"post": {"summary": "Queue torrent action", "description": "For move, path is the target directory; move_data=true physically moves data on the rTorrent host using a detached shell move with status polling, force-overwrites an existing destination, tolerates rTorrent execute timeouts around mkdir/start/polling, handles retries after a partially completed move, avoids SCGI timeout on long mv operations, and recheck defaults to move_data. Move and remove jobs are ordered per profile, so a later remove waits for earlier move/remove jobs to finish.", "parameters": [{"name": "action_name", "in": "path", "required": True, "schema": {"type": "string", "enum": ["start", "pause", "stop", "resume", "recheck", "remove", "move", "set_label", "set_ratio_group"]}}], "requestBody": {"content": {"application/json": {"schema": {"type": "object", "properties": {"hashes": {"type": "array", "items": {"type": "string"}}, "path": {"type": "string", "description": "Target directory for move"}, "move_data": {"type": "boolean", "description": "Physically move data before setting torrent directory"}, "recheck": {"type": "boolean", "description": "Run hash check after physical move; defaults to move_data"}, "label": {"type": "string"}, "ratio_group": {"type": "string"}, "remove_data": {"type": "boolean"}}}}}}, "responses": {"200": {"description": "Job queued"}}}},
|
||||
"/api/torrents/add": {"post": {"summary": "Add magnet links or torrent files", "requestBody": {"content": {"multipart/form-data": {"schema": {"type": "object", "properties": {"uris": {"type": "string"}, "directory": {"type": "string"}, "label": {"type": "string"}, "start": {"type": "boolean"}, "files": {"type": "array", "items": {"type": "string", "format": "binary"}}}}}, "application/json": {"schema": {"type": "object"}}}}, "responses": {"200": {"description": "Jobs queued"}}}},
|
||||
"/api/torrents/{torrent_hash}/files": {"get": {"summary": "Torrent files", "parameters": [{"name": "torrent_hash", "in": "path", "required": True, "schema": {"type": "string"}}], "responses": {"200": {"description": "Files"}}}},
|
||||
"/api/torrents/{torrent_hash}/peers": {"get": {"summary": "Torrent peers with GeoIP", "parameters": [{"name": "torrent_hash", "in": "path", "required": True, "schema": {"type": "string"}}], "responses": {"200": {"description": "Peers"}}}},
|
||||
"/api/torrents/{torrent_hash}/trackers": {"get": {"summary": "Torrent trackers", "parameters": [{"name": "torrent_hash", "in": "path", "required": True, "schema": {"type": "string"}}], "responses": {"200": {"description": "Trackers"}}}},
|
||||
"/api/speed/limits": {"post": {"summary": "Queue global speed limit change", "requestBody": {"content": {"application/json": {"schema": {"type": "object", "properties": {"down": {"type": "integer", "description": "Bytes per second, 0 unlimited"}, "up": {"type": "integer", "description": "Bytes per second, 0 unlimited"}}}}}}, "responses": {"200": {"description": "Job queued"}}}},
|
||||
"/api/system/status": {"get": {"summary": "rTorrent/system status", "description": "For remote profiles CPU/RAM host usage is not returned and usage_available is false.", "responses": {"200": {"description": "Status"}}}},
|
||||
"/api/port-check": {"get": {"summary": "Read cached incoming port check status", "responses": {"200": {"description": "Port check status including status, port, public_ip, source, cached, checked_at and checked_at_epoch"}}}, "post": {"summary": "Run incoming port check immediately, bypassing cache", "responses": {"200": {"description": "Fresh port check status including checked_at and checked_at_epoch"}}}},
|
||||
"/api/jobs": {"get": {"summary": "List job queue history", "parameters": [{"name": "limit", "in": "query", "schema": {"type": "integer", "default": 50}}, {"name": "offset", "in": "query", "schema": {"type": "integer", "default": 0}}], "responses": {"200": {"description": "Jobs"}}}},
|
||||
"/api/jobs/clear": {"post": {"summary": "Clear finished job history", "description": "Deletes jobs that are not pending or running.", "responses": {"200": {"description": "Deleted count"}}}},
|
||||
"/api/jobs/{job_id}/cancel": {"post": {"summary": "Cancel pending or failed job", "parameters": [{"name": "job_id", "in": "path", "required": True, "schema": {"type": "string"}}], "responses": {"200": {"description": "Cancelled"}}}},
|
||||
"/api/jobs/{job_id}/retry": {"post": {"summary": "Retry failed or cancelled job", "parameters": [{"name": "job_id", "in": "path", "required": True, "schema": {"type": "string"}}], "responses": {"200": {"description": "Retried"}}}},
|
||||
"/api/path/browse": {"get": {"summary": "Browse server directories", "parameters": [{"name": "path", "in": "query", "schema": {"type": "string"}}], "responses": {"200": {"description": "Directory listing"}}}},
|
||||
"/api/labels": {"get": {"summary": "List labels", "responses": {"200": {"description": "Labels"}}}, "post": {"summary": "Create label", "requestBody": {"content": {"application/json": {"schema": {"type": "object"}}}}, "responses": {"200": {"description": "Labels"}}}},
|
||||
"/api/ratio-groups": {"get": {"summary": "List ratio groups", "responses": {"200": {"description": "Ratio groups"}}}, "post": {"summary": "Create or update ratio group", "requestBody": {"content": {"application/json": {"schema": {"type": "object"}}}}, "responses": {"200": {"description": "Ratio groups"}}}},
|
||||
"/api/rss": {"get": {"summary": "List RSS feeds and rules", "responses": {"200": {"description": "RSS config"}}}},
|
||||
"/api/rss/feeds": {"post": {"summary": "Add RSS feed", "requestBody": {"content": {"application/json": {"schema": {"type": "object"}}}}, "responses": {"200": {"description": "RSS config"}}}},
|
||||
"/api/rss/rules": {"post": {"summary": "Add RSS rule", "requestBody": {"content": {"application/json": {"schema": {"type": "object"}}}}, "responses": {"200": {"description": "RSS config"}}}},
|
||||
"/api/rss/check": {"post": {"summary": "Manually check RSS feeds", "responses": {"200": {"description": "Queued matches"}}}},
|
||||
"/api/smart-queue": {"get": {"summary": "Get Smart Queue settings, exceptions and history", "parameters": [{"name": "history_limit", "in": "query", "schema": {"type": "integer", "default": 10, "minimum": 1, "maximum": 100}, "description": "Number of Smart Queue history rows to return"}], "responses": {"200": {"description": "Smart Queue config with history and history_total"}}}, "post": {"summary": "Save Smart Queue settings", "requestBody": {"content": {"application/json": {"schema": {"type": "object", "properties": {"enabled": {"type": "boolean"}, "max_active_downloads": {"type": "integer"}, "stalled_seconds": {"type": "integer"}, "min_speed_bytes": {"type": "integer"}, "min_seeds": {"type": "integer"}}}}}}, "responses": {"200": {"description": "Saved"}}}},
|
||||
"/api/smart-queue/check": {"post": {"summary": "Run Smart Queue immediately", "responses": {"200": {"description": "Smart Queue action result"}}}},
|
||||
"/api/smart-queue/exclusion": {"post": {"summary": "Add or remove a torrent Smart Queue exception", "requestBody": {"content": {"application/json": {"schema": {"type": "object", "properties": {"hash": {"type": "string"}, "excluded": {"type": "boolean"}, "reason": {"type": "string"}}}}}}, "responses": {"200": {"description": "Exception list"}}}},
|
||||
"/api/traffic/history": {"get": {"summary": "Transfer history for charts", "parameters": [{"name": "range", "in": "query", "schema": {"type": "string", "enum": ["15m", "1h", "3h", "6h", "24h", "7d", "30d", "90d"]}}], "responses": {"200": {"description": "Aggregated traffic history"}}}}
|
||||
}
|
||||
paths.update({
|
||||
"/api/profiles/{profile_id}": {"delete": {"summary": "Delete rTorrent profile", "parameters": [{"name": "profile_id", "in": "path", "required": True, "schema": {"type": "integer"}}], "responses": {"200": {"description": "Deleted"}}}},
|
||||
"/api/path/default": {"get": {"summary": "Read active rTorrent default download path", "responses": {"200": {"description": "Default path"}}}},
|
||||
"/api/torrents/{torrent_hash}/files/priority": {"post": {"summary": "Set file priorities", "parameters": [{"name": "torrent_hash", "in": "path", "required": True, "schema": {"type": "string"}}], "requestBody": {"content": {"application/json": {"schema": {"type": "object", "properties": {"files": {"type": "array", "items": {"type": "object", "properties": {"index": {"type": "integer"}, "priority": {"type": "integer", "enum": [0, 1, 2]}}}}}}}}}, "responses": {"200": {"description": "Updated priorities"}, "207": {"description": "Partial update"}}}},
|
||||
"/api/torrents/{torrent_hash}/peers/action": {"post": {"summary": "Run peer action", "parameters": [{"name": "torrent_hash", "in": "path", "required": True, "schema": {"type": "string"}}], "requestBody": {"content": {"application/json": {"schema": {"type": "object", "properties": {"peer_index": {"type": "integer"}, "action": {"type": "string", "enum": ["disconnect", "kick", "snub", "unsnub", "ban"]}}}}}}, "responses": {"200": {"description": "Peer action result"}}}},
|
||||
"/api/labels/{label_id}": {"delete": {"summary": "Delete saved label", "parameters": [{"name": "label_id", "in": "path", "required": True, "schema": {"type": "integer"}}], "responses": {"200": {"description": "Labels"}}}},
|
||||
"/api/rtorrent-config": {"get": {"summary": "Read supported rTorrent config fields", "responses": {"200": {"description": "Config fields"}}}, "post": {"summary": "Save supported rTorrent config fields", "requestBody": {"content": {"application/json": {"schema": {"type": "object", "properties": {"values": {"type": "object"}}}}}}, "responses": {"200": {"description": "Save result"}}}},
|
||||
"/api/rtorrent-config/generate": {"post": {"summary": "Generate rTorrent config text from provided values", "requestBody": {"content": {"application/json": {"schema": {"type": "object", "properties": {"values": {"type": "object"}}}}}}, "responses": {"200": {"description": "Generated config text"}}}},
|
||||
"/api/automations": {"get": {"summary": "List automation rules and history", "responses": {"200": {"description": "Rules and history"}}}, "post": {"summary": "Create or update automation rule", "requestBody": {"content": {"application/json": {"schema": {"type": "object", "properties": {"name": {"type": "string"}, "enabled": {"type": "boolean"}, "cooldown_minutes": {"type": "integer"}, "conditions": {"type": "array"}, "effects": {"type": "array"}}}}}}, "responses": {"200": {"description": "Rule saved"}}}},
|
||||
"/api/automations/{rule_id}": {"delete": {"summary": "Delete automation rule", "parameters": [{"name": "rule_id", "in": "path", "required": True, "schema": {"type": "integer"}}], "responses": {"200": {"description": "Deleted"}}}},
|
||||
"/api/automations/check": {"post": {"summary": "Run automation rules immediately", "responses": {"200": {"description": "Automation result"}}}}
|
||||
})
|
||||
components = {
|
||||
"schemas": {
|
||||
"ApiOk": {
|
||||
"type": "object",
|
||||
"properties": {"ok": {"type": "boolean"}},
|
||||
"required": ["ok"],
|
||||
},
|
||||
"Profile": {
|
||||
"type": "object",
|
||||
"additionalProperties": True,
|
||||
"properties": {
|
||||
"id": {"type": "integer"},
|
||||
"name": {"type": "string"},
|
||||
"scgi_url": {"type": "string"},
|
||||
"timeout_seconds": {"type": "integer"},
|
||||
"max_parallel_jobs": {"type": "integer"},
|
||||
},
|
||||
},
|
||||
"Torrent": {
|
||||
"type": "object",
|
||||
"additionalProperties": True,
|
||||
"properties": {
|
||||
"hash": {"type": "string"},
|
||||
"name": {"type": "string"},
|
||||
"path": {"type": "string"},
|
||||
"status": {"type": "string"},
|
||||
"size": {"type": "integer", "format": "int64"},
|
||||
"completed_bytes": {"type": "integer", "format": "int64"},
|
||||
"down_total": {"type": "integer", "format": "int64"},
|
||||
"up_total": {"type": "integer", "format": "int64"},
|
||||
"complete": {"type": "boolean"},
|
||||
"state": {"type": "boolean"},
|
||||
"paused": {"type": "boolean"},
|
||||
"hashing": {"type": "integer"},
|
||||
"message": {"type": "string"},
|
||||
},
|
||||
},
|
||||
"TorrentFilterSummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"count": {"type": "integer", "description": "Number of torrents in this filter."},
|
||||
"size": {"type": "integer", "format": "int64", "description": "Total torrent payload size in bytes."},
|
||||
"disk_bytes": {"type": "integer", "format": "int64", "description": "Completed bytes reported by rTorrent; used as the displayed Data value."},
|
||||
"completed_bytes": {"type": "integer", "format": "int64", "description": "Completed bytes reported by rTorrent."},
|
||||
"remaining_bytes": {"type": "integer", "format": "int64", "description": "size - completed_bytes, never below zero."},
|
||||
"progress_percent": {"type": "number", "format": "float", "description": "Completed percentage for this filter."},
|
||||
"remaining_percent": {"type": "number", "format": "float", "description": "Remaining percentage for this filter."},
|
||||
"down_total": {"type": "integer", "format": "int64", "deprecated": True, "description": "Backward compatibility field; not used by the filters UI."},
|
||||
"up_total": {"type": "integer", "format": "int64", "deprecated": True, "description": "Backward compatibility field; not used by the filters UI."},
|
||||
},
|
||||
"required": ["count", "size", "disk_bytes", "completed_bytes", "remaining_bytes", "progress_percent", "remaining_percent"],
|
||||
},
|
||||
"TorrentSummaryFilters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"all": {"$ref": "#/components/schemas/TorrentFilterSummary"},
|
||||
"downloading": {"$ref": "#/components/schemas/TorrentFilterSummary"},
|
||||
"seeding": {"$ref": "#/components/schemas/TorrentFilterSummary"},
|
||||
"paused": {"$ref": "#/components/schemas/TorrentFilterSummary"},
|
||||
"checking": {"$ref": "#/components/schemas/TorrentFilterSummary"},
|
||||
"error": {"$ref": "#/components/schemas/TorrentFilterSummary"},
|
||||
"stopped": {"$ref": "#/components/schemas/TorrentFilterSummary"},
|
||||
},
|
||||
"required": ["all", "downloading", "seeding", "paused", "checking", "error", "stopped"],
|
||||
},
|
||||
"TorrentSummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"filters": {"$ref": "#/components/schemas/TorrentSummaryFilters"},
|
||||
"cache_ttl_seconds": {"type": "integer", "description": "Summary cache TTL in seconds."},
|
||||
"generated_at_epoch": {"type": "number", "format": "double", "description": "Unix timestamp when summary was generated."},
|
||||
"cached": {"type": "boolean", "description": "True when returned from cache."},
|
||||
},
|
||||
"required": ["filters", "cache_ttl_seconds", "generated_at_epoch", "cached"],
|
||||
},
|
||||
"TorrentListResponse": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/ApiOk"},
|
||||
{"type": "object", "properties": {
|
||||
"profile_id": {"type": "integer"},
|
||||
"torrents": {"type": "array", "items": {"$ref": "#/components/schemas/Torrent"}},
|
||||
"summary": {"$ref": "#/components/schemas/TorrentSummary"},
|
||||
"error": {"type": "string", "nullable": True},
|
||||
}, "required": ["torrents", "summary"]},
|
||||
],
|
||||
},
|
||||
"CleanupSummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"jobs_total": {"type": "integer"},
|
||||
"jobs_clearable": {"type": "integer"},
|
||||
"smart_queue_history_total": {"type": "integer"},
|
||||
"retention_days": {"type": "object", "properties": {"jobs": {"type": "integer"}, "smart_queue_history": {"type": "integer"}}},
|
||||
"database": {"type": "object", "properties": {"path": {"type": "string"}, "size": {"type": "integer", "format": "int64"}, "size_h": {"type": "string"}, "error": {"type": "string"}}},
|
||||
},
|
||||
"required": ["jobs_total", "jobs_clearable", "smart_queue_history_total", "retention_days", "database"],
|
||||
},
|
||||
"CleanupResponse": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/ApiOk"},
|
||||
{"type": "object", "properties": {"cleanup": {"$ref": "#/components/schemas/CleanupSummary"}, "deleted": {"oneOf": [{"type": "integer"}, {"type": "object"}]}}},
|
||||
],
|
||||
},
|
||||
"PortCheckStatus": {
|
||||
"type": "object",
|
||||
"additionalProperties": True,
|
||||
"properties": {
|
||||
"status": {"type": "string", "enum": ["open", "closed", "unknown", "disabled", "error"]},
|
||||
"enabled": {"type": "boolean"},
|
||||
"port": {"type": "integer"},
|
||||
"public_ip": {"type": "string"},
|
||||
"source": {"type": "string"},
|
||||
"cached": {"type": "boolean"},
|
||||
"checked_at": {"type": "string", "format": "date-time"},
|
||||
"checked_at_epoch": {"type": "number", "format": "double"},
|
||||
"error": {"type": "string"},
|
||||
},
|
||||
},
|
||||
"AppStatus": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"pytorrent": {"type": "object", "additionalProperties": True},
|
||||
"cleanup": {"$ref": "#/components/schemas/CleanupSummary"},
|
||||
"profile": {"$ref": "#/components/schemas/Profile"},
|
||||
"scgi": {"type": "object", "nullable": True, "additionalProperties": True},
|
||||
"port_check": {"$ref": "#/components/schemas/PortCheckStatus"},
|
||||
"api_ms": {"type": "number", "format": "float"},
|
||||
},
|
||||
"required": ["pytorrent", "cleanup", "scgi", "port_check", "api_ms"],
|
||||
},
|
||||
"AppStatusResponse": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/ApiOk"},
|
||||
{"type": "object", "properties": {"status": {"$ref": "#/components/schemas/AppStatus"}}, "required": ["status"]},
|
||||
],
|
||||
},
|
||||
"JobQueuedResponse": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/ApiOk"},
|
||||
{"type": "object", "properties": {"job_id": {"type": "string"}, "job_ids": {"type": "array", "items": {"type": "string"}}, "hash_count": {"type": "integer"}, "bulk": {"type": "boolean"}}},
|
||||
],
|
||||
},
|
||||
"TrackerActionResponse": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/ApiOk"},
|
||||
{"type": "object", "properties": {"result": {"type": "object", "additionalProperties": True}, "message": {"type": "string"}}},
|
||||
],
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
def response_ref(schema_name: str, description: str = "OK") -> dict:
|
||||
return {"description": description, "content": {"application/json": {"schema": {"$ref": f"#/components/schemas/{schema_name}"}}}}
|
||||
|
||||
paths["/api/torrents"]["get"]["responses"]["200"] = response_ref("TorrentListResponse", "Torrent list with cached filter summary")
|
||||
paths["/api/torrents/{action_name}"]["post"]["responses"]["200"] = response_ref("JobQueuedResponse", "Job queued")
|
||||
paths["/api/torrents/add"]["post"]["responses"]["200"] = response_ref("JobQueuedResponse", "Jobs queued")
|
||||
|
||||
paths.update({
|
||||
"/api/torrents/{torrent_hash}/trackers/{action_name}": {
|
||||
"post": {
|
||||
"summary": "Run tracker action",
|
||||
"parameters": [
|
||||
{"name": "torrent_hash", "in": "path", "required": True, "schema": {"type": "string"}},
|
||||
{"name": "action_name", "in": "path", "required": True, "schema": {"type": "string"}},
|
||||
],
|
||||
"requestBody": {"content": {"application/json": {"schema": {"type": "object"}}}},
|
||||
"responses": {"200": response_ref("TrackerActionResponse", "Tracker action result")},
|
||||
}
|
||||
},
|
||||
"/api/app/status": {
|
||||
"get": {"summary": "pyTorrent application status", "responses": {"200": response_ref("AppStatusResponse", "Application status")}}
|
||||
},
|
||||
"/api/cleanup/summary": {
|
||||
"get": {"summary": "Cleanup summary", "responses": {"200": response_ref("CleanupResponse", "Cleanup summary")}}
|
||||
},
|
||||
"/api/cleanup/jobs": {
|
||||
"post": {"summary": "Clear finished job history", "responses": {"200": response_ref("CleanupResponse", "Cleanup result")}}
|
||||
},
|
||||
"/api/cleanup/smart-queue": {
|
||||
"post": {"summary": "Clear Smart Queue history", "responses": {"200": response_ref("CleanupResponse", "Cleanup result")}}
|
||||
},
|
||||
"/api/cleanup/all": {
|
||||
"post": {"summary": "Clear all cleanup-supported history", "responses": {"200": response_ref("CleanupResponse", "Cleanup result")}}
|
||||
},
|
||||
})
|
||||
|
||||
return jsonify({"openapi": "3.0.3", "info": {"title": "pyTorrent API", "version": "0.2.0"}, "paths": paths, "components": components})
|
||||
Reference in New Issue
Block a user