Compare commits
5 Commits
master
...
fix_labesl
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2691442fc1 | ||
|
|
98f155b53a | ||
|
|
0730e7316c | ||
|
|
7c31136535 | ||
|
|
a72b6eb364 |
@@ -111,8 +111,11 @@ def _conditions_match(conn, rule: dict[str, Any], profile_id: int, t: dict[str,
|
||||
if not h: return False
|
||||
immediate_ok = True; delayed_ok = True; now = utcnow(); now_ts = _now_ts()
|
||||
for cond in rule.get('conditions') or []:
|
||||
ok = _condition_true(t, cond)
|
||||
if cond.get('type') == 'no_seeds' and int(cond.get('minutes') or 0) > 0:
|
||||
raw_ok = _condition_true(t, cond)
|
||||
negated = bool(cond.get('negate'))
|
||||
# Note: Negation is applied in the backend, so UI and API only store the condition flag.
|
||||
ok = (not raw_ok) if negated else raw_ok
|
||||
if cond.get('type') == 'no_seeds' and int(cond.get('minutes') or 0) > 0 and not negated:
|
||||
row = conn.execute('SELECT condition_since_at FROM automation_rule_state WHERE rule_id=? AND profile_id=? AND torrent_hash=?', (rule['id'], profile_id, h)).fetchone()
|
||||
if ok:
|
||||
since = row['condition_since_at'] if row and row.get('condition_since_at') else now
|
||||
@@ -125,33 +128,59 @@ def _conditions_match(conn, rule: dict[str, Any], profile_id: int, t: dict[str,
|
||||
return immediate_ok and delayed_ok
|
||||
|
||||
|
||||
def _cooldown_ok(conn, rule: dict[str, Any], profile_id: int, torrent_hash: str) -> bool:
|
||||
def _cooldown_ok(conn, rule: dict[str, Any], profile_id: int, torrent_hash: str = '__rule__') -> bool:
|
||||
cooldown = int(rule.get('cooldown_minutes') or 0)
|
||||
if cooldown <= 0: return True
|
||||
row = conn.execute('SELECT last_applied_at FROM automation_rule_state WHERE rule_id=? AND profile_id=? AND torrent_hash=?', (rule['id'], profile_id, torrent_hash)).fetchone()
|
||||
if not row or not row.get('last_applied_at'): return True
|
||||
return _now_ts() - _ts(row['last_applied_at']) >= cooldown * 60
|
||||
|
||||
|
||||
def _apply_effects(c: Any, profile: dict[str, Any], torrent: dict[str, Any], effects: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
||||
h = str(torrent.get('hash') or ''); labels = _label_names(torrent.get('label')); applied = []
|
||||
def _mark_rule_cooldown(conn, rule: dict[str, Any], profile_id: int, now: str) -> None:
|
||||
# Note: Cooldown is rule-level, so one batch execution blocks the whole automation until the cooldown expires.
|
||||
conn.execute('INSERT INTO automation_rule_state(rule_id,profile_id,torrent_hash,last_applied_at,updated_at) VALUES(?,?,?,?,?) ON CONFLICT(rule_id,profile_id,torrent_hash) DO UPDATE SET last_applied_at=excluded.last_applied_at, updated_at=excluded.updated_at', (rule['id'], profile_id, '__rule__', now, now))
|
||||
|
||||
|
||||
def _apply_effects_bulk(c: Any, profile: dict[str, Any], torrents: list[dict[str, Any]], effects: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
||||
hashes = [str(t.get('hash') or '') for t in torrents if str(t.get('hash') or '')]
|
||||
labels_by_hash = {str(t.get('hash') or ''): _label_names(t.get('label')) for t in torrents}
|
||||
applied: list[dict[str, Any]] = []
|
||||
if not hashes: return applied
|
||||
for eff in effects:
|
||||
typ = str(eff.get('type') or '')
|
||||
if typ == 'move':
|
||||
# Note: Automation move-to-path now uses the same move implementation as the main app action.
|
||||
path = str(eff.get('path') or '').strip() or rtorrent.default_download_path(profile)
|
||||
move_payload = {'path': path, 'move_data': bool(eff.get('move_data')), 'recheck': bool(eff.get('recheck', eff.get('move_data'))), 'keep_seeding': bool(eff.get('keep_seeding'))}
|
||||
result = rtorrent.move_torrents(profile, [h], move_payload) if path else None
|
||||
if path: applied.append({'type': 'move', 'path': path, 'move_data': bool(eff.get('move_data')), 'recheck': bool(move_payload['recheck']), 'keep_seeding': bool(eff.get('keep_seeding')), 'result': result})
|
||||
payload = {
|
||||
'path': path,
|
||||
'move_data': bool(eff.get('move_data')),
|
||||
'recheck': bool(eff.get('recheck', eff.get('move_data'))),
|
||||
'keep_seeding': bool(eff.get('keep_seeding')),
|
||||
}
|
||||
result = rtorrent.action(profile, hashes, 'move', payload)
|
||||
applied.append({'type': 'move', 'path': path, 'count': len(hashes), 'move_data': payload['move_data'], 'recheck': payload['recheck'], 'keep_seeding': payload['keep_seeding'], 'result': result})
|
||||
elif typ == 'add_label':
|
||||
label = str(eff.get('label') or '').strip()
|
||||
if label and label not in labels: labels.append(label); c.call('d.custom1.set', h, _label_value(labels))
|
||||
if label: applied.append({'type': 'add_label', 'label': label})
|
||||
if label:
|
||||
for h in hashes:
|
||||
labels = labels_by_hash.setdefault(h, [])
|
||||
if label not in labels:
|
||||
labels.append(label); c.call('d.custom1.set', h, _label_value(labels))
|
||||
applied.append({'type': 'add_label', 'label': label, 'count': len(hashes)})
|
||||
elif typ == 'remove_label':
|
||||
label = str(eff.get('label') or '').strip(); labels = [x for x in labels if x != label]; c.call('d.custom1.set', h, _label_value(labels)); applied.append({'type': 'remove_label', 'label': label})
|
||||
label = str(eff.get('label') or '').strip()
|
||||
if label:
|
||||
for h in hashes:
|
||||
labels = [x for x in labels_by_hash.get(h, []) if x != label]
|
||||
labels_by_hash[h] = labels; c.call('d.custom1.set', h, _label_value(labels))
|
||||
applied.append({'type': 'remove_label', 'label': label, 'count': len(hashes)})
|
||||
elif typ == 'set_labels':
|
||||
value = _label_value(_label_names(eff.get('labels'))); c.call('d.custom1.set', h, value); labels = _label_names(value); applied.append({'type': 'set_labels', 'labels': value})
|
||||
value = _label_value(_label_names(eff.get('labels')))
|
||||
for h in hashes:
|
||||
labels_by_hash[h] = _label_names(value); c.call('d.custom1.set', h, value)
|
||||
applied.append({'type': 'set_labels', 'labels': value, 'count': len(hashes)})
|
||||
elif typ in {'pause', 'stop', 'start', 'resume', 'recheck'}:
|
||||
method = {'pause':'d.pause','stop':'d.stop','start':'d.start','resume':'d.resume','recheck':'d.check_hash'}[typ]; c.call(method, h); applied.append({'type': typ})
|
||||
result = rtorrent.action(profile, hashes, typ, {})
|
||||
applied.append({'type': typ, 'count': len(hashes), 'result': result})
|
||||
return applied
|
||||
|
||||
|
||||
@@ -160,17 +189,30 @@ def check(profile: dict | None = None, user_id: int | None = None, force: bool =
|
||||
if not profile: return {'ok': False, 'error': 'No active rTorrent profile'}
|
||||
user_id = user_id or default_user_id(); profile_id = int(profile['id'])
|
||||
rules = [r for r in list_rules(profile_id, user_id) if force or int(r.get('enabled') or 0)]
|
||||
if not rules: return {'ok': True, 'checked': 0, 'applied': [], 'rules': 0}
|
||||
torrents = rtorrent.list_torrents(profile); c = rtorrent.client_for(profile); applied = []; now = utcnow()
|
||||
if not rules: return {'ok': True, 'checked': 0, 'applied': [], 'batches': [], 'rules': 0}
|
||||
torrents = rtorrent.list_torrents(profile); c = rtorrent.client_for(profile); applied = []; batches = []; now = utcnow()
|
||||
with connect() as conn:
|
||||
for rule in rules:
|
||||
for t in torrents:
|
||||
# Note: Automations now execute as one batch per rule, not as one independent action per torrent.
|
||||
if not force and not _cooldown_ok(conn, rule, profile_id):
|
||||
continue
|
||||
matched = [t for t in torrents if _conditions_match(conn, rule, profile_id, t)]
|
||||
if not matched:
|
||||
continue
|
||||
hashes = [str(t.get('hash') or '') for t in matched if str(t.get('hash') or '')]
|
||||
if not hashes:
|
||||
continue
|
||||
try:
|
||||
actions = _apply_effects_bulk(c, profile, matched, rule.get('effects') or [])
|
||||
except Exception as exc:
|
||||
actions = [{'error': str(exc), 'count': len(hashes)}]
|
||||
for t in matched:
|
||||
h = str(t.get('hash') or '')
|
||||
if not _conditions_match(conn, rule, profile_id, t): continue
|
||||
if not force and not _cooldown_ok(conn, rule, profile_id, h): continue
|
||||
try: actions = _apply_effects(c, profile, t, rule.get('effects') or [])
|
||||
except Exception as exc: actions = [{'error': str(exc)}]
|
||||
conn.execute('INSERT INTO automation_rule_state(rule_id,profile_id,torrent_hash,last_matched_at,last_applied_at,updated_at) VALUES(?,?,?,?,?,?) ON CONFLICT(rule_id,profile_id,torrent_hash) DO UPDATE SET last_matched_at=excluded.last_matched_at, last_applied_at=excluded.last_applied_at, updated_at=excluded.updated_at', (rule['id'], profile_id, h, now, now, now))
|
||||
conn.execute('INSERT INTO automation_history(user_id,profile_id,rule_id,torrent_hash,torrent_name,rule_name,actions_json,created_at) VALUES(?,?,?,?,?,?,?,?)', (user_id, profile_id, rule['id'], h, str(t.get('name') or ''), str(rule.get('name') or ''), json.dumps(actions), now))
|
||||
applied.append({'rule_id': rule['id'], 'rule_name': rule.get('name'), 'hash': h, 'name': t.get('name'), 'actions': actions})
|
||||
return {'ok': True, 'checked': len(torrents), 'rules': len(rules), 'applied': applied}
|
||||
applied.append({'rule_id': rule['id'], 'rule_name': rule.get('name'), 'hash': h, 'name': t.get('name'), 'actions': [{'type': a.get('type', 'error'), 'count': a.get('count', len(hashes))} for a in actions]})
|
||||
_mark_rule_cooldown(conn, rule, profile_id, now)
|
||||
torrent_name = str(matched[0].get('name') or '') if len(matched) == 1 else f'{len(matched)} torrents'
|
||||
torrent_hash = hashes[0] if len(hashes) == 1 else f'batch:{rule["id"]}:{now}'
|
||||
conn.execute('INSERT INTO automation_history(user_id,profile_id,rule_id,torrent_hash,torrent_name,rule_name,actions_json,created_at) VALUES(?,?,?,?,?,?,?,?)', (user_id, profile_id, rule['id'], torrent_hash, torrent_name, str(rule.get('name') or ''), json.dumps(actions), now))
|
||||
batches.append({'rule_id': rule['id'], 'rule_name': rule.get('name'), 'count': len(hashes), 'actions': actions})
|
||||
return {'ok': True, 'checked': len(torrents), 'rules': len(rules), 'applied': applied, 'batches': batches}
|
||||
|
||||
@@ -1274,73 +1274,6 @@ def start_or_resume_hash(c: ScgiRtorrentClient, torrent_hash: str) -> dict:
|
||||
result['ok'] = result.get('ok', True)
|
||||
return result
|
||||
|
||||
|
||||
def move_torrents(profile: dict, torrent_hashes: list[str], payload: dict | None = None) -> dict:
    """Move torrents to a new directory, optionally relocating data on disk.

    Shared move implementation so the API 'move' action and automation
    move-to-path behave identically.

    Payload keys:
        path: target directory (required; cleaned via _remote_clean_path).
        move_data: physically move the data with a remote mv.
        recheck: hash-check after a data move (defaults to move_data).
        keep_seeding: force d.start afterwards even if the torrent was stopped.

    Returns:
        {'ok', 'count', 'move_data', 'keep_seeding', 'results': [per-hash items]}.

    Raises:
        ValueError: when path is missing, or a source path cannot be resolved.
    """
    payload = payload or {}
    c = client_for(profile)
    path = _remote_clean_path(payload.get("path") or "")
    move_data = bool(payload.get("move_data"))
    recheck = bool(payload.get("recheck", move_data))
    # keep_seeding lets automation move completed data and force seeding back on.
    keep_seeding = bool(payload.get("keep_seeding"))
    if not path:
        raise ValueError("Missing path")
    results = []
    if move_data:
        # Ensure the destination directory exists before any per-torrent move.
        _rt_execute_allow_timeout(c, "execute.throw", "mkdir", "-p", path)
    for h in torrent_hashes:
        item = {"hash": h, "path": path, "move_data": move_data, "keep_seeding": keep_seeding}
        # Capture prior state so seeding can be restored after the move.
        try:
            was_state = int(c.call("d.state", h) or 0)
        except Exception:
            was_state = 0
        try:
            was_active = int(c.call("d.is_active", h) or 0)
        except Exception:
            was_active = was_state
        if move_data:
            src = _remote_clean_path(_torrent_data_path(c, h))
            if not src:
                raise ValueError(f"Cannot determine source path for {h}")
            dst = _remote_join(path, posixpath.basename(src.rstrip("/")))
            if src != dst:
                # Stop and close first so rtorrent releases the files (best-effort).
                try:
                    c.call("d.stop", h)
                except Exception:
                    pass
                try:
                    c.call("d.close", h)
                except Exception:
                    pass
                _run_remote_move(c, src, dst)
                item["moved_from"] = src
                item["moved_to"] = dst
            else:
                item["skipped"] = "source and destination are the same"
            c.call("d.directory.set", h, path)
            if recheck:
                try:
                    c.call("d.check_hash", h)
                except Exception as exc:
                    item["recheck_error"] = str(exc)
            if keep_seeding or was_state or was_active:
                try:
                    c.call("d.start", h)
                    item["started_after_move"] = True
                except Exception as exc:
                    item["start_error"] = str(exc)
        else:
            # Path-only change: just repoint rtorrent's directory for the torrent.
            c.call("d.directory.set", h, path)
            if keep_seeding:
                try:
                    c.call("d.start", h)
                    item["started_after_path_change"] = True
                except Exception as exc:
                    item["start_error"] = str(exc)
        results.append(item)
    return {"ok": True, "count": len(torrent_hashes), "move_data": move_data, "keep_seeding": keep_seeding, "results": results}
|
||||
|
||||
def action(profile: dict, torrent_hashes: list[str], name: str, payload: dict | None = None) -> dict:
|
||||
payload = payload or {}
|
||||
c = client_for(profile)
|
||||
@@ -1361,8 +1294,61 @@ def action(profile: dict, torrent_hashes: list[str], name: str, payload: dict |
|
||||
c.call("d.custom.set", h, "py_ratio_group", group)
|
||||
return {"ok": True, "count": len(torrent_hashes), "ratio_group": group}
|
||||
if name == "move":
|
||||
# Note: Main move delegates to the shared helper used by automations.
|
||||
return move_torrents(profile, torrent_hashes, payload)
|
||||
path = _remote_clean_path(payload.get("path") or "")
|
||||
move_data = bool(payload.get("move_data"))
|
||||
recheck = bool(payload.get("recheck", move_data))
|
||||
keep_seeding = bool(payload.get("keep_seeding"))
|
||||
# Note: Automations can force seeding after a physical move even if the torrent was not active before.
|
||||
if not path:
|
||||
raise ValueError("Missing path")
|
||||
results = []
|
||||
if move_data:
|
||||
_rt_execute_allow_timeout(c, "execute.throw", "mkdir", "-p", path)
|
||||
for h in torrent_hashes:
|
||||
item = {"hash": h, "path": path, "move_data": move_data, "keep_seeding": keep_seeding}
|
||||
try:
|
||||
was_state = int(c.call("d.state", h) or 0)
|
||||
except Exception:
|
||||
was_state = 0
|
||||
try:
|
||||
was_active = int(c.call("d.is_active", h) or 0)
|
||||
except Exception:
|
||||
was_active = was_state
|
||||
if move_data:
|
||||
src = _remote_clean_path(_torrent_data_path(c, h))
|
||||
if not src:
|
||||
raise ValueError(f"Cannot determine source path for {h}")
|
||||
dst = _remote_join(path, posixpath.basename(src.rstrip("/")))
|
||||
if src != dst:
|
||||
try:
|
||||
c.call("d.stop", h)
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
c.call("d.close", h)
|
||||
except Exception:
|
||||
pass
|
||||
_run_remote_move(c, src, dst)
|
||||
item["moved_from"] = src
|
||||
item["moved_to"] = dst
|
||||
else:
|
||||
item["skipped"] = "source and destination are the same"
|
||||
c.call("d.directory.set", h, path)
|
||||
if recheck:
|
||||
try:
|
||||
c.call("d.check_hash", h)
|
||||
except Exception as exc:
|
||||
item["recheck_error"] = str(exc)
|
||||
if keep_seeding or was_state or was_active:
|
||||
try:
|
||||
c.call("d.start", h)
|
||||
item["started_after_move"] = True
|
||||
except Exception as exc:
|
||||
item["start_after_move_error"] = str(exc)
|
||||
else:
|
||||
c.call("d.directory.set", h, path)
|
||||
results.append(item)
|
||||
return {"ok": True, "count": len(torrent_hashes), "move_data": move_data, "keep_seeding": keep_seeding, "results": results}
|
||||
if name == "pause":
|
||||
# Note: The app pause action is now a pure d.pause so later resume works without stop/start.
|
||||
results = [pause_hash(c, h) for h in torrent_hashes]
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1238,36 +1238,60 @@ body.mobile-mode .mobile-card {
|
||||
color: var(--bs-primary-text-emphasis);
|
||||
}
|
||||
|
||||
.automation-form-grid {
|
||||
.automation-shell {
|
||||
display: grid;
|
||||
gap: 0.75rem;
|
||||
}
|
||||
.automation-main-card {
|
||||
padding: 0.75rem;
|
||||
border: 1px solid var(--bs-border-color);
|
||||
border-radius: 0.75rem;
|
||||
background: var(--bs-body-bg);
|
||||
}
|
||||
.automation-card-title {
|
||||
margin-bottom: 0.5rem;
|
||||
font-weight: 700;
|
||||
}
|
||||
.automation-rule-grid,
|
||||
.automation-builder-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(4, minmax(160px, 1fr));
|
||||
gap: 0.5rem;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.auto-move-option {
|
||||
gap: 0.45rem;
|
||||
.automation-enabled,
|
||||
.automation-negate {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
|
||||
.automation-builder-list {
|
||||
display: grid;
|
||||
grid-column: 1 / -1;
|
||||
gap: 0.4rem;
|
||||
}
|
||||
|
||||
.automation-chip {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 0.5rem;
|
||||
padding: 0.4rem 0.55rem;
|
||||
padding: 0.45rem 0.6rem 0.45rem 2.5rem;
|
||||
border: 1px solid var(--bs-border-color);
|
||||
border-radius: 0.55rem;
|
||||
background: var(--bs-secondary-bg);
|
||||
border-radius: 0.5rem;
|
||||
}
|
||||
.automation-path-input {
|
||||
grid-column: span 2;
|
||||
}
|
||||
.automation-chip-list {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 0.45rem;
|
||||
}
|
||||
.automation-chip {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 0.35rem;
|
||||
max-width: 100%;
|
||||
padding: 0.25rem 0.5rem;
|
||||
border: 1px solid var(--bs-border-color);
|
||||
border-radius: 999px;
|
||||
background: var(--bs-tertiary-bg);
|
||||
font-size: 0.82rem;
|
||||
}
|
||||
.automation-actions,
|
||||
.automation-row-actions {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 0.4rem;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.automation-row {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
@@ -1279,10 +1303,49 @@ body.mobile-mode .mobile-card {
|
||||
margin-bottom: 0.45rem;
|
||||
background: var(--bs-body-bg);
|
||||
}
|
||||
.automation-row-main {
|
||||
min-width: 0;
|
||||
}
|
||||
.automation-action-pill {
|
||||
display: inline-flex;
|
||||
max-width: 100%;
|
||||
margin: 0.1rem;
|
||||
padding: 0.15rem 0.4rem;
|
||||
border-radius: 999px;
|
||||
background: var(--bs-secondary-bg);
|
||||
font-size: 0.78rem;
|
||||
white-space: normal;
|
||||
}
|
||||
.automation-history-details {
|
||||
max-width: min(620px, 60vw);
|
||||
}
|
||||
.automation-history-details summary {
|
||||
cursor: pointer;
|
||||
list-style-position: inside;
|
||||
}
|
||||
.automation-history-details pre,
|
||||
.automation-history-raw {
|
||||
max-width: 100%;
|
||||
max-height: 220px;
|
||||
margin: 0.35rem 0 0;
|
||||
padding: 0.5rem;
|
||||
overflow: auto;
|
||||
border: 1px solid var(--bs-border-color);
|
||||
border-radius: 0.5rem;
|
||||
background: var(--bs-tertiary-bg);
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
}
|
||||
@media (max-width: 900px) {
|
||||
.automation-form-grid {
|
||||
.automation-rule-grid,
|
||||
.automation-builder-grid {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
.automation-path-input,
|
||||
.automation-history-details {
|
||||
grid-column: auto;
|
||||
max-width: 100%;
|
||||
}
|
||||
}
|
||||
.disk-status {
|
||||
display: inline-flex;
|
||||
|
||||
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user