labels and automations
This commit is contained in:
@@ -88,10 +88,26 @@ def delete_rule(rule_id: int, profile_id: int, user_id: int | None = None) -> No
|
||||
conn.execute('DELETE FROM automation_rule_state WHERE rule_id=? AND profile_id=?', (rule_id, profile_id))
|
||||
|
||||
|
||||
def list_history(profile_id: int, user_id: int | None = None, limit: int = 50) -> list[dict[str, Any]]:
    """Return the newest automation-history rows for a profile.

    Args:
        profile_id: Profile the history rows belong to.
        user_id: Owner of the rows; defaults to ``default_user_id()`` when omitted.
        limit: Maximum number of rows to return; clamped to 1..200 (default 50).

    Returns:
        History rows ordered by ``created_at`` descending.
    """
    user_id = user_id or default_user_id()
    # Clamp so a caller can never request an unbounded (or zero/negative) page.
    capped = max(1, min(int(limit or 50), 200))
    with connect() as conn:
        return conn.execute('SELECT * FROM automation_history WHERE user_id=? AND profile_id=? ORDER BY created_at DESC LIMIT ?', (user_id, profile_id, capped)).fetchall()
def delete_history_item(history_id: int, profile_id: int, user_id: int | None = None) -> int:
    """Delete a single automation-history row.

    Allows removing one history card from the UI without touching rules.

    Args:
        history_id: Primary key of the history row.
        profile_id: Profile the row belongs to (scopes the delete).
        user_id: Owner of the row; defaults to ``default_user_id()`` when omitted.

    Returns:
        Number of rows deleted (0 or 1).
    """
    user_id = user_id or default_user_id()
    with connect() as conn:
        cur = conn.execute('DELETE FROM automation_history WHERE id=? AND user_id=? AND profile_id=?', (int(history_id), user_id, profile_id))
        return int(cur.rowcount or 0)
def clear_history(profile_id: int, user_id: int | None = None) -> int:
    """Delete all automation-history rows for a profile.

    History cleanup is separate from deleting automation rules — rules and
    their state are untouched.

    Args:
        profile_id: Profile whose history is cleared.
        user_id: Owner of the rows; defaults to ``default_user_id()`` when omitted.

    Returns:
        Number of rows deleted.
    """
    user_id = user_id or default_user_id()
    with connect() as conn:
        cur = conn.execute('DELETE FROM automation_history WHERE user_id=? AND profile_id=?', (user_id, profile_id))
        return int(cur.rowcount or 0)
def _condition_true(t: dict[str, Any], cond: dict[str, Any]) -> bool:
|
||||
@@ -113,8 +129,8 @@ def _conditions_match(conn, rule: dict[str, Any], profile_id: int, t: dict[str,
|
||||
for cond in rule.get('conditions') or []:
|
||||
raw_ok = _condition_true(t, cond)
|
||||
negated = bool(cond.get('negate'))
|
||||
# Note: Negation is applied in the backend, so UI and API only store the condition flag.
|
||||
ok = (not raw_ok) if negated else raw_ok
|
||||
# Note: Conditions can now be negated in automation rules. Timed no-seeds keeps its old delayed behavior only for the positive condition, so old rules do not change.
|
||||
if cond.get('type') == 'no_seeds' and int(cond.get('minutes') or 0) > 0 and not negated:
|
||||
row = conn.execute('SELECT condition_since_at FROM automation_rule_state WHERE rule_id=? AND profile_id=? AND torrent_hash=?', (rule['id'], profile_id, h)).fetchone()
|
||||
if ok:
|
||||
@@ -128,7 +144,8 @@ def _conditions_match(conn, rule: dict[str, Any], profile_id: int, t: dict[str,
|
||||
return immediate_ok and delayed_ok
|
||||
|
||||
|
||||
def _cooldown_ok(conn, rule: dict[str, Any], profile_id: int, torrent_hash: str = '*') -> bool:
    """Return True when the rule's cooldown has elapsed (or no cooldown is set).

    Cooldown is rule-wide for batch execution; the ``'*'`` sentinel hash stores
    the last run timestamp for the whole rule.
    """
    cooldown = int(rule.get('cooldown_minutes') or 0)
    if cooldown <= 0:
        return True
    row = conn.execute('SELECT last_applied_at FROM automation_rule_state WHERE rule_id=? AND profile_id=? AND torrent_hash=?', (rule['id'], profile_id, torrent_hash)).fetchone()
    # NOTE(review): this guard was reconstructed from a gap in the diff —
    # confirm against the original (a never-run rule must not raise here).
    if not row or not row['last_applied_at']:
        return True
    return _now_ts() - _ts(row['last_applied_at']) >= cooldown * 60
def _mark_rule_cooldown(conn, rule: dict[str, Any], profile_id: int, now: str) -> None:
|
||||
# Note: Cooldown is rule-level, so one batch execution blocks the whole automation until the cooldown expires.
|
||||
conn.execute('INSERT INTO automation_rule_state(rule_id,profile_id,torrent_hash,last_applied_at,updated_at) VALUES(?,?,?,?,?) ON CONFLICT(rule_id,profile_id,torrent_hash) DO UPDATE SET last_applied_at=excluded.last_applied_at, updated_at=excluded.updated_at', (rule['id'], profile_id, '__rule__', now, now))
|
||||
def _touch_rule_cooldown(conn, rule: dict[str, Any], profile_id: int, now: str) -> None:
|
||||
conn.execute('INSERT INTO automation_rule_state(rule_id,profile_id,torrent_hash,last_matched_at,last_applied_at,updated_at) VALUES(?,?,?,?,?,?) ON CONFLICT(rule_id,profile_id,torrent_hash) DO UPDATE SET last_matched_at=excluded.last_matched_at, last_applied_at=excluded.last_applied_at, updated_at=excluded.updated_at', (rule['id'], profile_id, '*', now, now, now))
|
||||
|
||||
|
||||
def _apply_effects_batch(c: Any, profile: dict[str, Any], torrents: list[dict[str, Any]], effects: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Apply a rule's effects once to the whole batch of matching torrents.

    Rules execute actions for all matching torrents at once instead of calling
    move/check/start one item at a time.

    NOTE(review): this body was reconstructed from an interleaved commit diff;
    the ``for eff ...`` loop header fell in a hunk gap — confirm against the
    original file.

    Args:
        c: XML-RPC-style client exposing ``call(method, hash, ...)``.
        profile: Active rTorrent profile dict.
        torrents: Matched torrent dicts (``hash``, ``label``, ...).
        effects: Effect dicts, each with a ``type`` key.

    Returns:
        One summary dict per applied effect.
    """
    hashes = [str(t.get('hash') or '') for t in torrents if str(t.get('hash') or '')]
    # Track label state locally so successive label effects see earlier edits.
    labels_by_hash = {str(t.get('hash') or ''): _label_names(t.get('label')) for t in torrents}
    applied: list[dict[str, Any]] = []
    for eff in effects or []:
        typ = str(eff.get('type') or '')
        if typ == 'move':
            path = str(eff.get('path') or '').strip() or rtorrent.default_download_path(profile)
            move_payload = {'path': path, 'move_data': bool(eff.get('move_data')), 'recheck': bool(eff.get('recheck', eff.get('move_data'))), 'keep_seeding': bool(eff.get('keep_seeding'))}
            result = rtorrent.move_torrents(profile, hashes, move_payload) if path else None
            if path:
                applied.append({'type': 'move', 'path': path, 'count': len(hashes), 'move_data': bool(eff.get('move_data')), 'recheck': bool(move_payload['recheck']), 'keep_seeding': bool(eff.get('keep_seeding')), 'result': result})
        elif typ == 'add_label':
            label = str(eff.get('label') or '').strip()
            changed = 0
            if label:
                for h in hashes:
                    labels = labels_by_hash.get(h, [])
                    if label not in labels:
                        # Only count torrents whose label set actually changed.
                        labels.append(label); labels_by_hash[h] = labels; c.call('d.custom1.set', h, _label_value(labels)); changed += 1
                applied.append({'type': 'add_label', 'label': label, 'count': changed})
        elif typ == 'remove_label':
            label = str(eff.get('label') or '').strip(); changed = 0
            if label:
                for h in hashes:
                    labels = labels_by_hash.get(h, [])
                    new_labels = [x for x in labels if x != label]
                    if new_labels != labels:
                        labels_by_hash[h] = new_labels; c.call('d.custom1.set', h, _label_value(new_labels)); changed += 1
                applied.append({'type': 'remove_label', 'label': label, 'count': changed})
        elif typ == 'set_labels':
            # Normalize once, then overwrite every torrent's label set.
            value = _label_value(_label_names(eff.get('labels'))); new_labels = _label_names(value)
            for h in hashes:
                labels_by_hash[h] = list(new_labels); c.call('d.custom1.set', h, value)
            applied.append({'type': 'set_labels', 'labels': value, 'count': len(hashes)})
        elif typ in {'pause', 'stop', 'start', 'resume', 'recheck'}:
            method = {'pause': 'd.pause', 'stop': 'd.stop', 'start': 'd.start', 'resume': 'd.resume', 'recheck': 'd.check_hash'}[typ]
            for h in hashes:
                c.call(method, h)
            applied.append({'type': typ, 'count': len(hashes)})
    return applied
def check(profile: dict | None = None, user_id: int | None = None, force: bool = False) -> dict[str, Any]:
    """Evaluate all automation rules for a profile and apply matching effects.

    Automations execute as one batch per rule, not as one independent action
    per torrent. NOTE(review): reconstructed from an interleaved commit diff;
    the ``force`` default was truncated in the hunk header — confirm it is
    ``False`` in the original.

    Args:
        profile: Active rTorrent profile dict; error result when falsy.
        user_id: Owner of rules/history; defaults to ``default_user_id()``.
        force: When True, include disabled rules and bypass cooldowns.

    Returns:
        Summary dict: ``ok``, ``checked``, ``rules``, ``applied``.
    """
    if not profile:
        return {'ok': False, 'error': 'No active rTorrent profile'}
    user_id = user_id or default_user_id(); profile_id = int(profile['id'])
    rules = [r for r in list_rules(profile_id, user_id) if force or int(r.get('enabled') or 0)]
    if not rules:
        return {'ok': True, 'checked': 0, 'applied': [], 'rules': 0}
    torrents = rtorrent.list_torrents(profile); c = rtorrent.client_for(profile); applied = []; now = utcnow()
    with connect() as conn:
        for rule in rules:
            matched = [t for t in torrents if _conditions_match(conn, rule, profile_id, t)]
            if not matched: continue
            # Rule-wide cooldown stored under the '*' sentinel hash.
            if not force and not _cooldown_ok(conn, rule, profile_id, '*'): continue
            hashes = [str(t.get('hash') or '') for t in matched if str(t.get('hash') or '')]
            if not hashes:
                continue
            names = [str(t.get('name') or '') for t in matched]
            try: actions = _apply_effects_batch(c, profile, matched, rule.get('effects') or [])
            except Exception as exc: actions = [{'error': str(exc), 'count': len(hashes)}]
            _touch_rule_cooldown(conn, rule, profile_id, now)
            # Keep per-torrent state rows in addition to the rule-wide stamp.
            for h in hashes:
                conn.execute('INSERT INTO automation_rule_state(rule_id,profile_id,torrent_hash,last_matched_at,last_applied_at,updated_at) VALUES(?,?,?,?,?,?) ON CONFLICT(rule_id,profile_id,torrent_hash) DO UPDATE SET last_matched_at=excluded.last_matched_at, last_applied_at=excluded.last_applied_at, updated_at=excluded.updated_at', (rule['id'], profile_id, h, now, now, now))
            # One history row per batch, capped so huge batches stay bounded.
            history_payload = {'mode': 'batch', 'count': len(hashes), 'hashes': hashes, 'names': names[:50], 'actions': actions}
            torrent_name = names[0] if len(names) == 1 else f'{len(hashes)} torrents'
            torrent_hash = hashes[0] if len(hashes) == 1 else ','.join(hashes[:20])
            conn.execute('INSERT INTO automation_history(user_id,profile_id,rule_id,torrent_hash,torrent_name,rule_name,actions_json,created_at) VALUES(?,?,?,?,?,?,?,?)', (user_id, profile_id, rule['id'], torrent_hash, torrent_name, str(rule.get('name') or ''), json.dumps(history_payload), now))
            applied.append({'rule_id': rule['id'], 'rule_name': rule.get('name'), 'count': len(hashes), 'hashes': hashes, 'names': names[:20], 'actions': actions})
    return {'ok': True, 'checked': len(torrents), 'rules': len(rules), 'applied': applied}
Reference in New Issue
Block a user