automatyzacje-commit5

This commit is contained in:
Mateusz Gruszczyński
2026-05-07 09:16:52 +02:00
parent 7a4bda98a2
commit 85e1e6adcd

View File

@@ -286,27 +286,36 @@ def check(profile: dict | None = None, user_id: int | None = None, force: bool =
user_id = user_id or default_user_id(); profile_id = int(profile['id'])
rules = [r for r in list_rules(profile_id, user_id) if force or int(r.get('enabled') or 0)]
if not rules: return {'ok': True, 'checked': 0, 'applied': [], 'batches': [], 'rules': 0}
torrents = rtorrent.list_torrents(profile); c = rtorrent.client_for(profile); applied = []; batches = []; now = utcnow()
torrents = rtorrent.list_torrents(profile); applied = []; batches = []; now = utcnow()
planned: list[dict[str, Any]] = []
with connect() as conn:
for rule in rules:
# Note: Automations now execute as one batch per rule, not as one independent action per torrent.
# Note: This pass only matches rules and updates condition timers; job creation is intentionally delayed until after this DB transaction commits.
if not force and not _cooldown_ok(conn, rule, profile_id):
continue
matched = [t for t in torrents if _conditions_match(conn, rule, profile_id, t)]
if not matched:
continue
hashes = [str(t.get('hash') or '') for t in matched if str(t.get('hash') or '')]
if not hashes:
continue
if hashes:
planned.append({'rule': rule, 'matched': matched, 'hashes': hashes})
for item in planned:
rule = item['rule']
matched = item['matched']
hashes = item['hashes']
# Note: Automation jobs are enqueued outside the rule-state transaction, preventing SQLite self-locks when enqueue() writes to jobs.
try:
actions = _apply_effects_bulk(c, profile, matched, rule.get('effects') or [], rule, user_id)
actions = _apply_effects_bulk(None, profile, matched, rule.get('effects') or [], rule, user_id)
except Exception as exc:
actions = [{'error': str(exc), 'count': len(hashes), 'target_hashes': hashes}]
changed_hashes = sorted({h for a in actions for h in (a.get('target_hashes') or [])})
if not actions or not changed_hashes:
# Note: Matching torrents with no real action are not logged and do not restart the cooldown.
continue
history_actions = [{k: v for k, v in a.items() if k != 'target_hashes'} for a in actions]
matched_by_hash = {str(t.get('hash') or ''): t for t in matched}
with connect() as conn:
# Note: State/history writes happen after enqueue succeeds, so failed job creation does not create misleading automation history.
for h in changed_hashes:
t = matched_by_hash.get(h, {})
conn.execute('INSERT INTO automation_rule_state(rule_id,profile_id,torrent_hash,last_matched_at,last_applied_at,updated_at) VALUES(?,?,?,?,?,?) ON CONFLICT(rule_id,profile_id,torrent_hash) DO UPDATE SET last_matched_at=excluded.last_matched_at, last_applied_at=excluded.last_applied_at, updated_at=excluded.updated_at', (rule['id'], profile_id, h, now, now, now))
@@ -314,7 +323,6 @@ def check(profile: dict | None = None, user_id: int | None = None, force: bool =
_mark_rule_cooldown(conn, rule, profile_id, now)
torrent_name = str(matched_by_hash.get(changed_hashes[0], {}).get('name') or '') if len(changed_hashes) == 1 else f'{len(changed_hashes)} torrents'
torrent_hash = changed_hashes[0] if len(changed_hashes) == 1 else f'batch:{rule["id"]}:{now}'
history_actions = [{k: v for k, v in a.items() if k != 'target_hashes'} for a in actions]
conn.execute('INSERT INTO automation_history(user_id,profile_id,rule_id,torrent_hash,torrent_name,rule_name,actions_json,created_at) VALUES(?,?,?,?,?,?,?,?)', (user_id, profile_id, rule['id'], torrent_hash, torrent_name, str(rule.get('name') or ''), json.dumps(history_actions), now))
batches.append({'rule_id': rule['id'], 'rule_name': rule.get('name'), 'count': len(changed_hashes), 'actions': history_actions})
return {'ok': True, 'checked': len(torrents), 'rules': len(rules), 'applied': applied, 'batches': batches}