first commit

Mateusz Gruszczyński
2026-04-12 21:26:12 +02:00
commit ff7dbcb4e4
123 changed files with 27749 additions and 0 deletions

@@ -0,0 +1,321 @@
import difflib
from datetime import datetime, timedelta, timezone
from pathlib import Path
from fastapi import HTTPException
from sqlalchemy.orm import Session, joinedload
from app.models.backup import Backup
from app.models.router import Router
from app.models.user import User
from app.services.file_service import compute_checksum, ensure_data_dir
from app.services.log_service import log_service
from app.services.notification_service import notification_service
from app.services.router_service import router_service
from app.services.settings_service import settings_service
class BackupService:
def _router_for_user(self, db: Session, user: User, router_id: int) -> Router:
router = db.query(Router).filter(Router.id == router_id, Router.owner_id == user.id).first()
if not router:
raise HTTPException(status_code=404, detail='Router not found')
return router
def _serialize_backup(self, backup: Backup):
file_path = Path(backup.file_path)
return {
'id': backup.id,
'router_id': backup.router_id,
'router_name': backup.router.name if backup.router else None,
'file_path': backup.file_path,
'file_name': backup.file_name,
'backup_type': backup.backup_type,
'checksum': backup.checksum,
'file_size': file_path.stat().st_size if file_path.exists() else None,
'created_at': backup.created_at,
}
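# Build side-by-side diff rows from difflib.SequenceMatcher opcodes, counting added/removed/modified/context lines.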
def _build_structured_diff(self, left_lines: list[str], right_lines: list[str]):
matcher = difflib.SequenceMatcher(a=left_lines, b=right_lines)
rows = []
stats = {'added': 0, 'removed': 0, 'modified': 0, 'context': 0}
left_number = 1
right_number = 1
for tag, i1, i2, j1, j2 in matcher.get_opcodes():
if tag == 'equal':
for left_text, right_text in zip(left_lines[i1:i2], right_lines[j1:j2]):
rows.append(
{
'type': 'context',
'left_number': left_number,
'right_number': right_number,
'left_text': left_text,
'right_text': right_text,
}
)
stats['context'] += 1
left_number += 1
right_number += 1
continue
if tag == 'delete':
for left_text in left_lines[i1:i2]:
rows.append(
{
'type': 'removed',
'left_number': left_number,
'right_number': None,
'left_text': left_text,
'right_text': '',
}
)
stats['removed'] += 1
left_number += 1
continue
if tag == 'insert':
for right_text in right_lines[j1:j2]:
rows.append(
{
'type': 'added',
'left_number': None,
'right_number': right_number,
'left_text': '',
'right_text': right_text,
}
)
stats['added'] += 1
right_number += 1
continue
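# 'replace' opcodes: pair the left and right blocks line by line; overflow lines on either side count as pure removals or additions.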
block_left = left_lines[i1:i2]
block_right = right_lines[j1:j2]
block_size = max(len(block_left), len(block_right))
for index in range(block_size):
left_text = block_left[index] if index < len(block_left) else ''
right_text = block_right[index] if index < len(block_right) else ''
row_type = 'modified'
if left_text and not right_text:
row_type = 'removed'
stats['removed'] += 1
elif right_text and not left_text:
row_type = 'added'
stats['added'] += 1
else:
stats['modified'] += 1
rows.append(
{
'type': row_type,
'left_number': left_number if left_text else None,
'right_number': right_number if right_text else None,
'left_text': left_text,
'right_text': right_text,
}
)
if left_text:
left_number += 1
if right_text:
right_number += 1
return rows, stats
def get_backup_for_user(self, db: Session, user: User, backup_id: int) -> Backup:
backup = (
db.query(Backup)
.options(joinedload(Backup.router))
.join(Router)
.filter(Backup.id == backup_id, Router.owner_id == user.id)
.first()
)
if not backup:
raise HTTPException(status_code=404, detail='Backup not found')
return backup
def list_backups(
self,
db: Session,
user: User,
search: str | None = None,
backup_type: str | None = None,
router_id: int | None = None,
sort_by: str = 'created_at',
order: str = 'desc',
):
query = db.query(Backup).options(joinedload(Backup.router)).join(Router).filter(Router.owner_id == user.id)
if search:
query = query.filter(
Backup.file_name.ilike(f'%{search}%')
| Router.name.ilike(f'%{search}%')
| Router.host.ilike(f'%{search}%')
)
if backup_type:
query = query.filter(Backup.backup_type == backup_type)
if router_id:
query = query.filter(Backup.router_id == router_id)
sort_map = {
'created_at': Backup.created_at,
'file_name': Backup.file_name,
'backup_type': Backup.backup_type,
'router_name': Router.name,
}
sort_column = sort_map.get(sort_by, Backup.created_at)
query = query.order_by(sort_column.asc() if order == 'asc' else sort_column.desc())
return [self._serialize_backup(backup) for backup in query.all()]
def list_router_backups(self, db: Session, user: User, router_id: int):
router = self._router_for_user(db, user, router_id)
backups = (
db.query(Backup)
.options(joinedload(Backup.router))
.filter(Backup.router_id == router.id)
.order_by(Backup.created_at.desc())
.all()
)
return [self._serialize_backup(backup) for backup in backups]
def export_router(self, db: Session, user: User, router_id: int) -> Backup:
router = self._router_for_user(db, user, router_id)
settings = settings_service.get_or_create(db)
stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
name = f'{router.name}_{router.id}_{stamp}.rsc'
file_path = ensure_data_dir() / name
try:
content = router_service.export(router, settings.global_ssh_key)
file_path.write_text(content, encoding='utf-8')
backup = Backup(router_id=router.id, file_path=str(file_path), file_name=name, backup_type='export')
db.add(backup)
db.commit()
db.refresh(backup)
log_service.add(db, f'Export OK for router {router.name}')
notification_service.notify(settings, f'Export {router.name} OK', True)
return backup
except Exception as exc:
notification_service.notify(settings, f'Export {router.name} FAIL: {exc}', False)
log_service.add(db, f'Export FAILED for router {router.name}: {exc}')
raise HTTPException(status_code=500, detail=str(exc)) from exc
def binary_backup(self, db: Session, user: User, router_id: int) -> Backup:
router = self._router_for_user(db, user, router_id)
settings = settings_service.get_or_create(db)
stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
base_name = f'{router.name}_{router.id}_{stamp}'
name = f'{base_name}.backup'
file_path = ensure_data_dir() / name
try:
router_service.binary_backup(router, base_name, str(file_path), settings.global_ssh_key)
checksum = compute_checksum(str(file_path))
backup = Backup(router_id=router.id, file_path=str(file_path), file_name=name, backup_type='binary', checksum=checksum)
db.add(backup)
db.commit()
db.refresh(backup)
log_service.add(db, f'Binary backup OK for router {router.name}')
notification_service.notify(settings, f'Backup {router.name} OK', True)
return backup
except Exception as exc:
notification_service.notify(settings, f'Backup {router.name} FAIL: {exc}', False)
log_service.add(db, f'Binary backup FAILED for router {router.name}: {exc}')
raise HTTPException(status_code=500, detail=str(exc)) from exc
def upload_backup_to_router(self, db: Session, user: User, router_id: int, backup_id: int):
router = self._router_for_user(db, user, router_id)
backup = self.get_backup_for_user(db, user, backup_id)
if backup.backup_type != 'binary':
raise HTTPException(status_code=400, detail='Only binary backups can be uploaded')
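# Re-hash the local file and compare it against the stored checksum before pushing it back to the device.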
checksum = compute_checksum(backup.file_path)
if backup.checksum and checksum != backup.checksum:
raise HTTPException(status_code=400, detail='Checksum mismatch')
settings = settings_service.get_or_create(db)
router_service.upload_backup(router, backup.file_path, settings.global_ssh_key)
log_service.add(db, f'Upload backup OK for router {router.name}')
def delete_backup(self, db: Session, user: User, backup_id: int, commit: bool = True):
backup = self.get_backup_for_user(db, user, backup_id)
path = Path(backup.file_path)
if path.exists():
path.unlink()
db.delete(backup)
if commit:
db.commit()
def diff_backups(self, db: Session, user: User, left_id: int, right_id: int):
left = self.get_backup_for_user(db, user, left_id)
right = self.get_backup_for_user(db, user, right_id)
if left.backup_type != 'export' or right.backup_type != 'export':
raise HTTPException(status_code=400, detail='Diff is supported only for export backups')
left_lines = Path(left.file_path).read_text(encoding='utf-8', errors='ignore').splitlines()
right_lines = Path(right.file_path).read_text(encoding='utf-8', errors='ignore').splitlines()
diff_lines = list(
difflib.unified_diff(left_lines, right_lines, fromfile=left.file_name, tofile=right.file_name, lineterm='')
)
diff_html = difflib.HtmlDiff(wrapcolumn=120).make_file(
left_lines,
right_lines,
fromdesc=left.file_name,
todesc=right.file_name,
context=True,
numlines=2,
)
structured_lines, stats = self._build_structured_diff(left_lines, right_lines)
return {
'left_backup_id': left_id,
'right_backup_id': right_id,
'left_file_name': left.file_name,
'right_file_name': right.file_name,
'diff_text': '\n'.join(diff_lines),
'diff_html': diff_html,
'stats': stats,
'lines': structured_lines,
}
def email_backup(self, db: Session, user: User, backup_id: int):
backup = self.get_backup_for_user(db, user, backup_id)
settings = settings_service.get_or_create(db)
noun = 'Export' if backup.backup_type == 'export' else 'Backup'
subject = f'RouterOS {noun}: {backup.file_name}'
body = f'Sending {backup.file_name} from router {backup.router.name}.'
notification_service.send_email(settings, subject, body, backup.file_path)
log_service.add(db, f'Email sent for backup {backup.file_name}')
def export_all(self, db: Session, user: User):
routers = db.query(Router).filter(Router.owner_id == user.id).all()
result = []
for router in routers:
try:
backup = self.export_router(db, user, router.id)
result.append({'router': router.name, 'status': 'ok', 'backup_id': backup.id})
except Exception as exc:
result.append({'router': router.name, 'status': 'error', 'message': str(exc)})
return result
def binary_all(self, db: Session, user: User):
routers = db.query(Router).filter(Router.owner_id == user.id).all()
result = []
for router in routers:
try:
backup = self.binary_backup(db, user, router.id)
result.append({'router': router.name, 'status': 'ok', 'backup_id': backup.id})
except Exception as exc:
result.append({'router': router.name, 'status': 'error', 'message': str(exc)})
return result
def cleanup_old_backups(self, db: Session):
settings = settings_service.get_or_create(db)
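# Naive UTC cutoff, matching the naive timestamps kept in the database.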
cutoff = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(days=settings.backup_retention_days)
old_backups = db.query(Backup).filter(Backup.created_at < cutoff).all()
deleted_count = 0
for backup in old_backups:
path = Path(backup.file_path)
if path.exists():
path.unlink()
db.delete(backup)
deleted_count += 1
db.commit()
log_service.add(db, f'Retention cleanup removed {deleted_count} backups older than {settings.backup_retention_days} days')
return deleted_count
backup_service = BackupService()

View File

@@ -0,0 +1,38 @@
import hashlib
import os
import shutil
from pathlib import Path
from app.core.config import settings
from app.schemas.dashboard import StorageStats
def compute_checksum(file_path: str) -> str:
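# Hash the file in 4 KiB chunks so large backups never have to be loaded into memory at once.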
sha256 = hashlib.sha256()
with open(file_path, "rb") as handle:
for chunk in iter(lambda: handle.read(4096), b""):
sha256.update(chunk)
return sha256.hexdigest()
def ensure_data_dir() -> Path:
data_dir = Path(settings.data_path)
data_dir.mkdir(parents=True, exist_ok=True)
return data_dir
def get_folder_size() -> int:
total = 0
for dirpath, _, filenames in os.walk(ensure_data_dir()):
for filename in filenames:
try:
total += os.path.getsize(Path(dirpath) / filename)
except OSError:
pass
return total
def get_storage_stats() -> StorageStats:
ensure_data_dir()
disk = shutil.disk_usage(ensure_data_dir())
folder_used = get_folder_size()
usage_percent = (folder_used / disk.total) * 100 if disk.total else 0
return StorageStats(total=disk.total, used=disk.used, free=disk.free, folder_used=folder_used, usage_percent=usage_percent)

@@ -0,0 +1,24 @@
from datetime import datetime, timedelta, timezone
from sqlalchemy.orm import Session
from app.models.log import OperationLog
class LogService:
def add(self, db: Session, message: str, commit: bool = True) -> None:
db.add(OperationLog(message=message))
if commit:
db.commit()
def delete_older_than(self, db: Session, days: int) -> int:
cutoff = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(days=days)
logs = db.query(OperationLog).filter(OperationLog.timestamp < cutoff).all()
count = len(logs)
for log in logs:
db.delete(log)
db.commit()
return count
log_service = LogService()

@@ -0,0 +1,78 @@
import smtplib
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from pathlib import Path
import requests
from app.core.config import settings as app_settings
from app.models.settings import GlobalSettings
class NotificationService:
def send_pushover(self, token: str, user_key: str, message: str, title: str = "RouterOS Backup") -> bool:
response = requests.post(
"https://api.pushover.net/1/messages.json",
data={"token": token, "user": user_key, "message": message, "title": title},
timeout=15,
)
return response.ok
def send_email(self, settings: GlobalSettings, subject: str, body: str, attachment_path: str | None = None):
if not (settings.smtp_host and settings.smtp_login and settings.smtp_password):
raise ValueError("SMTP is not configured")
recipient = (settings.recipient_email or settings.smtp_login or "").strip()
if not recipient:
raise ValueError("Recipient email is empty")
msg = MIMEMultipart()
msg["From"] = settings.smtp_login
msg["To"] = recipient
msg["Subject"] = subject
msg.attach(MIMEText(body, "plain", "utf-8"))
if attachment_path:
attachment = Path(attachment_path)
with attachment.open("rb") as handle:
part = MIMEBase("application", "octet-stream")
part.set_payload(handle.read())
encoders.encode_base64(part)
part.add_header("Content-Disposition", f'attachment; filename="{attachment.name}"')
msg.attach(part)
with smtplib.SMTP(settings.smtp_host, settings.smtp_port, timeout=app_settings.smtp_timeout_seconds) as server:
if app_settings.smtp_starttls:
server.starttls()
server.login(settings.smtp_login, settings.smtp_password)
server.sendmail(settings.smtp_login, [recipient], msg.as_string())
def notify(self, settings: GlobalSettings, message: str, success: bool):
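# Respect the failures-only setting, then try each configured channel; delivery errors are swallowed so they never break a backup run.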
if settings.notify_failures_only and success:
return
if settings.smtp_notifications_enabled:
try:
self.send_email(settings, "RouterOS Backup notification", message)
except Exception:
pass
if settings.pushover_token and settings.pushover_userkey:
try:
self.send_pushover(settings.pushover_token, settings.pushover_userkey, message)
except Exception:
pass
def send_test_email(self, settings: GlobalSettings):
self.send_email(settings, "RouterOS Backup test", "This is a test email from RouterOS Backup Manager Next")
def send_test_pushover(self, settings: GlobalSettings):
if not (settings.pushover_token and settings.pushover_userkey):
raise ValueError("Pushover is not configured")
self.send_pushover(
settings.pushover_token,
settings.pushover_userkey,
"Test pushover from RouterOS Backup Manager Next",
)
notification_service = NotificationService()

@@ -0,0 +1,140 @@
from datetime import datetime, timezone
import io
from pathlib import Path
import paramiko
from sqlalchemy.orm import Session
from app.models.router import Router
class RouterService:
def _load_pkey(self, ssh_key_str: str):
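# Try RSA, Ed25519 and ECDSA loaders in turn; the last parse error is surfaced if none of them accept the key.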
key_str = (ssh_key_str or "").strip()
key_buffer = io.StringIO(key_str)
loaders = [
paramiko.RSAKey.from_private_key,
paramiko.Ed25519Key.from_private_key,
paramiko.ECDSAKey.from_private_key,
]
last_error = None
for loader in loaders:
key_buffer.seek(0)
try:
return loader(key_buffer)
except Exception as exc:
last_error = exc
raise ValueError("Failed to load SSH private key") from last_error
def _connect(self, router: Router, global_ssh_key: str | None = None):
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
key_source = router.ssh_key.strip() if router.ssh_key and router.ssh_key.strip() else (global_ssh_key or "")
if key_source:
pkey = self._load_pkey(key_source)
client.connect(router.host, port=router.port, username=router.ssh_user, pkey=pkey, timeout=10)
else:
client.connect(
router.host,
port=router.port,
username=router.ssh_user,
password=router.ssh_password,
timeout=10,
allow_agent=False,
look_for_keys=False,
banner_timeout=10,
)
return client
def export(self, router: Router, global_ssh_key: str | None = None) -> str:
client = self._connect(router, global_ssh_key)
_, stdout, _ = client.exec_command("/export")
output = stdout.read().decode("utf-8", errors="ignore")
client.close()
return output
def binary_backup(self, router: Router, backup_name: str, local_path: str, global_ssh_key: str | None = None) -> str:
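# Ask RouterOS to create a binary backup, download it over SFTP, then best-effort delete the remote copy.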
client = self._connect(router, global_ssh_key)
_, stdout, _ = client.exec_command(f"/system backup save name={backup_name}")
stdout.channel.recv_exit_status()
sftp = client.open_sftp()
remote_file = f"{backup_name}.backup"
sftp.get(remote_file, local_path)
try:
sftp.remove(remote_file)
except Exception:
pass
sftp.close()
client.close()
return local_path
def upload_backup(self, router: Router, local_backup_path: str, global_ssh_key: str | None = None):
client = self._connect(router, global_ssh_key)
sftp = client.open_sftp()
target_name = Path(local_backup_path).name
sftp.put(local_backup_path, target_name)
sftp.close()
client.close()
def probe_connection(self, router: Router, global_ssh_key: str | None = None):
tested_at = datetime.now(timezone.utc).replace(tzinfo=None)
try:
client = self._connect(router, global_ssh_key)
_, stdout, _ = client.exec_command("/system resource print without-paging")
resource_output = stdout.read().decode("utf-8", errors="ignore")
_, stdout, _ = client.exec_command("/system identity print")
identity_output = stdout.read().decode("utf-8", errors="ignore")
client.close()
model = "Unknown"
uptime = "Unknown"
hostname = "Unknown"
version = "Unknown"
for line in resource_output.splitlines():
if "board-name" in line:
model = line.split(":", 1)[1].strip()
if "uptime" in line:
uptime = line.split(":", 1)[1].strip()
if "version" in line:
version = line.split(":", 1)[1].strip()
for line in identity_output.splitlines():
if "name" in line:
hostname = line.split(":", 1)[1].strip()
return {
"success": True,
"tested_at": tested_at,
"model": model,
"uptime": uptime,
"hostname": hostname,
"version": version,
"error": None,
}
except Exception as exc:
return {
"success": False,
"tested_at": tested_at,
"model": "Unknown",
"uptime": "Unknown",
"hostname": router.name,
"version": None,
"error": str(exc),
}
def _store_connection_result(self, db: Session, router: Router, result: dict):
router.last_connection_status = result["success"]
router.last_connection_tested_at = result["tested_at"]
router.last_connection_error = result.get("error")
router.last_connection_hostname = result.get("hostname")
router.last_connection_model = result.get("model")
router.last_connection_version = result.get("version")
router.last_connection_uptime = result.get("uptime")
db.add(router)
db.commit()
db.refresh(router)
return result
def test_connection(self, db: Session, router: Router, global_ssh_key: str | None = None):
result = self.probe_connection(router, global_ssh_key)
return self._store_connection_result(db, router, result)
router_service = RouterService()

@@ -0,0 +1,249 @@
from __future__ import annotations
from datetime import datetime, timedelta
from apscheduler.schedulers.background import BackgroundScheduler
from app.core.config import settings as app_settings
from app.core.cron_utils import CronValidationError, describe_cron_expression, parse_cron_expression, preview_next_runs
from app.db.session import SessionLocal
from app.models.router import Router
from app.services.backup_service import backup_service
from app.services.log_service import log_service
from app.services.router_service import router_service
from app.services.settings_service import settings_service
class SchedulerService:
def __init__(self):
self.scheduler = BackgroundScheduler(timezone=app_settings.timezone)
self.started = False
def start(self):
if self.started:
return
self.reschedule()
self.scheduler.start()
self.started = True
def shutdown(self):
if self.started:
self.scheduler.shutdown(wait=False)
self.started = False
def _parse_cron(self, expr: str):
return parse_cron_expression(expr, app_settings.timezone)
def validate_cron(self, expr: str):
return self._parse_cron(expr)
def _interval_next_runs(self, minutes: int, count: int = 3):
now = datetime.now()
return [now + timedelta(minutes=minutes * index) for index in range(1, count + 1)]
def scheduler_status(self):
with SessionLocal() as db:
settings = settings_service.get_or_create(db)
return {
'timezone': app_settings.timezone,
'running': self.started,
'jobs': [
self._describe_job(
key='auto_export',
label='settings.schedulerAutoExportLabel',
enabled=settings.enable_auto_export,
cron=settings.export_cron,
),
self._describe_job(
key='auto_binary',
label='settings.schedulerBinaryLabel',
enabled=bool(settings.binary_cron),
cron=settings.binary_cron,
),
self._describe_job(
key='retention',
label='settings.schedulerRetentionLabel',
enabled=bool(settings.retention_cron),
cron=settings.retention_cron,
),
self._describe_interval_job(
key='connection_probe',
label='settings.schedulerConnectionLabel',
minutes=settings.connection_test_interval_minutes,
),
{
'key': 'log_cleanup',
'label': 'settings.schedulerLogsLabel',
'enabled': True,
'cron': None,
'description': 'settings.schedulerLogsDescription',
'description_params': None,
'valid': True,
'next_runs': [],
'error': None,
},
],
}
def _describe_job(self, key: str, label: str, enabled: bool, cron: str | None):
cron = (cron or '').strip()
if not enabled or not cron:
return {
'key': key,
'label': label,
'enabled': False,
'cron': cron or None,
'description': 'settings.scheduleDisabledHint',
'description_params': None,
'valid': True,
'next_runs': [],
'error': None,
}
try:
next_runs = preview_next_runs(cron, app_settings.timezone, count=3)
return {
'key': key,
'label': label,
'enabled': True,
'cron': cron,
'description': 'settings.schedulerCronDescription',
'description_params': {'description': describe_cron_expression(cron)},
'valid': True,
'next_runs': next_runs,
'error': None,
}
except CronValidationError as exc:
return {
'key': key,
'label': label,
'enabled': True,
'cron': cron,
'description': 'settings.schedulerInvalidCron',
'description_params': None,
'valid': False,
'next_runs': [],
'error': str(exc),
}
def _describe_interval_job(self, key: str, label: str, minutes: int):
minutes = int(minutes or 0)
if minutes <= 0:
return {
'key': key,
'label': label,
'enabled': False,
'cron': None,
'description': 'settings.connectionTestsDisabledHint',
'description_params': None,
'valid': True,
'next_runs': [],
'error': None,
}
return {
'key': key,
'label': label,
'enabled': True,
'cron': None,
'description': 'settings.connectionTestsEverySummary',
'description_params': {'minutes': minutes},
'valid': True,
'next_runs': self._interval_next_runs(minutes),
'error': None,
}
def reschedule(self):
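# Rebuild every job from the current settings; invalid cron expressions are skipped and logged instead of aborting the reschedule.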
self.scheduler.remove_all_jobs()
with SessionLocal() as db:
settings = settings_service.get_or_create(db)
job_definitions = [
('auto_export', settings.enable_auto_export, settings.export_cron, self._run_auto_export, 'auto export'),
('auto_binary', bool(settings.binary_cron), settings.binary_cron, self._run_binary_backup, 'binary backup'),
('retention', bool(settings.retention_cron), settings.retention_cron, self._run_retention, 'retention cleanup'),
]
pending_logs: list[str] = []
for job_id, enabled, cron, callback, label in job_definitions:
cron = (cron or '').strip()
if not enabled or not cron:
continue
try:
trigger = self._parse_cron(cron)
self.scheduler.add_job(
callback,
trigger=trigger,
id=job_id,
replace_existing=True,
coalesce=True,
max_instances=1,
misfire_grace_time=300,
)
except Exception as exc:
pending_logs.append(f'Scheduler skipped invalid {label} cron ({cron}): {exc}')
if int(settings.connection_test_interval_minutes or 0) > 0:
self.scheduler.add_job(
self._run_connection_probes,
trigger='interval',
minutes=int(settings.connection_test_interval_minutes),
id='connection_probe',
replace_existing=True,
coalesce=True,
max_instances=1,
misfire_grace_time=300,
)
self.scheduler.add_job(
self._run_log_cleanup,
trigger='interval',
days=1,
id='log_cleanup',
replace_existing=True,
coalesce=True,
max_instances=1,
misfire_grace_time=300,
)
for message in pending_logs:
log_service.add(db, message, commit=False)
if pending_logs:
db.commit()
def _run_auto_export(self):
with SessionLocal() as db:
routers = db.query(Router).all()
for router in routers:
try:
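# type('U', ...) is a throwaway object exposing only .id, which is all backup_service reads from the user here.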
backup_service.export_router(db, type('U', (), {'id': router.owner_id})(), router.id)
except Exception as exc:
log_service.add(db, f'Scheduled export failed for {router.name}: {exc}')
def _run_binary_backup(self):
with SessionLocal() as db:
routers = db.query(Router).all()
for router in routers:
try:
backup_service.binary_backup(db, type('U', (), {'id': router.owner_id})(), router.id)
except Exception as exc:
log_service.add(db, f'Scheduled binary backup failed for {router.name}: {exc}')
def _run_retention(self):
with SessionLocal() as db:
backup_service.cleanup_old_backups(db)
def _run_connection_probes(self):
with SessionLocal() as db:
settings = settings_service.get_or_create(db)
routers = db.query(Router).all()
for router in routers:
result = router_service.test_connection(db, router, settings.global_ssh_key)
if not result['success']:
log_service.add(db, f'Scheduled connection test failed for {router.name}: {result.get("error") or "Unknown error"}')
def _run_log_cleanup(self):
with SessionLocal() as db:
settings = settings_service.get_or_create(db)
deleted = log_service.delete_older_than(db, settings.log_retention_days)
log_service.add(db, f'Log retention cleanup removed {deleted} entries older than {settings.log_retention_days} days')
scheduler_service = SchedulerService()

@@ -0,0 +1,32 @@
from sqlalchemy.orm import Session
from app.models.settings import GlobalSettings
from app.schemas.settings import SettingsUpdate
class SettingsService:
def get_or_create(self, db: Session) -> GlobalSettings:
settings = db.query(GlobalSettings).first()
if not settings:
settings = GlobalSettings()
db.add(settings)
db.commit()
db.refresh(settings)
return settings
def update(self, db: Session, payload: SettingsUpdate) -> GlobalSettings:
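# A missing global_ssh_key in the payload leaves the stored key untouched unless clear_global_ssh_key was explicitly set.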
settings = self.get_or_create(db)
data = payload.model_dump(exclude={'clear_global_ssh_key'})
for key, value in data.items():
if key == 'global_ssh_key' and value is None and not payload.clear_global_ssh_key:
continue
setattr(settings, key, value)
if payload.clear_global_ssh_key:
settings.global_ssh_key = None
db.add(settings)
db.commit()
db.refresh(settings)
return settings
settings_service = SettingsService()

@@ -0,0 +1,124 @@
import re
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from urllib.parse import urlparse
import requests
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
from app.schemas.swos_beta import SwosBetaCredentials, SwosBetaProbeResponse
@dataclass
class DownloadedSwosBackup:
filename: str
content: bytes
content_type: str
auth_mode: str
base_url: str
class SwosBetaService:
timeout_seconds = 12
def probe(self, payload: SwosBetaCredentials) -> SwosBetaProbeResponse:
base_url = self._build_base_url(payload.host, payload.port)
response, auth_mode = self._request_with_fallback('GET', base_url, payload)
html = response.text if 'text' in (response.headers.get('content-type') or '').lower() else ''
title = self._extract_title(html)
backup_response, _ = self._request_with_fallback('GET', f'{base_url}/backup.swb', payload, allow_text_fallback=False)
backup_ok = backup_response.status_code == 200 and len(backup_response.content) > 0
return SwosBetaProbeResponse(
success=response.ok,
base_url=base_url,
status_code=response.status_code,
auth_mode=auth_mode,
page_title=title,
content_type=response.headers.get('content-type'),
server=response.headers.get('server'),
save_backup_visible='save backup' in html.lower(),
backup_endpoint_ok=backup_ok,
note='This module runs on its own and does not store copies in the main repository.'
)
def download_backup(self, payload: SwosBetaCredentials) -> DownloadedSwosBackup:
base_url = self._build_base_url(payload.host, payload.port)
response, auth_mode = self._request_with_fallback('GET', f'{base_url}/backup.swb', payload, allow_text_fallback=False)
if response.status_code != 200:
raise ValueError(f'The device returned HTTP status {response.status_code} for /backup.swb.')
if not response.content:
raise ValueError('The device returned an empty backup file.')
filename = self._build_filename(payload)
content_type = response.headers.get('content-type') or 'application/octet-stream'
return DownloadedSwosBackup(
filename=filename,
content=response.content,
content_type=content_type,
auth_mode=auth_mode,
base_url=base_url,
)
def _request_with_fallback(self, method: str, url: str, payload: SwosBetaCredentials, allow_text_fallback: bool = True):
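# Try HTTP digest auth first, then basic auth (and optionally no auth at all); the first response below HTTP 400 wins.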
attempts = []
auth_variants = [
('digest', HTTPDigestAuth(payload.username, payload.password)),
('basic', HTTPBasicAuth(payload.username, payload.password)),
]
if allow_text_fallback:
auth_variants.append(('none', None))
last_response = None
for label, auth in auth_variants:
try:
response = requests.request(
method,
url,
auth=auth,
timeout=self.timeout_seconds,
allow_redirects=True,
)
last_response = response
if response.status_code < 400:
return response, label
attempts.append(f'{label}:{response.status_code}')
except requests.RequestException as exc:
attempts.append(f'{label}:{exc.__class__.__name__}')
if last_response is not None:
raise ValueError(f'Failed to connect to SwOS ({", ".join(attempts)}).')
raise ValueError('Failed to connect to SwOS.')
def _build_base_url(self, host: str, port: int) -> str:
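# Accept either a bare host or a full URL; append the port only when it is not the default for the scheme.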
raw = host.strip()
parsed = urlparse(raw if '://' in raw else f'http://{raw}')
scheme = parsed.scheme or 'http'
if scheme not in {'http', 'https'}:
raise ValueError('Only HTTP and HTTPS addresses are allowed.')
if not parsed.hostname:
raise ValueError('Invalid host address.')
resolved_port = parsed.port or port
base = f'{scheme}://{parsed.hostname}'
if (scheme == 'http' and resolved_port != 80) or (scheme == 'https' and resolved_port != 443):
base = f'{base}:{resolved_port}'
return base.rstrip('/')
def _extract_title(self, html: str) -> str | None:
if not html:
return None
match = re.search(r'<title>(.*?)</title>', html, flags=re.IGNORECASE | re.DOTALL)
if not match:
return None
return re.sub(r'\s+', ' ', match.group(1)).strip() or None
def _build_filename(self, payload: SwosBetaCredentials) -> str:
label = payload.label or payload.host
safe = re.sub(r'[^A-Za-z0-9._-]+', '-', label).strip('-') or 'switchos'
timestamp = datetime.now().strftime('%Y%m%d-%H%M%S')
return f'{safe}-swos-{timestamp}.swb'
swos_beta_service = SwosBetaService()