import difflib
|
|
from datetime import datetime, timedelta, timezone
|
|
from pathlib import Path
|
|
|
|
from fastapi import HTTPException
|
|
from sqlalchemy.orm import Session, joinedload
|
|
|
|
from app.models.backup import Backup
|
|
from app.models.router import Router
|
|
from app.models.user import User
|
|
from app.services.file_service import compute_checksum, ensure_data_dir
|
|
from app.services.log_service import log_service
|
|
from app.services.notification_service import notification_service
|
|
from app.services.router_service import router_service
|
|
from app.services.settings_service import settings_service
|
|
|
|
|
|
class BackupService:
    """Service layer for RouterOS configuration backups (exports and binary backups)."""

    def _router_for_user(self, db: Session, user: User, router_id: int) -> Router:
        """Return the router with *router_id* owned by *user*, or raise HTTP 404."""
        owned = db.query(Router).filter(Router.id == router_id, Router.owner_id == user.id)
        router = owned.first()
        if router is None:
            raise HTTPException(status_code=404, detail='Router not found')
        return router
def _serialize_backup(self, backup: Backup):
|
|
file_path = Path(backup.file_path)
|
|
return {
|
|
'id': backup.id,
|
|
'router_id': backup.router_id,
|
|
'router_name': backup.router.name if backup.router else None,
|
|
'file_path': backup.file_path,
|
|
'file_name': backup.file_name,
|
|
'backup_type': backup.backup_type,
|
|
'checksum': backup.checksum,
|
|
'file_size': file_path.stat().st_size if file_path.exists() else None,
|
|
'created_at': backup.created_at,
|
|
}
|
|
|
|
def _build_structured_diff(self, left_lines: list[str], right_lines: list[str]):
|
|
matcher = difflib.SequenceMatcher(a=left_lines, b=right_lines)
|
|
rows = []
|
|
stats = {'added': 0, 'removed': 0, 'modified': 0, 'context': 0}
|
|
left_number = 1
|
|
right_number = 1
|
|
|
|
for tag, i1, i2, j1, j2 in matcher.get_opcodes():
|
|
if tag == 'equal':
|
|
for left_text, right_text in zip(left_lines[i1:i2], right_lines[j1:j2]):
|
|
rows.append(
|
|
{
|
|
'type': 'context',
|
|
'left_number': left_number,
|
|
'right_number': right_number,
|
|
'left_text': left_text,
|
|
'right_text': right_text,
|
|
}
|
|
)
|
|
stats['context'] += 1
|
|
left_number += 1
|
|
right_number += 1
|
|
continue
|
|
|
|
if tag == 'delete':
|
|
for left_text in left_lines[i1:i2]:
|
|
rows.append(
|
|
{
|
|
'type': 'removed',
|
|
'left_number': left_number,
|
|
'right_number': None,
|
|
'left_text': left_text,
|
|
'right_text': '',
|
|
}
|
|
)
|
|
stats['removed'] += 1
|
|
left_number += 1
|
|
continue
|
|
|
|
if tag == 'insert':
|
|
for right_text in right_lines[j1:j2]:
|
|
rows.append(
|
|
{
|
|
'type': 'added',
|
|
'left_number': None,
|
|
'right_number': right_number,
|
|
'left_text': '',
|
|
'right_text': right_text,
|
|
}
|
|
)
|
|
stats['added'] += 1
|
|
right_number += 1
|
|
continue
|
|
|
|
block_left = left_lines[i1:i2]
|
|
block_right = right_lines[j1:j2]
|
|
block_size = max(len(block_left), len(block_right))
|
|
for index in range(block_size):
|
|
left_text = block_left[index] if index < len(block_left) else ''
|
|
right_text = block_right[index] if index < len(block_right) else ''
|
|
row_type = 'modified'
|
|
if left_text and not right_text:
|
|
row_type = 'removed'
|
|
stats['removed'] += 1
|
|
elif right_text and not left_text:
|
|
row_type = 'added'
|
|
stats['added'] += 1
|
|
else:
|
|
stats['modified'] += 1
|
|
|
|
rows.append(
|
|
{
|
|
'type': row_type,
|
|
'left_number': left_number if left_text else None,
|
|
'right_number': right_number if right_text else None,
|
|
'left_text': left_text,
|
|
'right_text': right_text,
|
|
}
|
|
)
|
|
if left_text:
|
|
left_number += 1
|
|
if right_text:
|
|
right_number += 1
|
|
|
|
return rows, stats
|
|
|
|
def get_backup_for_user(self, db: Session, user: User, backup_id: int) -> Backup:
|
|
backup = (
|
|
db.query(Backup)
|
|
.options(joinedload(Backup.router))
|
|
.join(Router)
|
|
.filter(Backup.id == backup_id, Router.owner_id == user.id)
|
|
.first()
|
|
)
|
|
if not backup:
|
|
raise HTTPException(status_code=404, detail='Backup not found')
|
|
return backup
|
|
|
|
def list_backups(
|
|
self,
|
|
db: Session,
|
|
user: User,
|
|
search: str | None = None,
|
|
backup_type: str | None = None,
|
|
router_id: int | None = None,
|
|
sort_by: str = 'created_at',
|
|
order: str = 'desc',
|
|
):
|
|
query = db.query(Backup).options(joinedload(Backup.router)).join(Router).filter(Router.owner_id == user.id)
|
|
if search:
|
|
query = query.filter(
|
|
Backup.file_name.ilike(f'%{search}%')
|
|
| Router.name.ilike(f'%{search}%')
|
|
| Router.host.ilike(f'%{search}%')
|
|
)
|
|
if backup_type:
|
|
query = query.filter(Backup.backup_type == backup_type)
|
|
if router_id:
|
|
query = query.filter(Backup.router_id == router_id)
|
|
|
|
sort_map = {
|
|
'created_at': Backup.created_at,
|
|
'file_name': Backup.file_name,
|
|
'backup_type': Backup.backup_type,
|
|
'router_name': Router.name,
|
|
}
|
|
sort_column = sort_map.get(sort_by, Backup.created_at)
|
|
query = query.order_by(sort_column.asc() if order == 'asc' else sort_column.desc())
|
|
return [self._serialize_backup(backup) for backup in query.all()]
|
|
|
|
def list_router_backups(self, db: Session, user: User, router_id: int):
|
|
router = self._router_for_user(db, user, router_id)
|
|
backups = (
|
|
db.query(Backup)
|
|
.options(joinedload(Backup.router))
|
|
.filter(Backup.router_id == router.id)
|
|
.order_by(Backup.created_at.desc())
|
|
.all()
|
|
)
|
|
return [self._serialize_backup(backup) for backup in backups]
|
|
|
|
def export_router(self, db: Session, user: User, router_id: int) -> Backup:
|
|
router = self._router_for_user(db, user, router_id)
|
|
settings = settings_service.get_or_create(db)
|
|
stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
|
|
name = f'{router.name}_{router.id}_{stamp}.rsc'
|
|
file_path = ensure_data_dir() / name
|
|
try:
|
|
content = router_service.export(router, settings.global_ssh_key)
|
|
file_path.write_text(content, encoding='utf-8')
|
|
backup = Backup(router_id=router.id, file_path=str(file_path), file_name=name, backup_type='export')
|
|
db.add(backup)
|
|
db.commit()
|
|
db.refresh(backup)
|
|
log_service.add(db, f'Export OK for router {router.name}')
|
|
notification_service.notify(settings, f'Export {router.name} OK', True)
|
|
return backup
|
|
except Exception as exc:
|
|
notification_service.notify(settings, f'Export {router.name} FAIL: {exc}', False)
|
|
log_service.add(db, f'Export FAILED for router {router.name}: {exc}')
|
|
raise HTTPException(status_code=500, detail=str(exc)) from exc
|
|
|
|
def binary_backup(self, db: Session, user: User, router_id: int) -> Backup:
|
|
router = self._router_for_user(db, user, router_id)
|
|
settings = settings_service.get_or_create(db)
|
|
stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
|
|
base_name = f'{router.name}_{router.id}_{stamp}'
|
|
name = f'{base_name}.backup'
|
|
file_path = ensure_data_dir() / name
|
|
try:
|
|
router_service.binary_backup(router, base_name, str(file_path), settings.global_ssh_key)
|
|
checksum = compute_checksum(str(file_path))
|
|
backup = Backup(router_id=router.id, file_path=str(file_path), file_name=name, backup_type='binary', checksum=checksum)
|
|
db.add(backup)
|
|
db.commit()
|
|
db.refresh(backup)
|
|
log_service.add(db, f'Binary backup OK for router {router.name}')
|
|
notification_service.notify(settings, f'Backup {router.name} OK', True)
|
|
return backup
|
|
except Exception as exc:
|
|
notification_service.notify(settings, f'Backup {router.name} FAIL: {exc}', False)
|
|
log_service.add(db, f'Binary backup FAILED for router {router.name}: {exc}')
|
|
raise HTTPException(status_code=500, detail=str(exc)) from exc
|
|
|
|
def upload_backup_to_router(self, db: Session, user: User, router_id: int, backup_id: int):
|
|
router = self._router_for_user(db, user, router_id)
|
|
backup = self.get_backup_for_user(db, user, backup_id)
|
|
if backup.backup_type != 'binary':
|
|
raise HTTPException(status_code=400, detail='Only binary backups can be uploaded')
|
|
checksum = compute_checksum(backup.file_path)
|
|
if backup.checksum and checksum != backup.checksum:
|
|
raise HTTPException(status_code=400, detail='Checksum mismatch')
|
|
settings = settings_service.get_or_create(db)
|
|
router_service.upload_backup(router, backup.file_path, settings.global_ssh_key)
|
|
log_service.add(db, f'Upload backup OK for router {router.name}')
|
|
|
|
def delete_backup(self, db: Session, user: User, backup_id: int, commit: bool = True):
|
|
backup = self.get_backup_for_user(db, user, backup_id)
|
|
path = Path(backup.file_path)
|
|
if path.exists():
|
|
path.unlink()
|
|
db.delete(backup)
|
|
if commit:
|
|
db.commit()
|
|
|
|
def diff_backups(self, db: Session, user: User, left_id: int, right_id: int):
|
|
left = self.get_backup_for_user(db, user, left_id)
|
|
right = self.get_backup_for_user(db, user, right_id)
|
|
if left.backup_type != 'export' or right.backup_type != 'export':
|
|
raise HTTPException(status_code=400, detail='Diff is supported only for export backups')
|
|
left_lines = Path(left.file_path).read_text(encoding='utf-8', errors='ignore').splitlines()
|
|
right_lines = Path(right.file_path).read_text(encoding='utf-8', errors='ignore').splitlines()
|
|
diff_lines = list(
|
|
difflib.unified_diff(left_lines, right_lines, fromfile=left.file_name, tofile=right.file_name, lineterm='')
|
|
)
|
|
diff_html = difflib.HtmlDiff(wrapcolumn=120).make_file(
|
|
left_lines,
|
|
right_lines,
|
|
fromdesc=left.file_name,
|
|
todesc=right.file_name,
|
|
context=True,
|
|
numlines=2,
|
|
)
|
|
structured_lines, stats = self._build_structured_diff(left_lines, right_lines)
|
|
return {
|
|
'left_backup_id': left_id,
|
|
'right_backup_id': right_id,
|
|
'left_file_name': left.file_name,
|
|
'right_file_name': right.file_name,
|
|
'diff_text': '\n'.join(diff_lines),
|
|
'diff_html': diff_html,
|
|
'stats': stats,
|
|
'lines': structured_lines,
|
|
}
|
|
|
|
def email_backup(self, db: Session, user: User, backup_id: int):
|
|
backup = self.get_backup_for_user(db, user, backup_id)
|
|
settings = settings_service.get_or_create(db)
|
|
noun = 'Export' if backup.backup_type == 'export' else 'Backup'
|
|
subject = f'RouterOS {noun}: {backup.file_name}'
|
|
body = f'Sending {backup.file_name} from router {backup.router.name}.'
|
|
notification_service.send_email(settings, subject, body, backup.file_path)
|
|
log_service.add(db, f'Email sent for backup {backup.file_name}')
|
|
|
|
def export_all(self, db: Session, user: User):
|
|
routers = db.query(Router).filter(Router.owner_id == user.id).all()
|
|
result = []
|
|
for router in routers:
|
|
try:
|
|
backup = self.export_router(db, user, router.id)
|
|
result.append({'router': router.name, 'status': 'ok', 'backup_id': backup.id})
|
|
except Exception as exc:
|
|
result.append({'router': router.name, 'status': 'error', 'message': str(exc)})
|
|
return result
|
|
|
|
def binary_all(self, db: Session, user: User):
|
|
routers = db.query(Router).filter(Router.owner_id == user.id).all()
|
|
result = []
|
|
for router in routers:
|
|
try:
|
|
backup = self.binary_backup(db, user, router.id)
|
|
result.append({'router': router.name, 'status': 'ok', 'backup_id': backup.id})
|
|
except Exception as exc:
|
|
result.append({'router': router.name, 'status': 'error', 'message': str(exc)})
|
|
return result
|
|
|
|
def cleanup_old_backups(self, db: Session):
|
|
settings = settings_service.get_or_create(db)
|
|
cutoff = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(days=settings.backup_retention_days)
|
|
old_backups = db.query(Backup).filter(Backup.created_at < cutoff).all()
|
|
deleted_count = 0
|
|
for backup in old_backups:
|
|
path = Path(backup.file_path)
|
|
if path.exists():
|
|
path.unlink()
|
|
db.delete(backup)
|
|
deleted_count += 1
|
|
db.commit()
|
|
log_service.add(db, f'Retention cleanup removed {deleted_count} backups older than {settings.backup_retention_days} days')
|
|
return deleted_count
|
|
|
|
|
|
# Module-level singleton used by the API layer to invoke backup operations.
backup_service = BackupService()