first commit
This commit is contained in:
4
backend/.env.example
Normal file
4
backend/.env.example
Normal file
@@ -0,0 +1,4 @@
|
||||
# Port the backend container exposes the API on.
APP_PORT=8080
# URL prefix under which all API routes are mounted.
API_PREFIX=/api
# Directory for backup files and the SQLite database inside the container.
DATA_DIR=/app/storage
# SQLAlchemy URL; four slashes = absolute path for SQLite.
DATABASE_URL=sqlite:////app/storage/routeros_backup_next.db
|
||||
16
backend/Dockerfile
Normal file
16
backend/Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
||||
# Backend image: FastAPI application served via the `fastapi` CLI.
FROM python:3.14-slim

# No .pyc files; unbuffered stdout/stderr so container logs appear immediately.
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1

WORKDIR /app

# curl: health checks; build-essential: compiling C-extension wheels.
RUN apt-get update && apt-get install -y --no-install-recommends curl build-essential && rm -rf /var/lib/apt/lists/*

# Install dependencies in their own layer so code changes don't bust the cache.
COPY backend/requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -r /app/requirements.txt

COPY backend /app
# Storage location for backups and the SQLite DB (see DATA_DIR in .env.example).
RUN mkdir -p /app/storage

EXPOSE 8000
CMD ["fastapi", "run", "app/main.py", "--host", "0.0.0.0", "--port", "8000"]
|
||||
0
backend/app/__init__.py
Normal file
0
backend/app/__init__.py
Normal file
12
backend/app/api/__init__.py
Normal file
12
backend/app/api/__init__.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from fastapi import APIRouter

from app.api.routes import auth, backups, dashboard, health, logs, routers, settings

# Aggregate router: each feature module contributes its endpoints here.
# NOTE(review): routes/swos_beta.py defines a router that is never mounted —
# confirm whether the SwOS beta endpoints are meant to be registered too.
api_router = APIRouter()

_mounts = [
    (auth.router, '/auth', 'auth'),
    (dashboard.router, '/dashboard', 'dashboard'),
    (routers.router, '/routers', 'routers'),
    (backups.router, '/backups', 'backups'),
    (settings.router, '/settings', 'settings'),
    (logs.router, '/logs', 'logs'),
]
for child_router, prefix, tag in _mounts:
    api_router.include_router(child_router, prefix=prefix, tags=[tag])

# health.router declares its own '/health' path, so no prefix here.
api_router.include_router(health.router, tags=['health'])
|
||||
40
backend/app/api/deps.py
Normal file
40
backend/app/api/deps.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from typing import Generator
|
||||
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordBearer
|
||||
from jose import JWTError, jwt
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.core.config import settings
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.user import User
|
||||
|
||||
|
||||
# Token extractor pointing at the login endpoint (used by OpenAPI docs too).
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/auth/login")


def get_db() -> Generator[Session, None, None]:
    """Yield a request-scoped SQLAlchemy session, always closing it afterwards."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
|
||||
|
||||
|
||||
def get_current_user(token: str = Depends(oauth2_scheme), db: Session = Depends(get_db)) -> User:
    """Resolve the bearer token into a User row, or raise 401.

    The JWT 'sub' claim carries the username. Decode failure, a missing
    claim, and an unknown username all map to the same 401 response so a
    caller cannot distinguish why authentication failed.
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(token, settings.secret_key, algorithms=[settings.jwt_algorithm])
        username: str | None = payload.get("sub")
        if username is None:
            raise credentials_exception
    except JWTError as exc:
        raise credentials_exception from exc
    user = db.query(User).filter(User.username == username).first()
    if not user:
        raise credentials_exception
    return user
|
||||
106
backend/app/api/routes/auth.py
Normal file
106
backend/app/api/routes/auth.py
Normal file
@@ -0,0 +1,106 @@
|
||||
from datetime import timedelta
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.api.deps import get_current_user, get_db
|
||||
from app.core.config import settings
|
||||
from app.core.security import create_access_token, get_password_hash, verify_password
|
||||
from app.models.user import User
|
||||
from app.schemas.auth import (
|
||||
ChangePasswordRequest,
|
||||
RegisterRequest,
|
||||
TokenResponse,
|
||||
UpdateUserPreferencesRequest,
|
||||
UserResponse,
|
||||
)
|
||||
|
||||
router = APIRouter()


@router.post("/register", response_model=UserResponse)
def register(payload: RegisterRequest, db: Session = Depends(get_db)):
    """Create a new account.

    Returns 403 when registration is disabled and 409 when the username is
    already taken; otherwise persists and returns the new user.
    """
    if not settings.allow_registration:
        raise HTTPException(status_code=403, detail="Registration is disabled")
    duplicate = db.query(User).filter(User.username == payload.username).first()
    if duplicate:
        raise HTTPException(status_code=409, detail="Username already exists")
    account = User(username=payload.username, password_hash=get_password_hash(payload.password))
    db.add(account)
    db.commit()
    db.refresh(account)
    return account
|
||||
|
||||
|
||||
@router.post("/login", response_model=TokenResponse)
async def login(request: Request, db: Session = Depends(get_db)):
    """Authenticate and return a bearer token plus the user profile.

    Accepts either a JSON body or form data (OAuth2 password flow), so both
    the SPA and the interactive API docs can log in at the same endpoint.
    """
    username = None
    password = None
    content_type = (request.headers.get("content-type") or "").lower()

    if "application/json" in content_type:
        try:
            payload = await request.json()
        except Exception:
            # Malformed JSON: fall through to the 422 below.
            payload = {}
        username = payload.get("username")
        password = payload.get("password")
    else:
        form_data = await request.form()
        username = form_data.get("username")
        password = form_data.get("password")

    if not username or not password:
        raise HTTPException(status_code=422, detail="Username and password are required")

    user = db.query(User).filter(User.username == username).first()
    if not user or not verify_password(password, user.password_hash):
        # Same message for unknown user and wrong password (no user enumeration).
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials")
    token = create_access_token(
        subject=user.username,
        expires_delta=timedelta(minutes=settings.access_token_expire_minutes),
    )
    return TokenResponse(access_token=token, user=UserResponse.model_validate(user))
|
||||
|
||||
|
||||
@router.get("/me", response_model=UserResponse)
def me(current_user: User = Depends(get_current_user)):
    """Return the authenticated user's own profile."""
    return current_user
|
||||
|
||||
|
||||
|
||||
|
||||
@router.put("/preferences", response_model=UserResponse)
def update_preferences(
    payload: UpdateUserPreferencesRequest,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
):
    """Persist UI preferences (language and font) for the current user.

    Unknown values are rejected with 422; omitted values fall back to the
    defaults ('pl' / 'default').
    """
    language = (payload.preferred_language or 'pl').strip().lower()
    font = (payload.preferred_font or 'default').strip().lower()

    if language not in {'pl', 'en', 'es', 'no'}:
        raise HTTPException(status_code=422, detail='Unsupported language')
    if font not in {'default', 'adwaita_mono', 'hack'}:
        raise HTTPException(status_code=422, detail='Unsupported font')

    current_user.preferred_language = language
    current_user.preferred_font = font
    db.add(current_user)
    db.commit()
    db.refresh(current_user)
    return current_user
|
||||
|
||||
|
||||
@router.post("/change-password")
def change_password(
    payload: ChangePasswordRequest,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
):
    """Change the current user's password after re-verifying the old one."""
    if not verify_password(payload.current_password, current_user.password_hash):
        raise HTTPException(status_code=400, detail="Current password is invalid")
    current_user.password_hash = get_password_hash(payload.new_password)
    db.add(current_user)
    db.commit()
    return {"message": "Password changed successfully"}
|
||||
128
backend/app/api/routes/backups.py
Normal file
128
backend/app/api/routes/backups.py
Normal file
@@ -0,0 +1,128 @@
|
||||
from datetime import date
|
||||
import io
|
||||
import zipfile
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from fastapi.responses import FileResponse, HTMLResponse, StreamingResponse
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.api.deps import get_current_user, get_db
|
||||
from app.models.user import User
|
||||
from app.schemas.backup import BackupDiffResponse, BackupResponse, BulkActionRequest
|
||||
from app.services.backup_service import backup_service
|
||||
|
||||
router = APIRouter()


@router.get("", response_model=list[BackupResponse])
def list_backups(
    search: str | None = Query(default=None),
    backup_type: str | None = Query(default=None, pattern="^(export|binary)$"),
    router_id: int | None = Query(default=None),
    created_on: date | None = Query(default=None),
    sort_by: str = Query(default="created_at"),
    order: str = Query(default="desc", pattern="^(asc|desc)$"),
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
):
    """List the current user's backups.

    All filtering and sorting is delegated to the backup service; the query
    parameters here just constrain what FastAPI will accept.
    """
    return backup_service.list_backups(
        db,
        current_user,
        search=search,
        backup_type=backup_type,
        router_id=router_id,
        created_on=created_on,
        sort_by=sort_by,
        order=order,
    )
|
||||
|
||||
|
||||
@router.get("/router/{router_id}", response_model=list[BackupResponse])
def list_router_backups(router_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """List backups belonging to one of the user's devices."""
    return backup_service.list_router_backups(db, current_user, router_id)


@router.post("/routers/export-all")
def export_all(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Run an 'export' backup for every device the user owns."""
    return backup_service.export_all(db, current_user)


@router.post("/routers/binary-all")
def binary_all(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Run a 'binary' backup for every device the user owns."""
    return backup_service.binary_all(db, current_user)


@router.post("/router/{router_id}/export", response_model=BackupResponse)
def export_router(router_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Create an export backup for a single device."""
    return backup_service.export_router(db, current_user, router_id)


@router.post("/router/{router_id}/binary", response_model=BackupResponse)
def binary_backup(router_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Create a binary backup for a single device."""
    return backup_service.binary_backup(db, current_user, router_id)


@router.post("/router/{router_id}/upload/{backup_id}")
def upload_to_router(router_id: int, backup_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Push a stored backup file back onto the device."""
    backup_service.upload_backup_to_router(db, current_user, router_id, backup_id)
    return {"message": "Backup uploaded to router"}


@router.delete("/{backup_id}")
def delete_backup(backup_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Delete a single backup owned by the current user."""
    backup_service.delete_backup(db, current_user, backup_id)
    return {"message": "Backup deleted"}
|
||||
|
||||
|
||||
@router.get("/{backup_id}/download")
def download_backup(backup_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Download the raw backup file from disk."""
    backup = backup_service.get_backup_for_user(db, current_user, backup_id)
    return FileResponse(path=backup.file_path, filename=backup.file_name)


@router.get("/{backup_id}/view")
def view_export(backup_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Return the text content of an export backup; binary backups get a 400."""
    backup = backup_service.get_backup_for_user(db, current_user, backup_id)
    if backup.backup_type != "export":
        raise HTTPException(status_code=400, detail="Only export backups can be viewed")
    # errors="ignore": exports are expected to be text; never fail the view on a stray byte.
    with open(backup.file_path, "r", encoding="utf-8", errors="ignore") as handle:
        # NOTE(review): reaches into the service's private _serialize_backup
        # helper — consider promoting it to a public method.
        return {"content": handle.read(), "backup": BackupResponse.model_validate(backup_service._serialize_backup(backup))}


@router.post("/{backup_id}/email")
def email_backup(backup_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Send the backup by e-mail (delegated to the backup service)."""
    backup_service.email_backup(db, current_user, backup_id)
    return {"message": "Backup sent by email"}
|
||||
|
||||
|
||||
@router.get("/{left_id}/diff/{right_id}", response_model=BackupDiffResponse)
def diff_backups(left_id: int, right_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Structured diff between two backups the user owns."""
    return backup_service.diff_backups(db, current_user, left_id, right_id)


@router.get("/{left_id}/diff/{right_id}/html", response_class=HTMLResponse)
def diff_backups_html(left_id: int, right_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """The same diff rendered as an HTML page (the service's 'diff_html' field)."""
    result = backup_service.diff_backups(db, current_user, left_id, right_id)
    return HTMLResponse(result["diff_html"])
|
||||
|
||||
|
||||
@router.post("/bulk")
def bulk_action(payload: BulkActionRequest, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Delete, or download as a zip, a set of backups in one request.

    All ids are resolved up-front via get_backup_for_user, so an id the user
    does not own aborts the whole action before anything is deleted/archived.
    """
    backups = [backup_service.get_backup_for_user(db, current_user, backup_id) for backup_id in payload.backup_ids]
    if payload.action == "delete":
        for backup in backups:
            # commit=False: one transaction for the whole batch, committed below.
            backup_service.delete_backup(db, current_user, backup.id, commit=False)
        db.commit()
        return {"message": f"Deleted {len(backups)} backups"}
    if payload.action == "download":
        # Zip is assembled fully in memory — assumes backups are modest in size.
        stream = io.BytesIO()
        with zipfile.ZipFile(stream, "w") as archive:
            for backup in backups:
                archive.write(backup.file_path, backup.file_name)
        stream.seek(0)
        return StreamingResponse(
            stream,
            media_type="application/zip",
            headers={"Content-Disposition": 'attachment; filename="backups.zip"'},
        )
    raise HTTPException(status_code=400, detail="Unsupported bulk action")
|
||||
42
backend/app/api/routes/dashboard.py
Normal file
42
backend/app/api/routes/dashboard.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.api.deps import get_current_user, get_db
|
||||
from app.models.backup import Backup
|
||||
from app.models.log import OperationLog
|
||||
from app.models.router import Router
|
||||
from app.models.user import User
|
||||
from app.schemas.dashboard import DashboardResponse
|
||||
from app.services.file_service import get_storage_stats
|
||||
|
||||
router = APIRouter()


@router.get("", response_model=DashboardResponse)
def get_dashboard(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Aggregate device/backup counters, storage stats, and recent activity."""
    routers_count = db.query(func.count(Router.id)).filter(Router.owner_id == current_user.id).scalar() or 0
    export_count = (
        db.query(func.count(Backup.id))
        .join(Router)
        .filter(Router.owner_id == current_user.id, Backup.backup_type == "export")
        .scalar()
        or 0
    )
    binary_count = (
        db.query(func.count(Backup.id))
        .join(Router)
        .filter(Router.owner_id == current_user.id, Backup.backup_type == "binary")
        .scalar()
        or 0
    )
    # NOTE(review): recent logs are not filtered by owner — every user sees the
    # same global activity feed (matches the /logs endpoint). Confirm intended.
    recent_logs = db.query(OperationLog).order_by(OperationLog.timestamp.desc()).limit(10).all()
    storage = get_storage_stats()
    return DashboardResponse(
        routers_count=routers_count,
        export_count=export_count,
        binary_count=binary_count,
        total_backups=export_count + binary_count,
        storage=storage,
        recent_logs=recent_logs,
    )
|
||||
19
backend/app/api/routes/health.py
Normal file
19
backend/app/api/routes/health.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.api.deps import get_db
|
||||
|
||||
router = APIRouter()


@router.get("/health")
def health(db: Session = Depends(get_db)):
    """Health probe: reports DB connectivity plus a UTC timestamp.

    Always responds 200; a database failure only flips status to "error".
    """
    db_ok = True
    try:
        db.execute(text("SELECT 1"))
    except Exception:
        db_ok = False
    return {"status": "ok" if db_ok else "error", "timestamp": datetime.now(timezone.utc).isoformat()}
|
||||
32
backend/app/api/routes/logs.py
Normal file
32
backend/app/api/routes/logs.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.api.deps import get_current_user, get_db
|
||||
from app.models.log import OperationLog
|
||||
from app.models.user import User
|
||||
from app.services.log_service import log_service
|
||||
|
||||
router = APIRouter()


@router.get("")
def get_logs(
    limit: int = Query(100, ge=1, le=500),
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
):
    """Return the newest operation logs, newest first (capped at 500)."""
    # current_user only enforces authentication; the log feed is global.
    _ = current_user
    return db.query(OperationLog).order_by(OperationLog.timestamp.desc()).limit(limit).all()


@router.delete("/older-than/{days}")
def delete_logs(
    days: int,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
):
    """Purge operation logs older than *days* days (must be >= 1)."""
    _ = current_user
    if days < 1:
        raise HTTPException(status_code=400, detail="Days must be >= 1")
    deleted = log_service.delete_older_than(db, days)
    return {"message": f"Deleted {deleted} logs", "deleted": deleted}
|
||||
120
backend/app/api/routes/routers.py
Normal file
120
backend/app/api/routes/routers.py
Normal file
@@ -0,0 +1,120 @@
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.api.deps import get_current_user, get_db
|
||||
from app.models.router import Router
|
||||
from app.models.user import User
|
||||
from app.schemas.router import RouterCreate, RouterResponse, RouterTestConnection, RouterUpdate
|
||||
from app.services.router_service import router_service
|
||||
from app.services.settings_service import settings_service
|
||||
|
||||
router = APIRouter()


def serialize_router(router: Router, global_settings) -> RouterResponse:
    """Serialize a device row, adding derived credential/capability flags.

    Secrets are never included in the response — only booleans describing
    whether effective credentials exist, taking global fallbacks into
    account: RouterOS devices may fall back to the global SSH key, SwOS
    devices to the global default SwOS username/password.
    """
    has_router_key = bool((router.ssh_key or '').strip())
    has_global_key = bool((global_settings.global_ssh_key or '').strip())
    # Normalize blank strings to None so fallbacks below work uniformly.
    router_user = (router.ssh_user or '').strip() or None
    router_password = (router.ssh_password or '').strip() or None
    default_swos_user = (global_settings.default_switchos_username or '').strip() or None
    default_swos_password = (global_settings.default_switchos_password or '').strip() or None
    effective_username = router_user
    uses_global_switchos_credentials = False
    has_effective_password = bool(router_password)

    if router.device_type == 'switchos':
        effective_username = router_user or default_swos_user
        uses_global_switchos_credentials = bool(
            (not router_user and default_swos_user) or (not router_password and default_swos_password)
        )
        has_effective_password = bool(router_password or default_swos_password)

    payload = RouterResponse.model_validate(router, from_attributes=True).model_dump()
    payload['effective_username'] = effective_username
    # SSH-key flags only apply to RouterOS devices.
    payload['uses_global_ssh_key'] = router.device_type == 'routeros' and has_global_key and not has_router_key
    payload['has_effective_ssh_key'] = router.device_type == 'routeros' and (has_router_key or has_global_key)
    payload['uses_global_switchos_credentials'] = uses_global_switchos_credentials
    payload['has_effective_password'] = has_effective_password
    # Export and restore-upload are RouterOS-only capabilities.
    payload['supports_export'] = router.device_type == 'routeros'
    payload['supports_restore_upload'] = router.device_type == 'routeros'
    return RouterResponse.model_validate(payload)
|
||||
|
||||
|
||||
@router.get('', response_model=list[RouterResponse])
def list_routers(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """List the current user's devices, newest first."""
    global_settings = settings_service.get_or_create(db)
    routers = db.query(Router).filter(Router.owner_id == current_user.id).order_by(Router.created_at.desc()).all()
    return [serialize_router(router, global_settings) for router in routers]


@router.post('', response_model=RouterResponse)
def create_router(payload: RouterCreate, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Create a device owned by the current user."""
    router_data = payload.model_dump()
    # SwOS devices may rely on global default credentials, so an absent
    # username is normalized to '' instead of being rejected.
    if router_data.get('device_type') == 'switchos' and router_data.get('ssh_user') is None:
        router_data['ssh_user'] = ''
    router = Router(**router_data, owner_id=current_user.id)
    db.add(router)
    db.commit()
    db.refresh(router)
    global_settings = settings_service.get_or_create(db)
    return serialize_router(router, global_settings)
|
||||
|
||||
|
||||
@router.get('/{router_id}', response_model=RouterResponse)
def get_router(router_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Fetch one device owned by the current user, or 404."""
    device = (
        db.query(Router)
        .filter(Router.id == router_id, Router.owner_id == current_user.id)
        .first()
    )
    if device is None:
        raise HTTPException(status_code=404, detail='Device not found')
    return serialize_router(device, settings_service.get_or_create(db))
|
||||
|
||||
|
||||
@router.put('/{router_id}', response_model=RouterResponse)
def update_router(router_id: int, payload: RouterUpdate, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Apply a partial update to an owned device, normalizing per device type.

    Moving a device to SwOS clears its SSH key, defaults the port to 80, and
    normalizes a missing username to ''. Converting to RouterOS (from another
    type, with no explicit port) defaults the port to 22 and falls back to the
    stored username or 'admin'.
    """
    router = db.query(Router).filter(Router.id == router_id, Router.owner_id == current_user.id).first()
    if not router:
        raise HTTPException(status_code=404, detail='Device not found')
    changes = payload.model_dump(exclude_unset=True)
    # The device type after this update (current one if not being changed).
    target_device_type = changes.get('device_type', router.device_type)
    if target_device_type == 'switchos':
        changes['ssh_key'] = None
        if 'port' not in changes:
            changes['port'] = 80
        if changes.get('ssh_user') is None:
            changes['ssh_user'] = ''
    elif target_device_type == 'routeros' and 'port' not in changes and router.device_type != 'routeros':
        changes['port'] = 22
        if not changes.get('ssh_user'):
            changes['ssh_user'] = router.ssh_user or 'admin'
    for key, value in changes.items():
        setattr(router, key, value)
    db.add(router)
    db.commit()
    db.refresh(router)
    global_settings = settings_service.get_or_create(db)
    return serialize_router(router, global_settings)
|
||||
|
||||
|
||||
@router.delete('/{router_id}')
def delete_router(router_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Delete an owned device together with its backup files and rows.

    Raises 404 when the device does not exist or belongs to another user.
    """
    router = db.query(Router).filter(Router.id == router_id, Router.owner_id == current_user.id).first()
    if not router:
        raise HTTPException(status_code=404, detail='Device not found')
    for backup in list(router.backups):
        # missing_ok avoids the exists()/unlink() race and tolerates files
        # that were already removed from disk.
        Path(backup.file_path).unlink(missing_ok=True)
    db.delete(router)
    db.commit()
    return {'message': 'Device deleted'}
|
||||
|
||||
|
||||
@router.get('/{router_id}/test-connection', response_model=RouterTestConnection)
def test_connection(router_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Try to reach an owned device using its effective credentials."""
    router = db.query(Router).filter(Router.id == router_id, Router.owner_id == current_user.id).first()
    if not router:
        raise HTTPException(status_code=404, detail='Device not found')
    # Global settings are passed so the service can apply credential fallbacks.
    global_settings = settings_service.get_or_create(db)
    return router_service.test_connection(db, router, global_settings)
|
||||
78
backend/app/api/routes/settings.py
Normal file
78
backend/app/api/routes/settings.py
Normal file
@@ -0,0 +1,78 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.api.deps import get_current_user, get_db
|
||||
from app.core.security import verify_password
|
||||
from app.models.settings import GlobalSettings
|
||||
from app.models.user import User
|
||||
from app.schemas.settings import (
|
||||
RevealSshKeyRequest,
|
||||
RevealSshKeyResponse,
|
||||
SchedulerStatusResponse,
|
||||
SettingsResponse,
|
||||
SettingsUpdate,
|
||||
)
|
||||
from app.services.notification_service import notification_service
|
||||
from app.services.scheduler import scheduler_service
|
||||
from app.services.settings_service import settings_service
|
||||
|
||||
router = APIRouter()


def serialize_settings(settings: GlobalSettings) -> SettingsResponse:
    """Serialize global settings for the API, redacting the global SSH key.

    The key value is replaced with None; 'has_*' booleans tell the frontend
    whether secrets are configured (the key itself is only available through
    the /reveal-ssh-key endpoint after password re-verification).
    """
    payload = SettingsResponse.model_validate(settings, from_attributes=True).model_dump()
    payload['global_ssh_key'] = None
    payload['has_global_ssh_key'] = bool((settings.global_ssh_key or '').strip())
    payload['has_default_switchos_credentials'] = bool(
        (settings.default_switchos_username or '').strip() or (settings.default_switchos_password or '').strip()
    )
    return SettingsResponse.model_validate(payload)
|
||||
|
||||
|
||||
@router.get('', response_model=SettingsResponse)
def get_settings(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Return global settings (secrets redacted); the row is created on first access."""
    _ = current_user
    settings = settings_service.get_or_create(db)
    return serialize_settings(settings)


@router.get('/scheduler-status', response_model=SchedulerStatusResponse)
def get_scheduler_status(current_user: User = Depends(get_current_user)):
    """Report the backup scheduler's current state."""
    _ = current_user
    return scheduler_service.scheduler_status()


@router.put('', response_model=SettingsResponse)
def update_settings(payload: SettingsUpdate, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Persist settings changes, then re-arm the scheduler to pick them up."""
    _ = current_user
    settings = settings_service.update(db, payload)
    scheduler_service.reschedule()
    return serialize_settings(settings)
|
||||
|
||||
|
||||
@router.post('/reveal-ssh-key', response_model=RevealSshKeyResponse)
def reveal_ssh_key(
    payload: RevealSshKeyRequest,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
):
    """Return the plaintext global SSH key after re-verifying the user's password.

    This is the only endpoint that exposes the key; normal settings responses
    redact it (see serialize_settings).
    """
    if not verify_password(payload.password, current_user.password_hash):
        raise HTTPException(status_code=400, detail='Current password is invalid')
    settings = settings_service.get_or_create(db)
    return {'global_ssh_key': settings.global_ssh_key}
|
||||
|
||||
|
||||
@router.post('/test-email')
def test_email(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Send a test e-mail using the stored notification settings."""
    _ = current_user
    settings = settings_service.get_or_create(db)
    notification_service.send_test_email(settings)
    return {'message': 'Test email sent'}


@router.post('/test-pushover')
def test_pushover(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Send a test Pushover notification using the stored settings."""
    _ = current_user
    settings = settings_service.get_or_create(db)
    notification_service.send_test_pushover(settings)
    return {'message': 'Test pushover sent'}
|
||||
33
backend/app/api/routes/swos_beta.py
Normal file
33
backend/app/api/routes/swos_beta.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
||||
from app.api.deps import get_current_user
|
||||
from app.models.user import User
|
||||
from app.schemas.swos_beta import SwosBetaCredentials, SwosBetaProbeResponse
|
||||
from app.services.swos_beta_service import swos_beta_service
|
||||
|
||||
router = APIRouter()


@router.post('/probe', response_model=SwosBetaProbeResponse)
def probe_swos(payload: SwosBetaCredentials, current_user: User = Depends(get_current_user)):
    """Probe a SwOS device with ad-hoc credentials, without persisting anything."""
    # Dependency only enforces authentication; the probe itself is user-independent.
    del current_user
    try:
        return swos_beta_service.probe(payload)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc


@router.post('/download')
def download_swos_backup(payload: SwosBetaCredentials, current_user: User = Depends(get_current_user)):
    """Fetch a one-off backup from a SwOS device and stream it to the client."""
    del current_user
    try:
        backup = swos_beta_service.download_backup(payload)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc

    return StreamingResponse(
        iter([backup.content]),
        media_type=backup.content_type,
        headers={'Content-Disposition': f'attachment; filename="{backup.filename}"'},
    )
|
||||
38
backend/app/core/config.py
Normal file
38
backend/app/core/config.py
Normal file
@@ -0,0 +1,38 @@
|
||||
from pathlib import Path
|
||||
|
||||
from pydantic import Field
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application configuration, loaded from the environment / `.env` file."""

    app_name: str = 'Mikrotik Backup System'
    app_env: str = 'development'
    # SECURITY: signs JWT access tokens — must be overridden in production.
    secret_key: str = 'change-me'
    jwt_algorithm: str = 'HS256'
    access_token_expire_minutes: int = 1440  # 24 hours
    database_url: str = 'sqlite:///./storage/routeros_backup_next.db'
    data_dir: str = './storage'
    allow_registration: bool = True
    api_prefix: str = '/api'
    timezone: str = 'Europe/Warsaw'
    # Bootstrap credentials — change immediately after first login.
    default_admin_username: str = 'admin'
    default_admin_password: str = 'admin'
    smtp_starttls: bool = True
    smtp_timeout_seconds: int = 20
    cors_origins: list[str] = Field(default_factory=lambda: ['http://localhost:4200', 'http://127.0.0.1:4200'])

    model_config = SettingsConfigDict(
        env_file='.env',
        env_file_encoding='utf-8',
        extra='ignore',
        env_nested_delimiter='__',
    )

    @property
    def data_path(self) -> Path:
        # Creating the directory on access keeps callers free of mkdir boilerplate.
        path = Path(self.data_dir)
        path.mkdir(parents=True, exist_ok=True)
        return path


# Singleton read by the rest of the application.
settings = Settings()
|
||||
88
backend/app/core/cron_utils.py
Normal file
88
backend/app/core/cron_utils.py
Normal file
@@ -0,0 +1,88 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
WEEKDAY_LABELS = {
|
||||
'0': 'Sunday',
|
||||
'1': 'Monday',
|
||||
'2': 'Tuesday',
|
||||
'3': 'Wednesday',
|
||||
'4': 'Thursday',
|
||||
'5': 'Friday',
|
||||
'6': 'Saturday',
|
||||
'7': 'Sunday',
|
||||
'sun': 'Sunday',
|
||||
'mon': 'Monday',
|
||||
'tue': 'Tuesday',
|
||||
'wed': 'Wednesday',
|
||||
'thu': 'Thursday',
|
||||
'fri': 'Friday',
|
||||
'sat': 'Saturday',
|
||||
}
|
||||
|
||||
|
||||
class CronValidationError(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
def parse_cron_expression(expr: str, timezone_str: str) -> CronTrigger:
|
||||
expr = (expr or '').strip()
|
||||
if not expr:
|
||||
raise CronValidationError('Cron expression cannot be empty')
|
||||
|
||||
parts = expr.split()
|
||||
if len(parts) != 5:
|
||||
raise CronValidationError('Cron expression must contain exactly 5 fields')
|
||||
|
||||
minute, hour, day, month, day_of_week = parts
|
||||
try:
|
||||
return CronTrigger(
|
||||
minute=minute,
|
||||
hour=hour,
|
||||
day=day,
|
||||
month=month,
|
||||
day_of_week=day_of_week,
|
||||
timezone=ZoneInfo(timezone_str),
|
||||
)
|
||||
except Exception as exc: # pragma: no cover - APScheduler formats messages
|
||||
raise CronValidationError(str(exc)) from exc
|
||||
|
||||
|
||||
def validate_cron_expression(expr: str, timezone_str: str) -> None:
    """Validate a cron expression; raises CronValidationError on any problem."""
    # Parsing is the validation — the trigger object itself is discarded.
    parse_cron_expression(expr, timezone_str)
|
||||
|
||||
|
||||
def preview_next_runs(expr: str, timezone_str: str, count: int = 3) -> list[datetime]:
    """Return up to *count* upcoming fire times for a cron expression.

    The list may be shorter than *count* if the trigger stops firing.
    Raises CronValidationError (via parse_cron_expression) on a bad expression.
    """
    trigger = parse_cron_expression(expr, timezone_str)
    reference = datetime.now(ZoneInfo(timezone_str))
    last_fire = None
    upcoming: list[datetime] = []
    while len(upcoming) < max(count, 0):
        fire_time = trigger.get_next_fire_time(last_fire, reference)
        if fire_time is None:
            break
        upcoming.append(fire_time)
        # Advance both anchors so each iteration yields the following run.
        last_fire = fire_time
        reference = fire_time
    return upcoming
|
||||
|
||||
|
||||
def describe_cron_expression(expr: str) -> str:
    """Summarise a 5-field cron expression in plain English.

    Recognises only the two common shapes — daily at HH:MM and weekly on one
    day at HH:MM; everything else (including malformed input) is reported as
    'Custom cron', and an empty expression as 'Disabled'.
    """
    weekday_labels = {
        '0': 'Sunday', '1': 'Monday', '2': 'Tuesday', '3': 'Wednesday',
        '4': 'Thursday', '5': 'Friday', '6': 'Saturday', '7': 'Sunday',
        'sun': 'Sunday', 'mon': 'Monday', 'tue': 'Tuesday', 'wed': 'Wednesday',
        'thu': 'Thursday', 'fri': 'Friday', 'sat': 'Saturday',
    }

    cleaned = (expr or '').strip()
    if not cleaned:
        return 'Disabled'

    fields = cleaned.split()
    if len(fields) != 5:
        return 'Custom cron'

    minute, hour, day, month, day_of_week = fields
    fixed_time_daily = minute.isdigit() and hour.isdigit() and (day, month) == ('*', '*')
    if fixed_time_daily and day_of_week == '*':
        return f'Every day at {int(hour):02d}:{int(minute):02d}'
    if fixed_time_daily and day_of_week.lower() in weekday_labels:
        weekday = weekday_labels[day_of_week.lower()]
        return f'Every {weekday} at {int(hour):02d}:{int(minute):02d}'
    return 'Custom cron'
|
||||
22
backend/app/core/security.py
Normal file
22
backend/app/core/security.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from jose import jwt
|
||||
from passlib.context import CryptContext
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
# Shared password-hashing context for user credentials.
pwd_context = CryptContext(schemes=["pbkdf2_sha256"], deprecated="auto")


def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against its stored hash."""
    return pwd_context.verify(plain_password, hashed_password)


def get_password_hash(password: str) -> str:
    """Hash a plaintext password for storage."""
    return pwd_context.hash(password)


def create_access_token(subject: str, expires_delta: timedelta) -> str:
    """Issue a signed JWT whose `sub` is *subject*, expiring after *expires_delta*.

    Signing key and algorithm come from the application settings.
    """
    expires_at = datetime.now(timezone.utc) + expires_delta
    claims = {"sub": subject, "exp": expires_at}
    return jwt.encode(claims, settings.secret_key, algorithm=settings.jwt_algorithm)
|
||||
7
backend/app/db/base.py
Normal file
7
backend/app/db/base.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from app.models.backup import Backup
|
||||
from app.models.log import OperationLog
|
||||
from app.models.router import Router
|
||||
from app.models.settings import GlobalSettings
|
||||
from app.models.user import User
|
||||
|
||||
__all__ = ["User", "Router", "Backup", "OperationLog", "GlobalSettings"]
|
||||
84
backend/app/db/session.py
Normal file
84
backend/app/db/session.py
Normal file
@@ -0,0 +1,84 @@
|
||||
from pathlib import Path
|
||||
|
||||
from sqlalchemy import create_engine, inspect, text
|
||||
from sqlalchemy.orm import declarative_base, sessionmaker
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.security import get_password_hash
|
||||
|
||||
|
||||
def _ensure_sqlite_parent(database_url: str) -> None:
|
||||
if not database_url.startswith('sqlite:///'):
|
||||
return
|
||||
relative_path = database_url.removeprefix('sqlite:///')
|
||||
if not relative_path or relative_path == ':memory:':
|
||||
return
|
||||
db_path = Path(relative_path)
|
||||
if not db_path.is_absolute():
|
||||
db_path = Path.cwd() / db_path
|
||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
# Make sure the SQLite file's directory exists before the engine touches it.
_ensure_sqlite_parent(settings.database_url)

_is_sqlite = settings.database_url.startswith('sqlite')
engine = create_engine(
    settings.database_url,
    # SQLite connections are thread-bound by default; sessions may be used
    # from different worker threads, so relax that check for SQLite only.
    connect_args={'check_same_thread': False} if _is_sqlite else {},
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
|
||||
|
||||
|
||||
def _ensure_column(table_name: str, column_name: str, ddl: str) -> None:
    """Add *column_name* with *ddl* to *table_name* unless it already exists.

    NOTE(review): identifiers are interpolated into raw SQL; callers must pass
    only trusted, hard-coded names (as _run_lightweight_migrations does).
    """
    present = {column['name'] for column in inspect(engine).get_columns(table_name)}
    if column_name in present:
        return
    statement = text(f'ALTER TABLE {table_name} ADD COLUMN {column_name} {ddl}')
    with engine.begin() as connection:
        connection.execute(statement)
|
||||
|
||||
|
||||
def _run_lightweight_migrations() -> None:
    """Apply additive, idempotent schema upgrades to pre-existing databases.

    Each entry is a (column, DDL) pair added to its table only when the table
    already exists and the column is missing; ordering matches the original
    hand-written migration sequence.
    """
    migrations = {
        'global_settings': (
            ('connection_test_interval_minutes', 'INTEGER DEFAULT 0'),
            ('default_switchos_username', 'VARCHAR(120)'),
            ('default_switchos_password', 'VARCHAR(255)'),
        ),
        'users': (
            ('preferred_language', "VARCHAR(8) DEFAULT 'pl' NOT NULL"),
            ('preferred_font', "VARCHAR(32) DEFAULT 'default' NOT NULL"),
        ),
        'routers': (
            ('device_type', "VARCHAR(32) DEFAULT 'routeros' NOT NULL"),
            ('last_connection_status', 'BOOLEAN'),
            ('last_connection_tested_at', 'DATETIME'),
            ('last_connection_error', 'TEXT'),
            ('last_connection_hostname', 'VARCHAR(255)'),
            ('last_connection_model', 'VARCHAR(255)'),
            ('last_connection_version', 'VARCHAR(255)'),
            ('last_connection_uptime', 'VARCHAR(255)'),
            ('last_connection_transport', 'VARCHAR(32)'),
            ('last_connection_server', 'VARCHAR(255)'),
            ('last_connection_auth_mode', 'VARCHAR(64)'),
            ('last_connection_http_status', 'VARCHAR(32)'),
            ('last_connection_backup_available', 'BOOLEAN'),
        ),
    }
    existing_tables = set(inspect(engine).get_table_names())
    for table, columns in migrations.items():
        if table not in existing_tables:
            continue
        for column, ddl in columns:
            _ensure_column(table, column, ddl)
|
||||
|
||||
def init_db():
    """Create tables, apply lightweight migrations, and seed default rows.

    Seeds the initial admin user (credentials from settings) and a singleton
    GlobalSettings row, each only when its table is empty.
    """
    import app.db.base  # noqa: F401  (registers every model on Base.metadata)
    from app.models.settings import GlobalSettings
    from app.models.user import User

    Base.metadata.create_all(bind=engine)
    _run_lightweight_migrations()

    with SessionLocal() as db:
        if db.query(User).first() is None:
            admin = User(
                username=settings.default_admin_username,
                password_hash=get_password_hash(settings.default_admin_password),
            )
            db.add(admin)
            db.commit()
        if db.query(GlobalSettings).first() is None:
            db.add(GlobalSettings())
            db.commit()
|
||||
30
backend/app/main.py
Normal file
30
backend/app/main.py
Normal file
@@ -0,0 +1,30 @@
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
from app.api import api_router
|
||||
from app.core.config import settings
|
||||
from app.db.session import init_db
|
||||
from app.services.scheduler import scheduler_service
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application start-up/shutdown hook.

    Prepares the database and starts the background scheduler before serving;
    the scheduler is always shut down, even on an error during shutdown.
    """
    init_db()
    scheduler_service.start()
    try:
        yield
    finally:
        scheduler_service.shutdown()
|
||||
|
||||
|
||||
app = FastAPI(title=settings.app_name, version="1.1.0", lifespan=lifespan)

# Allow the configured frontend origins to call the API with credentials.
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.cors_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(api_router, prefix=settings.api_prefix)
|
||||
19
backend/app/models/backup.py
Normal file
19
backend/app/models/backup.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from app.db.session import Base
|
||||
|
||||
|
||||
class Backup(Base):
    """A stored backup artifact (text export or binary file) for one device."""

    __tablename__ = "backups"

    id = Column(Integer, primary_key=True, index=True)
    # Owning device; cascade delete is configured on Router.backups.
    router_id = Column(Integer, ForeignKey("routers.id"), nullable=False, index=True)
    file_path = Column(String(500), nullable=False)  # full path of the file on disk
    file_name = Column(String(255), nullable=False)
    backup_type = Column(String(50), nullable=False, default="export")  # 'export' | 'binary'
    # Hex digest of the stored file; populated for binary backups only.
    checksum = Column(String(64), nullable=True)
    created_at = Column(DateTime, server_default=func.now(), nullable=False)

    router = relationship("Router", back_populates="backups")
|
||||
12
backend/app/models/log.py
Normal file
12
backend/app/models/log.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from sqlalchemy import Column, DateTime, Integer, Text
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from app.db.session import Base
|
||||
|
||||
|
||||
class OperationLog(Base):
    """A single timestamped application log entry (surfaced on the dashboard)."""

    __tablename__ = "operation_logs"

    id = Column(Integer, primary_key=True, index=True)
    message = Column(Text, nullable=False)
    timestamp = Column(DateTime, server_default=func.now(), nullable=False)
|
||||
34
backend/app/models/router.py
Normal file
34
backend/app/models/router.py
Normal file
@@ -0,0 +1,34 @@
|
||||
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Text
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from app.db.session import Base
|
||||
|
||||
|
||||
class Router(Base):
    """A managed device (RouterOS or SwitchOS) plus cached connection-test state."""

    __tablename__ = "routers"

    id = Column(Integer, primary_key=True, index=True)
    owner_id = Column(Integer, ForeignKey("users.id"), nullable=False, index=True)
    name = Column(String(120), nullable=False)
    device_type = Column(String(32), nullable=False, default="routeros")  # 'routeros' | 'switchos'
    host = Column(String(255), nullable=False)
    port = Column(Integer, nullable=False, default=22)
    ssh_user = Column(String(120), nullable=False, default="admin")
    # Per-device key/password; services may fall back to global credentials.
    ssh_key = Column(Text, nullable=True)
    ssh_password = Column(String(255), nullable=True)

    # Snapshot of the most recent connection test (all nullable until tested).
    last_connection_status = Column(Boolean, nullable=True)
    last_connection_tested_at = Column(DateTime, nullable=True)
    last_connection_error = Column(Text, nullable=True)
    last_connection_hostname = Column(String(255), nullable=True)
    last_connection_model = Column(String(255), nullable=True)
    last_connection_version = Column(String(255), nullable=True)
    last_connection_uptime = Column(String(255), nullable=True)
    last_connection_transport = Column(String(32), nullable=True)
    last_connection_server = Column(String(255), nullable=True)
    last_connection_auth_mode = Column(String(64), nullable=True)
    last_connection_http_status = Column(String(32), nullable=True)
    last_connection_backup_available = Column(Boolean, nullable=True)

    created_at = Column(DateTime, server_default=func.now(), nullable=False)

    backups = relationship("Backup", back_populates="router", cascade="all, delete-orphan")
|
||||
28
backend/app/models/settings.py
Normal file
28
backend/app/models/settings.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from sqlalchemy import Boolean, Column, Integer, String, Text
|
||||
|
||||
from app.db.session import Base
|
||||
|
||||
|
||||
class GlobalSettings(Base):
    """Application-wide configuration; a single row is seeded by init_db."""

    __tablename__ = "global_settings"

    id = Column(Integer, primary_key=True)

    # Retention and scheduling ('' cron disables the corresponding job).
    backup_retention_days = Column(Integer, default=7)
    log_retention_days = Column(Integer, default=7)
    export_cron = Column(String(64), default="")
    binary_cron = Column(String(64), default="")
    retention_cron = Column(String(64), default="")
    enable_auto_export = Column(Boolean, default=False)
    # Minutes between periodic connection tests; presumably 0 disables — confirm in scheduler.
    connection_test_interval_minutes = Column(Integer, default=0)

    # Shared credentials used as fallbacks for devices without their own.
    global_ssh_key = Column(Text, nullable=True)
    default_switchos_username = Column(String(120), nullable=True)
    default_switchos_password = Column(String(255), nullable=True)

    # Pushover notification settings.
    pushover_token = Column(String(255), nullable=True)
    pushover_userkey = Column(String(255), nullable=True)
    notify_failures_only = Column(Boolean, default=True)

    # SMTP notification settings.
    smtp_host = Column(String(255), nullable=True)
    smtp_port = Column(Integer, default=587)
    smtp_login = Column(String(255), nullable=True)
    smtp_password = Column(String(255), nullable=True)
    smtp_notifications_enabled = Column(Boolean, default=False)
    recipient_email = Column(String(255), nullable=True)
|
||||
15
backend/app/models/user.py
Normal file
15
backend/app/models/user.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from sqlalchemy import Column, DateTime, Integer, String
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from app.db.session import Base
|
||||
|
||||
|
||||
class User(Base):
    """An application account with hashed credentials and UI preferences."""

    __tablename__ = "users"

    id = Column(Integer, primary_key=True, index=True)
    username = Column(String(120), unique=True, nullable=False, index=True)
    password_hash = Column(String(255), nullable=False)  # never store plaintext
    created_at = Column(DateTime, server_default=func.now(), nullable=False)
    preferred_language = Column(String(8), nullable=False, default='pl')
    preferred_font = Column(String(32), nullable=False, default='default')
|
||||
31
backend/app/schemas/auth.py
Normal file
31
backend/app/schemas/auth.py
Normal file
@@ -0,0 +1,31 @@
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class UserResponse(BaseModel):
    """Public view of a user account, including UI preferences."""

    id: int
    username: str
    preferred_language: str = 'pl'
    preferred_font: str = 'default'

    model_config = {"from_attributes": True}


class TokenResponse(BaseModel):
    """Login result: a bearer JWT plus the authenticated user."""

    access_token: str
    token_type: str = "bearer"
    user: UserResponse


class RegisterRequest(BaseModel):
    """Payload for creating a new account."""

    username: str = Field(min_length=3, max_length=120)
    password: str = Field(min_length=4, max_length=128)


class ChangePasswordRequest(BaseModel):
    """Payload for rotating the current user's password."""

    current_password: str
    new_password: str = Field(min_length=4, max_length=128)


class UpdateUserPreferencesRequest(BaseModel):
    """Payload for updating the user's UI language and font."""

    preferred_language: str = Field(default='pl', min_length=2, max_length=8)
    preferred_font: str = Field(default='default', min_length=2, max_length=32)
|
||||
50
backend/app/schemas/backup.py
Normal file
50
backend/app/schemas/backup.py
Normal file
@@ -0,0 +1,50 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class BackupResponse(BaseModel):
    """A backup row enriched with its device's name/type and on-disk size."""

    id: int
    router_id: int
    router_name: str | None = None
    device_type: str = "routeros"
    file_path: str
    file_name: str
    backup_type: str
    checksum: str | None = None
    file_size: int | None = None  # None when the file no longer exists on disk
    created_at: datetime

    model_config = {'from_attributes': True}


class BackupDiffLine(BaseModel):
    """One row of a side-by-side diff; a side's number is None when absent."""

    type: Literal['context', 'added', 'removed', 'modified']
    left_number: int | None = None
    right_number: int | None = None
    left_text: str = ''
    right_text: str = ''


class BackupDiffStats(BaseModel):
    """Counts of diff rows by change type."""

    added: int = 0
    removed: int = 0
    modified: int = 0
    context: int = 0


class BackupDiffResponse(BaseModel):
    """Comparison of two backups: raw diff text plus structured rows/stats."""

    left_backup_id: int
    right_backup_id: int
    left_file_name: str | None = None
    right_file_name: str | None = None
    diff_text: str
    diff_html: str | None = None
    stats: BackupDiffStats | None = None
    lines: list[BackupDiffLine] = []


class BulkActionRequest(BaseModel):
    """Bulk operation over a set of backups."""

    action: Literal['download', 'delete']
    backup_ids: list[int]
|
||||
28
backend/app/schemas/dashboard.py
Normal file
28
backend/app/schemas/dashboard.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class StorageStats(BaseModel):
    """Disk usage figures for the storage volume (byte counts + percentage)."""

    total: int
    used: int
    free: int
    folder_used: int  # bytes consumed by the backup folder itself
    usage_percent: float


class OperationLogResponse(BaseModel):
    """A single operation-log entry."""

    id: int
    message: str
    timestamp: datetime

    model_config = {"from_attributes": True}


class DashboardResponse(BaseModel):
    """Aggregate numbers and recent activity for the dashboard view."""

    routers_count: int
    export_count: int
    binary_count: int
    total_backups: int
    storage: StorageStats
    recent_logs: list[OperationLogResponse]
|
||||
104
backend/app/schemas/router.py
Normal file
104
backend/app/schemas/router.py
Normal file
@@ -0,0 +1,104 @@
|
||||
import re
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel, Field, field_validator, model_validator
|
||||
|
||||
# Device names end up in backup file names, so restrict them to safe characters.
ALLOWED_NAME_REGEX = re.compile(r"^[A-Za-z0-9_-]+$")
DeviceType = Literal["routeros", "switchos"]


class RouterBase(BaseModel):
    """Common device fields shared by the create and response schemas."""

    name: str = Field(min_length=1, max_length=120)
    device_type: DeviceType = "routeros"
    host: str = Field(min_length=1, max_length=255)
    port: int | None = Field(default=None, ge=1, le=65535)
    ssh_user: str | None = Field(default=None, max_length=120)
    ssh_key: str | None = None
    ssh_password: str | None = None

    @field_validator("name")
    @classmethod
    def validate_name(cls, value: str) -> str:
        """Names are used in file names — allow only filesystem-safe characters."""
        if not ALLOWED_NAME_REGEX.match(value):
            raise ValueError("Only letters, digits, dashes and underscores are allowed")
        return value

    @field_validator("host", "ssh_user", "ssh_key", "ssh_password", mode="before")
    @classmethod
    def normalize_text(cls, value: str | None) -> str | None:
        """Trim whitespace; collapse blank strings to None."""
        normalized = (value or "").strip()
        return normalized or None

    @model_validator(mode="after")
    def apply_device_defaults(self):
        """Fill per-device-type defaults (SSH for RouterOS, HTTP for SwitchOS)."""
        if self.device_type == "routeros":
            self.port = self.port or 22
            self.ssh_user = self.ssh_user or "admin"
            return self

        # SwitchOS devices are reached over HTTP; SSH keys do not apply.
        self.port = self.port or 80
        self.ssh_key = None
        return self


class RouterCreate(RouterBase):
    """Payload for registering a new device."""

    pass


class RouterUpdate(BaseModel):
    """Partial update payload; omitted/blank fields are left unchanged."""

    name: str | None = None
    device_type: DeviceType | None = None
    host: str | None = None
    port: int | None = Field(default=None, ge=1, le=65535)
    ssh_user: str | None = None
    ssh_key: str | None = None
    ssh_password: str | None = None

    @field_validator("name", "host", "ssh_user", "ssh_key", "ssh_password", mode="before")
    @classmethod
    def normalize_text(cls, value: str | None) -> str | None:
        """Trim whitespace; collapse blank strings to None."""
        normalized = (value or "").strip()
        return normalized or None


class RouterResponse(RouterBase):
    """Device as returned to the UI, with derived capability flags and the
    cached result of the most recent connection test."""

    id: int
    owner_id: int
    effective_username: str | None = None
    uses_global_ssh_key: bool = False
    has_effective_ssh_key: bool = False
    uses_global_switchos_credentials: bool = False
    has_effective_password: bool = False
    supports_export: bool = False
    supports_restore_upload: bool = False
    last_connection_status: bool | None = None
    last_connection_tested_at: datetime | None = None
    last_connection_error: str | None = None
    last_connection_hostname: str | None = None
    last_connection_model: str | None = None
    last_connection_version: str | None = None
    last_connection_uptime: str | None = None
    last_connection_transport: str | None = None
    last_connection_server: str | None = None
    last_connection_auth_mode: str | None = None
    last_connection_http_status: str | None = None
    last_connection_backup_available: bool | None = None
    created_at: datetime | None = None

    model_config = {"from_attributes": True}


class RouterTestConnection(BaseModel):
    """Result of a live connection test against a device."""

    success: bool
    tested_at: datetime
    model: str
    uptime: str
    hostname: str
    version: str | None = None
    error: str | None = None
    transport: str | None = None
    server: str | None = None
    auth_mode: str | None = None
    http_status: str | None = None
    backup_available: bool | None = None
|
||||
88
backend/app/schemas/settings.py
Normal file
88
backend/app/schemas/settings.py
Normal file
@@ -0,0 +1,88 @@
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel, EmailStr, Field, field_validator
|
||||
|
||||
from app.core.config import settings as app_settings
|
||||
from app.core.cron_utils import CronValidationError, validate_cron_expression
|
||||
|
||||
|
||||
class SettingsBase(BaseModel):
    """Shared shape of the global settings payload and response."""

    backup_retention_days: int = 7
    log_retention_days: int = 7
    export_cron: str = ''
    binary_cron: str = ''
    retention_cron: str = ''
    enable_auto_export: bool = False
    connection_test_interval_minutes: int = Field(default=0, ge=0, le=1440)
    global_ssh_key: str | None = None
    default_switchos_username: str | None = None
    default_switchos_password: str | None = None
    pushover_token: str | None = None
    pushover_userkey: str | None = None
    notify_failures_only: bool = True
    smtp_host: str | None = None
    smtp_port: int = 587
    smtp_login: str | None = None
    smtp_password: str | None = None
    smtp_notifications_enabled: bool = False
    recipient_email: EmailStr | None = None

    @field_validator('export_cron', 'binary_cron', 'retention_cron', mode='before')
    @classmethod
    def normalize_cron(cls, value: str | None) -> str:
        """Coerce None to '' and trim whitespace before validation."""
        return (value or '').strip()

    @field_validator('global_ssh_key', 'default_switchos_username', 'default_switchos_password', mode='before')
    @classmethod
    def normalize_secret_text(cls, value: str | None) -> str | None:
        """Trim secret fields; collapse blank strings to None."""
        normalized = (value or '').strip()
        return normalized or None

    @field_validator('export_cron', 'binary_cron', 'retention_cron')
    @classmethod
    def validate_cron(cls, value: str) -> str:
        """Reject cron strings the scheduler cannot parse ('' means disabled)."""
        if not value:
            return value
        try:
            validate_cron_expression(value, app_settings.timezone)
        except CronValidationError as exc:
            raise ValueError(f'Invalid cron expression: {exc}') from exc
        return value


class SettingsUpdate(SettingsBase):
    """Update payload; set clear_global_ssh_key to drop the stored key."""

    clear_global_ssh_key: bool = False


class SettingsResponse(SettingsBase):
    """Settings as returned to the UI; secret presence is reported as booleans."""

    id: int
    has_global_ssh_key: bool = False
    has_default_switchos_credentials: bool = False

    model_config = {'from_attributes': True}


class RevealSshKeyRequest(BaseModel):
    """Password re-confirmation required before revealing the global SSH key."""

    password: str


class RevealSshKeyResponse(BaseModel):
    """The revealed global SSH key (None when none is stored)."""

    global_ssh_key: str | None = None


class SchedulerJobStatus(BaseModel):
    """Status of one scheduled job: cron, validity, and upcoming fire times."""

    key: str
    label: str
    enabled: bool
    cron: str | None = None
    description: str
    description_params: dict[str, str | int] | None = None
    valid: bool
    next_runs: list[datetime] = []
    error: str | None = None


class SchedulerStatusResponse(BaseModel):
    """Overall scheduler state plus the status of every known job."""

    timezone: str
    running: bool
    jobs: list[SchedulerJobStatus]
|
||||
33
backend/app/schemas/swos_beta.py
Normal file
33
backend/app/schemas/swos_beta.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
|
||||
class SwosBetaCredentials(BaseModel):
    """Ad-hoc credentials for probing a SwitchOS device over HTTP."""

    host: str = Field(min_length=1, max_length=255)
    port: int = Field(default=80, ge=1, le=65535)
    username: str = Field(default='admin', min_length=1, max_length=120)
    password: str = Field(default='', max_length=255)
    label: str | None = Field(default=None, max_length=120)

    @field_validator('host', 'username', 'password', mode='before')
    @classmethod
    def normalize_text(cls, value: str | None) -> str:
        """Coerce None to '' and trim whitespace."""
        return (value or '').strip()

    @field_validator('label', mode='before')
    @classmethod
    def normalize_label(cls, value: str | None) -> str | None:
        """Trim the label; collapse blank strings to None."""
        normalized = (value or '').strip()
        return normalized or None


class SwosBetaProbeResponse(BaseModel):
    """Outcome of probing a SwitchOS web UI, including backup availability."""

    success: bool
    base_url: str
    status_code: int
    auth_mode: str
    page_title: str | None = None
    content_type: str | None = None
    server: str | None = None
    save_backup_visible: bool = False
    backup_endpoint_ok: bool = False
    note: str | None = None
||||
350
backend/app/services/backup_service.py
Normal file
350
backend/app/services/backup_service.py
Normal file
@@ -0,0 +1,350 @@
|
||||
import difflib
|
||||
from datetime import date, datetime, time, timedelta, timezone
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import HTTPException
|
||||
from sqlalchemy.orm import Session, joinedload
|
||||
|
||||
from app.models.backup import Backup
|
||||
from app.models.router import Router
|
||||
from app.models.user import User
|
||||
from app.services.file_service import compute_checksum, ensure_data_dir
|
||||
from app.services.log_service import log_service
|
||||
from app.services.notification_service import notification_service
|
||||
from app.services.router_service import router_service
|
||||
from app.services.settings_service import settings_service
|
||||
|
||||
|
||||
class BackupService:
|
||||
def _device_label(self, router: Router) -> str:
|
||||
platform = 'SwitchOS' if router.device_type == 'switchos' else 'RouterOS'
|
||||
return f'{platform} device {router.name}'
|
||||
|
||||
def _router_for_user(self, db: 'Session', user: 'User', router_id: int) -> 'Router':
    """Fetch a device by id scoped to *user*; raise 404 if absent or not owned."""
    device = (
        db.query(Router)
        .filter(Router.id == router_id, Router.owner_id == user.id)
        .first()
    )
    if device is None:
        raise HTTPException(status_code=404, detail='Device not found')
    return device
|
||||
|
||||
def _serialize_backup(self, backup: Backup):
|
||||
file_path = Path(backup.file_path)
|
||||
return {
|
||||
'id': backup.id,
|
||||
'router_id': backup.router_id,
|
||||
'router_name': backup.router.name if backup.router else None,
|
||||
'device_type': backup.router.device_type if backup.router else 'routeros',
|
||||
'file_path': backup.file_path,
|
||||
'file_name': backup.file_name,
|
||||
'backup_type': backup.backup_type,
|
||||
'checksum': backup.checksum,
|
||||
'file_size': file_path.stat().st_size if file_path.exists() else None,
|
||||
'created_at': backup.created_at,
|
||||
}
|
||||
|
||||
def _build_structured_diff(self, left_lines: list[str], right_lines: list[str]):
|
||||
matcher = difflib.SequenceMatcher(a=left_lines, b=right_lines)
|
||||
rows = []
|
||||
stats = {'added': 0, 'removed': 0, 'modified': 0, 'context': 0}
|
||||
left_number = 1
|
||||
right_number = 1
|
||||
|
||||
for tag, i1, i2, j1, j2 in matcher.get_opcodes():
|
||||
if tag == 'equal':
|
||||
for left_text, right_text in zip(left_lines[i1:i2], right_lines[j1:j2]):
|
||||
rows.append(
|
||||
{
|
||||
'type': 'context',
|
||||
'left_number': left_number,
|
||||
'right_number': right_number,
|
||||
'left_text': left_text,
|
||||
'right_text': right_text,
|
||||
}
|
||||
)
|
||||
stats['context'] += 1
|
||||
left_number += 1
|
||||
right_number += 1
|
||||
continue
|
||||
|
||||
if tag == 'delete':
|
||||
for left_text in left_lines[i1:i2]:
|
||||
rows.append(
|
||||
{
|
||||
'type': 'removed',
|
||||
'left_number': left_number,
|
||||
'right_number': None,
|
||||
'left_text': left_text,
|
||||
'right_text': '',
|
||||
}
|
||||
)
|
||||
stats['removed'] += 1
|
||||
left_number += 1
|
||||
continue
|
||||
|
||||
if tag == 'insert':
|
||||
for right_text in right_lines[j1:j2]:
|
||||
rows.append(
|
||||
{
|
||||
'type': 'added',
|
||||
'left_number': None,
|
||||
'right_number': right_number,
|
||||
'left_text': '',
|
||||
'right_text': right_text,
|
||||
}
|
||||
)
|
||||
stats['added'] += 1
|
||||
right_number += 1
|
||||
continue
|
||||
|
||||
block_left = left_lines[i1:i2]
|
||||
block_right = right_lines[j1:j2]
|
||||
block_size = max(len(block_left), len(block_right))
|
||||
for index in range(block_size):
|
||||
left_text = block_left[index] if index < len(block_left) else ''
|
||||
right_text = block_right[index] if index < len(block_right) else ''
|
||||
row_type = 'modified'
|
||||
if left_text and not right_text:
|
||||
row_type = 'removed'
|
||||
stats['removed'] += 1
|
||||
elif right_text and not left_text:
|
||||
row_type = 'added'
|
||||
stats['added'] += 1
|
||||
else:
|
||||
stats['modified'] += 1
|
||||
|
||||
rows.append(
|
||||
{
|
||||
'type': row_type,
|
||||
'left_number': left_number if left_text else None,
|
||||
'right_number': right_number if right_text else None,
|
||||
'left_text': left_text,
|
||||
'right_text': right_text,
|
||||
}
|
||||
)
|
||||
if left_text:
|
||||
left_number += 1
|
||||
if right_text:
|
||||
right_number += 1
|
||||
|
||||
return rows, stats
|
||||
|
||||
def get_backup_for_user(self, db: 'Session', user: 'User', backup_id: int) -> 'Backup':
    """Fetch one backup by id, restricted to devices owned by *user*; 404 otherwise."""
    record = (
        db.query(Backup)
        .options(joinedload(Backup.router))
        .join(Router)
        .filter(Backup.id == backup_id, Router.owner_id == user.id)
        .first()
    )
    if record is None:
        raise HTTPException(status_code=404, detail='Backup not found')
    return record
|
||||
|
||||
def list_backups(
    self,
    db: 'Session',
    user: 'User',
    search: str | None = None,
    backup_type: str | None = None,
    router_id: int | None = None,
    created_on: 'date | None' = None,
    sort_by: str = 'created_at',
    order: str = 'desc',
):
    """Return the user's backups as serialized dicts, filtered and sorted.

    *search* matches file name, device name, or host (case-insensitive);
    *created_on* restricts results to one calendar day; *sort_by* is one of
    created_at / file_name / backup_type / router_name (unknown values fall
    back to created_at); *order* is 'asc' or anything else for descending.
    """
    query = db.query(Backup).options(joinedload(Backup.router)).join(Router).filter(Router.owner_id == user.id)

    if search:
        pattern = f'%{search}%'
        query = query.filter(
            Backup.file_name.ilike(pattern)
            | Router.name.ilike(pattern)
            | Router.host.ilike(pattern)
        )
    if backup_type:
        query = query.filter(Backup.backup_type == backup_type)
    if router_id:
        query = query.filter(Backup.router_id == router_id)
    if created_on:
        day_start = datetime.combine(created_on, time.min)
        next_day = day_start + timedelta(days=1)
        query = query.filter(Backup.created_at >= day_start, Backup.created_at < next_day)

    sortable = {
        'created_at': Backup.created_at,
        'file_name': Backup.file_name,
        'backup_type': Backup.backup_type,
        'router_name': Router.name,
    }
    column = sortable.get(sort_by, Backup.created_at)
    query = query.order_by(column.asc() if order == 'asc' else column.desc())
    return [self._serialize_backup(row) for row in query.all()]
|
||||
|
||||
def list_router_backups(self, db: 'Session', user: 'User', router_id: int):
    """List a single device's backups, newest first, as serialized dicts.

    Raises 404 (via _router_for_user) when the device is absent or not owned.
    """
    device = self._router_for_user(db, user, router_id)
    rows = (
        db.query(Backup)
        .options(joinedload(Backup.router))
        .filter(Backup.router_id == device.id)
        .order_by(Backup.created_at.desc())
        .all()
    )
    return [self._serialize_backup(row) for row in rows]
|
||||
|
||||
def export_router(self, db: Session, user: User, router_id: int) -> Backup:
    """Run a text '/export' on a RouterOS device and persist it as a Backup row.

    Raises:
        HTTPException 400: when the device is not RouterOS.
        HTTPException 500: when the export or persistence fails.
    """
    router = self._router_for_user(db, user, router_id)
    if router.device_type != 'routeros':
        raise HTTPException(status_code=400, detail='Text export is available only for RouterOS devices')
    settings = settings_service.get_or_create(db)
    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    file_name = f'{router.name}_{router.id}_{timestamp}.rsc'
    target = ensure_data_dir() / file_name
    try:
        export_text = router_service.export(router, settings.global_ssh_key)
        target.write_text(export_text, encoding='utf-8')
        record = Backup(router_id=router.id, file_path=str(target), file_name=file_name, backup_type='export')
        db.add(record)
        db.commit()
        db.refresh(record)
        log_service.add(db, f'Export OK for device {router.name}')
        notification_service.notify(settings, f'Export {router.name} OK', True)
        return record
    except HTTPException:
        # Already shaped for the API layer — pass through untouched.
        raise
    except Exception as exc:
        notification_service.notify(settings, f'Export {router.name} FAIL: {exc}', False)
        log_service.add(db, f'Export FAILED for device {router.name}: {exc}')
        raise HTTPException(status_code=500, detail=str(exc)) from exc
|
||||
|
||||
def binary_backup(self, db: Session, user: User, router_id: int) -> Backup:
    """Create a binary backup of a device and persist it as a Backup row.

    SwitchOS devices produce '.swb' files, RouterOS devices '.backup' files.
    A checksum is stored so later restores can detect on-disk corruption.
    """
    router = self._router_for_user(db, user, router_id)
    settings = settings_service.get_or_create(db)
    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    base_name = f'{router.name}_{router.id}_{timestamp}'
    extension = '.swb' if router.device_type == 'switchos' else '.backup'
    file_name = f'{base_name}{extension}'
    target = ensure_data_dir() / file_name
    try:
        router_service.binary_backup(router, base_name, str(target), settings.global_ssh_key, settings)
        checksum = compute_checksum(str(target))
        record = Backup(router_id=router.id, file_path=str(target), file_name=file_name, backup_type='binary', checksum=checksum)
        db.add(record)
        db.commit()
        db.refresh(record)
        log_service.add(db, f'Binary backup OK for {self._device_label(router)}')
        notification_service.notify(settings, f'Backup {router.name} OK', True)
        return record
    except HTTPException:
        # Already shaped for the API layer — pass through untouched.
        raise
    except Exception as exc:
        notification_service.notify(settings, f'Backup {router.name} FAIL: {exc}', False)
        log_service.add(db, f'Binary backup FAILED for {self._device_label(router)}: {exc}')
        raise HTTPException(status_code=500, detail=str(exc)) from exc
|
||||
|
||||
def upload_backup_to_router(self, db: Session, user: User, router_id: int, backup_id: int):
    """Push a stored binary backup file back onto a RouterOS device over SSH.

    Guards, in order: device must be RouterOS, the backup must be binary,
    must not originate from a SwitchOS device, and must match its stored
    checksum (when one was recorded).
    """
    router = self._router_for_user(db, user, router_id)
    if router.device_type != 'routeros':
        raise HTTPException(status_code=400, detail='Restore upload is available only for RouterOS devices')
    record = self.get_backup_for_user(db, user, backup_id)
    if record.backup_type != 'binary':
        raise HTTPException(status_code=400, detail='Only binary backups can be uploaded')
    if record.router and record.router.device_type != 'routeros':
        raise HTTPException(status_code=400, detail='SwitchOS backup files cannot be restored over SSH upload')
    # Re-hash the file on disk to catch corruption since the backup was taken.
    current_digest = compute_checksum(record.file_path)
    if record.checksum and current_digest != record.checksum:
        raise HTTPException(status_code=400, detail='Checksum mismatch')
    settings = settings_service.get_or_create(db)
    router_service.upload_backup(router, record.file_path, settings.global_ssh_key)
    log_service.add(db, f'Upload backup OK for device {router.name}')
|
||||
|
||||
def delete_backup(self, db: Session, user: User, backup_id: int, commit: bool = True):
    """Delete a backup row and its on-disk file.

    Args:
        commit: set False when the caller deletes several backups in a batch
            and commits once at the end.
    """
    backup = self.get_backup_for_user(db, user, backup_id)
    # missing_ok avoids the exists()/unlink() race and tolerates a file that
    # was already removed from disk out-of-band.
    Path(backup.file_path).unlink(missing_ok=True)
    db.delete(backup)
    if commit:
        db.commit()
|
||||
|
||||
def diff_backups(self, db: Session, user: User, left_id: int, right_id: int):
    """Build text, HTML and structured diffs between two export backups.

    Only 'export' (.rsc text) backups can be diffed; binary backups are rejected.
    """
    left = self.get_backup_for_user(db, user, left_id)
    right = self.get_backup_for_user(db, user, right_id)
    if left.backup_type != 'export' or right.backup_type != 'export':
        raise HTTPException(status_code=400, detail='Diff is supported only for export backups')

    left_lines = Path(left.file_path).read_text(encoding='utf-8', errors='ignore').splitlines()
    right_lines = Path(right.file_path).read_text(encoding='utf-8', errors='ignore').splitlines()

    unified = difflib.unified_diff(
        left_lines,
        right_lines,
        fromfile=left.file_name,
        tofile=right.file_name,
        lineterm='',
    )
    html_view = difflib.HtmlDiff(wrapcolumn=120).make_file(
        left_lines,
        right_lines,
        fromdesc=left.file_name,
        todesc=right.file_name,
        context=True,
        numlines=2,
    )
    structured_lines, stats = self._build_structured_diff(left_lines, right_lines)
    return {
        'left_backup_id': left_id,
        'right_backup_id': right_id,
        'left_file_name': left.file_name,
        'right_file_name': right.file_name,
        'diff_text': '\n'.join(unified),
        'diff_html': html_view,
        'stats': stats,
        'lines': structured_lines,
    }
|
||||
|
||||
def email_backup(self, db: Session, user: User, backup_id: int):
    """E-mail a backup file as an attachment using the configured SMTP settings."""
    record = self.get_backup_for_user(db, user, backup_id)
    settings = settings_service.get_or_create(db)
    is_switchos = bool(record.router and record.router.device_type == 'switchos')
    platform_name = 'SwitchOS' if is_switchos else 'RouterOS'
    noun = 'Export' if record.backup_type == 'export' else 'Backup'
    subject = f'{platform_name} {noun}: {record.file_name}'
    body = f'Sending {record.file_name} from device {record.router.name}.'
    notification_service.send_email(settings, subject, body, record.file_path)
    log_service.add(db, f'Email sent for backup {record.file_name}')
|
||||
|
||||
def export_all(self, db: Session, user: User):
    """Run a text export for every RouterOS device owned by *user*.

    Non-RouterOS devices are skipped; per-device failures are reported in the
    result list instead of aborting the whole run.
    """
    outcomes = []
    for router in db.query(Router).filter(Router.owner_id == user.id).all():
        device_type = (router.device_type or 'routeros').lower()
        if device_type != 'routeros':
            outcomes.append({
                'router': router.name,
                'status': 'skipped',
                'message': 'Text export is available only for RouterOS devices',
            })
            continue
        try:
            created = self.export_router(db, user, router.id)
        except Exception as exc:
            outcomes.append({'router': router.name, 'status': 'error', 'message': str(exc)})
        else:
            outcomes.append({'router': router.name, 'status': 'ok', 'backup_id': created.id})
    return outcomes
|
||||
|
||||
def binary_all(self, db: Session, user: User):
    """Take a binary backup of every device owned by *user*.

    Per-device failures are collected into the result list instead of
    aborting the whole run.
    """
    outcomes = []
    for router in db.query(Router).filter(Router.owner_id == user.id).all():
        try:
            created = self.binary_backup(db, user, router.id)
        except Exception as exc:
            outcomes.append({'router': router.name, 'status': 'error', 'message': str(exc)})
        else:
            outcomes.append({'router': router.name, 'status': 'ok', 'backup_id': created.id})
    return outcomes
|
||||
|
||||
def cleanup_old_backups(self, db: Session):
    """Delete backups older than the configured retention window.

    Returns:
        The number of backup rows removed (files already gone from disk are
        still counted, matching the row deletion).
    """
    settings = settings_service.get_or_create(db)
    # created_at is stored naive (UTC), so compare against a naive UTC cutoff.
    cutoff = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(days=settings.backup_retention_days)
    expired = db.query(Backup).filter(Backup.created_at < cutoff).all()
    for backup in expired:
        # missing_ok tolerates files already removed out-of-band (no TOCTOU race).
        Path(backup.file_path).unlink(missing_ok=True)
        db.delete(backup)
    db.commit()
    deleted_count = len(expired)
    log_service.add(db, f'Retention cleanup removed {deleted_count} backups older than {settings.backup_retention_days} days')
    return deleted_count
|
||||
|
||||
|
||||
# Shared module-level instance used by the API routes and the scheduler.
backup_service = BackupService()
|
||||
38
backend/app/services/file_service.py
Normal file
38
backend/app/services/file_service.py
Normal file
@@ -0,0 +1,38 @@
|
||||
import hashlib
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
from app.core.config import settings
|
||||
from app.schemas.dashboard import StorageStats
|
||||
|
||||
|
||||
def compute_checksum(file_path: str) -> str:
    """Return the hex SHA-256 digest of the file at *file_path*.

    Streams the file in 64 KiB chunks so arbitrarily large backup files can
    be hashed without loading them fully into memory.
    """
    sha256 = hashlib.sha256()
    with open(file_path, "rb") as handle:
        while chunk := handle.read(65536):
            sha256.update(chunk)
    return sha256.hexdigest()
|
||||
|
||||
|
||||
def ensure_data_dir() -> Path:
    """Return the configured backup storage directory.

    NOTE(review): despite the name, this does not create the directory — the
    deployment is expected to have created it already; confirm whether a
    ``mkdir(parents=True, exist_ok=True)`` belongs here.
    """
    return settings.data_path
|
||||
|
||||
|
||||
def get_folder_size() -> int:
    """Return the total size in bytes of every file under the data directory."""
    size = 0
    for root, _dirs, files in os.walk(ensure_data_dir()):
        base = Path(root)
        for name in files:
            try:
                size += os.path.getsize(base / name)
            except OSError:
                # File vanished (or is unreadable) between listing and stat — skip it.
                pass
    return size
|
||||
|
||||
|
||||
def get_storage_stats() -> StorageStats:
    """Return disk usage of the volume holding the data dir plus the folder's own size."""
    data_dir = ensure_data_dir()
    disk = shutil.disk_usage(data_dir)
    folder_used = get_folder_size()
    # Guard against a zero-sized volume report to avoid division by zero.
    percent = (folder_used / disk.total) * 100 if disk.total else 0
    return StorageStats(
        total=disk.total,
        used=disk.used,
        free=disk.free,
        folder_used=folder_used,
        usage_percent=percent,
    )
|
||||
24
backend/app/services/log_service.py
Normal file
24
backend/app/services/log_service.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.models.log import OperationLog
|
||||
|
||||
|
||||
class LogService:
    """Persistence helper for operation log entries."""

    def add(self, db: Session, message: str, commit: bool = True) -> None:
        """Append one log row; commit immediately unless the caller batches."""
        entry = OperationLog(message=message)
        db.add(entry)
        if commit:
            db.commit()

    def delete_older_than(self, db: Session, days: int) -> int:
        """Remove log rows older than *days* days; return how many were removed."""
        # Timestamps are stored naive (UTC), so build a naive UTC cutoff.
        cutoff = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(days=days)
        stale = db.query(OperationLog).filter(OperationLog.timestamp < cutoff).all()
        for entry in stale:
            db.delete(entry)
        db.commit()
        return len(stale)
|
||||
|
||||
|
||||
# Shared module-level instance used throughout the services layer.
log_service = LogService()
|
||||
78
backend/app/services/notification_service.py
Normal file
78
backend/app/services/notification_service.py
Normal file
@@ -0,0 +1,78 @@
|
||||
import smtplib
|
||||
from email import encoders
|
||||
from email.mime.base import MIMEBase
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
|
||||
from app.core.config import settings as app_settings
|
||||
from app.models.settings import GlobalSettings
|
||||
|
||||
|
||||
class NotificationService:
    """Outbound notifications: Pushover pushes and SMTP e-mail."""

    def send_pushover(self, token: str, user_key: str, message: str, title: str = "RouterOS Backup") -> bool:
        """Send one Pushover message; return True when the API responds with 2xx."""
        response = requests.post(
            "https://api.pushover.net/1/messages.json",
            data={"token": token, "user": user_key, "message": message, "title": title},
            timeout=15,
        )
        return response.ok

    def send_email(self, settings: GlobalSettings, subject: str, body: str, attachment_path: str | None = None):
        """Send an e-mail (optionally with one attachment) via the configured SMTP server.

        Raises:
            ValueError: when SMTP credentials or the recipient are not configured.
        """
        if not (settings.smtp_host and settings.smtp_login and settings.smtp_password):
            raise ValueError("SMTP is not configured")
        # Fall back to the SMTP login address when no explicit recipient is set.
        recipient = (settings.recipient_email or settings.smtp_login or "").strip()
        if not recipient:
            raise ValueError("Recipient email is empty")

        msg = MIMEMultipart()
        msg["From"] = settings.smtp_login
        msg["To"] = recipient
        msg["Subject"] = subject
        msg.attach(MIMEText(body, "plain", "utf-8"))

        if attachment_path:
            # Attach the whole file base64-encoded as a generic octet-stream.
            attachment = Path(attachment_path)
            with attachment.open("rb") as handle:
                part = MIMEBase("application", "octet-stream")
                part.set_payload(handle.read())
                encoders.encode_base64(part)
                part.add_header("Content-Disposition", f'attachment; filename="{attachment.name}"')
                msg.attach(part)

        with smtplib.SMTP(settings.smtp_host, settings.smtp_port, timeout=app_settings.smtp_timeout_seconds) as server:
            if app_settings.smtp_starttls:
                server.starttls()
            server.login(settings.smtp_login, settings.smtp_password)
            server.sendmail(settings.smtp_login, [recipient], msg.as_string())

    def notify(self, settings: GlobalSettings, message: str, success: bool):
        """Fan *message* out to every configured channel, best-effort.

        Channel failures are deliberately swallowed so a broken SMTP/Pushover
        setup never breaks the backup operation that triggered the notice.
        """
        if settings.notify_failures_only and success:
            return
        if settings.smtp_notifications_enabled:
            try:
                self.send_email(settings, "Mikrotik Backup System notification", message)
            except Exception:
                pass
        if settings.pushover_token and settings.pushover_userkey:
            try:
                self.send_pushover(settings.pushover_token, settings.pushover_userkey, message)
            except Exception:
                pass

    def send_test_email(self, settings: GlobalSettings):
        """Send a fixed test message to verify the SMTP configuration."""
        self.send_email(settings, "Mikrotik Backup System test", "This is a test email from Mikrotik Backup System")

    def send_test_pushover(self, settings: GlobalSettings):
        """Send a fixed test push to verify the Pushover configuration.

        Raises:
            ValueError: when the Pushover token or user key is missing.
        """
        if not (settings.pushover_token and settings.pushover_userkey):
            raise ValueError("Pushover is not configured")
        self.send_pushover(
            settings.pushover_token,
            settings.pushover_userkey,
            "Test pushover from Mikrotik Backup System",
        )
|
||||
|
||||
|
||||
# Shared module-level instance used by the backup and settings services.
notification_service = NotificationService()
|
||||
220
backend/app/services/router_service.py
Normal file
220
backend/app/services/router_service.py
Normal file
@@ -0,0 +1,220 @@
|
||||
from datetime import datetime
|
||||
import io
|
||||
from pathlib import Path
|
||||
|
||||
import paramiko
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.models.router import Router
|
||||
from app.services.log_service import log_service
|
||||
from app.services.swos_beta_service import swos_beta_service
|
||||
|
||||
|
||||
class RouterService:
    """Device operations: SSH/SFTP for RouterOS, HTTP (via swos_beta_service) for SwitchOS."""

    def _load_pkey(self, ssh_key_str: str):
        """Parse a PEM private key string, trying RSA, Ed25519 and ECDSA in turn.

        Raises:
            ValueError: when no loader accepts the key (chained to the last error).
        """
        key_str = (ssh_key_str or "").strip()
        key_buffer = io.StringIO(key_str)
        loaders = [
            paramiko.RSAKey.from_private_key,
            paramiko.Ed25519Key.from_private_key,
            paramiko.ECDSAKey.from_private_key,
        ]
        last_error = None
        for loader in loaders:
            key_buffer.seek(0)
            try:
                return loader(key_buffer)
            except Exception as exc:
                last_error = exc
        raise ValueError("Failed to load SSH private key") from last_error

    def _connect(self, router: Router, global_ssh_key: str | None = None):
        """Open an SSH connection to *router*.

        The router's own key takes precedence over the global key; with no key
        at all, password auth is used (agent/key lookup explicitly disabled).
        The caller owns the returned client and must close it.
        """
        client = paramiko.SSHClient()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        key_source = router.ssh_key.strip() if router.ssh_key and router.ssh_key.strip() else (global_ssh_key or "")
        if key_source:
            pkey = self._load_pkey(key_source)
            client.connect(router.host, port=router.port, username=router.ssh_user, pkey=pkey, timeout=10)
        else:
            client.connect(
                router.host,
                port=router.port,
                username=router.ssh_user,
                password=router.ssh_password,
                timeout=10,
                allow_agent=False,
                look_for_keys=False,
                banner_timeout=10,
            )
        return client

    def export(self, router: Router, global_ssh_key: str | None = None) -> str:
        """Run '/export' on a RouterOS device and return its textual configuration."""
        if router.device_type != 'routeros':
            raise ValueError('Export tekstowy jest dostępny tylko dla RouterOS.')
        client = self._connect(router, global_ssh_key)
        try:
            _, stdout, _ = client.exec_command('/export')
            return stdout.read().decode('utf-8', errors='ignore')
        finally:
            # Always release the SSH session, even when the command fails.
            client.close()

    def binary_backup(self, router: Router, backup_name: str, local_path: str, global_ssh_key: str | None = None, global_settings=None) -> str:
        """Create a binary backup on the device and download it to *local_path*.

        SwitchOS devices are handled over HTTP via the SwOS beta service;
        RouterOS devices over SSH + SFTP. Returns *local_path*.
        """
        if router.device_type == 'switchos':
            downloaded = swos_beta_service.download_backup_for_router(router, global_settings)
            Path(local_path).write_bytes(downloaded.content)
            return local_path

        client = self._connect(router, global_ssh_key)
        try:
            _, stdout, _ = client.exec_command(f'/system backup save name={backup_name}')
            # Block until the save command completes before fetching the file.
            stdout.channel.recv_exit_status()
            sftp = client.open_sftp()
            try:
                remote_file = f'{backup_name}.backup'
                sftp.get(remote_file, local_path)
                try:
                    # Best-effort cleanup of the temporary file on the device.
                    sftp.remove(remote_file)
                except Exception:
                    pass
            finally:
                sftp.close()
        finally:
            client.close()
        return local_path

    def upload_backup(self, router: Router, local_backup_path: str, global_ssh_key: str | None = None):
        """Upload a local backup file to a RouterOS device via SFTP (same base name)."""
        if router.device_type != 'routeros':
            raise ValueError('Przywracanie plików jest dostępne tylko dla RouterOS.')
        client = self._connect(router, global_ssh_key)
        try:
            sftp = client.open_sftp()
            try:
                target_name = Path(local_backup_path).name
                sftp.put(local_backup_path, target_name)
            finally:
                sftp.close()
        finally:
            client.close()

    def _probe_routeros_connection(self, router: Router, global_ssh_key: str | None = None):
        """Connect over SSH and scrape identity/resource info; never raises.

        Returns a result dict with the same shape for success and failure.
        """
        tested_at = datetime.utcnow()
        try:
            client = self._connect(router, global_ssh_key)
            try:
                _, stdout, _ = client.exec_command('/system resource print without-paging')
                resource_output = stdout.read().decode('utf-8', errors='ignore')
                _, stdout, _ = client.exec_command('/system identity print')
                identity_output = stdout.read().decode('utf-8', errors='ignore')
            finally:
                client.close()
            model = 'Unknown'
            uptime = 'Unknown'
            hostname = 'Unknown'
            version = 'Unknown'
            # NOTE(review): substring matching is loose — e.g. 'version' also
            # matches lines like 'factory-software: ...-version'; the last
            # matching line wins. Confirm against real device output.
            for line in resource_output.splitlines():
                if 'board-name' in line:
                    model = line.split(':', 1)[1].strip()
                if 'uptime' in line:
                    uptime = line.split(':', 1)[1].strip()
                if 'version' in line:
                    version = line.split(':', 1)[1].strip()
            for line in identity_output.splitlines():
                if 'name' in line:
                    hostname = line.split(':', 1)[1].strip()
            return {
                'success': True,
                'tested_at': tested_at,
                'model': model,
                'uptime': uptime,
                'hostname': hostname,
                'version': version,
                'error': None,
                'transport': 'ssh',
                'server': None,
                'auth_mode': 'ssh',
                'http_status': None,
                'backup_available': None,
            }
        except Exception as exc:
            return {
                'success': False,
                'tested_at': tested_at,
                'model': 'Unknown',
                'uptime': 'Unknown',
                'hostname': router.name,
                'version': None,
                'error': str(exc),
                'transport': 'ssh',
                'server': None,
                'auth_mode': 'ssh',
                'http_status': None,
                'backup_available': None,
            }

    def probe_connection(self, router: Router, global_ssh_key: str | None = None, global_settings=None):
        """Dispatch a connectivity probe to the transport matching the device type."""
        if router.device_type == 'switchos':
            return swos_beta_service.probe_router(router, global_settings)
        return self._probe_routeros_connection(router, global_ssh_key)

    def _store_connection_result(self, db: Session, router: Router, result: dict):
        """Persist the latest probe outcome onto the router row; returns *result*."""
        router.last_connection_status = result['success']
        router.last_connection_tested_at = result['tested_at']
        router.last_connection_error = result.get('error')
        router.last_connection_hostname = result.get('hostname')
        router.last_connection_model = result.get('model')
        router.last_connection_version = result.get('version')
        router.last_connection_uptime = result.get('uptime')
        router.last_connection_transport = result.get('transport')
        router.last_connection_server = result.get('server')
        router.last_connection_auth_mode = result.get('auth_mode')
        router.last_connection_http_status = result.get('http_status')
        router.last_connection_backup_available = result.get('backup_available')
        db.add(router)
        db.commit()
        db.refresh(router)
        return result

    def _device_label(self, router: Router) -> str:
        """Human-readable '<Platform> device <name>' label for log messages."""
        platform = 'SwitchOS' if router.device_type == 'switchos' else 'RouterOS'
        return f'{platform} device {router.name}'

    def _build_connection_log_message(self, router: Router, result: dict) -> str:
        """Format one operation-log line summarizing a connection test result."""
        device_label = self._device_label(router)
        transport = result.get('transport') or 'unknown transport'
        auth_mode = result.get('auth_mode')
        http_status = result.get('http_status')
        backup_available = result.get('backup_available')
        hostname = result.get('hostname')
        model = result.get('model')
        version = result.get('version')
        uptime = result.get('uptime')
        server = result.get('server')

        details = [f'via {transport}', f'target={router.host}:{router.port}']
        if router.device_type == 'routeros':
            if router.ssh_user:
                details.append(f'user={router.ssh_user}')
            if hostname:
                details.append(f'hostname={hostname}')
            if model and model != 'Unknown':
                details.append(f'model={model}')
            if version and version != 'Unknown':
                details.append(f'version={version}')
            if uptime and uptime != 'Unknown':
                details.append(f'uptime={uptime}')
        else:
            if auth_mode:
                details.append(f'auth={auth_mode}')
            if http_status:
                details.append(f'http={http_status}')
            if server:
                details.append(f'server={server}')
            if backup_available is not None:
                details.append(f'backup_available={"yes" if backup_available else "no"}')
            if hostname:
                details.append(f'hostname={hostname}')

        detail_suffix = f' ({", ".join(details)})' if details else ''
        if result.get('success'):
            return f'Connection test OK for {device_label}{detail_suffix}'

        error = result.get('error') or 'Unknown error'
        return f'Connection test FAILED for {device_label}{detail_suffix}: {error}'

    def test_connection(self, db: Session, router: Router, global_settings):
        """Probe the device, persist the outcome and write an operation-log entry.

        *global_settings* must be the GlobalSettings object (its global_ssh_key
        is read here), not the key string itself.
        """
        result = self.probe_connection(router, global_settings.global_ssh_key, global_settings)
        stored_result = self._store_connection_result(db, router, result)
        log_service.add(db, self._build_connection_log_message(router, stored_result))
        return stored_result
|
||||
|
||||
|
||||
# Shared module-level instance used by the backup service and scheduler.
router_service = RouterService()
|
||||
249
backend/app/services/scheduler.py
Normal file
249
backend/app/services/scheduler.py
Normal file
@@ -0,0 +1,249 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
from app.core.config import settings as app_settings
|
||||
from app.core.cron_utils import CronValidationError, describe_cron_expression, parse_cron_expression, preview_next_runs
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.router import Router
|
||||
from app.services.backup_service import backup_service
|
||||
from app.services.log_service import log_service
|
||||
from app.services.router_service import router_service
|
||||
from app.services.settings_service import settings_service
|
||||
|
||||
|
||||
class SchedulerService:
    """Owns the APScheduler instance and keeps its jobs in sync with settings."""

    def __init__(self):
        self.scheduler = BackgroundScheduler(timezone=app_settings.timezone)
        self.started = False

    def start(self):
        """Build all jobs from current settings and start the scheduler (idempotent)."""
        if self.started:
            return
        self.reschedule()
        self.scheduler.start()
        self.started = True

    def shutdown(self):
        """Stop the scheduler without waiting for running jobs to finish."""
        if self.started:
            self.scheduler.shutdown(wait=False)
            self.started = False

    def _parse_cron(self, expr: str):
        """Parse a cron expression into a trigger using the app timezone."""
        return parse_cron_expression(expr, app_settings.timezone)

    def validate_cron(self, expr: str):
        """Validate a cron expression (raises CronValidationError when invalid)."""
        return self._parse_cron(expr)

    def _interval_next_runs(self, minutes: int, count: int = 3):
        """Approximate the next *count* run times of an interval job from now."""
        now = datetime.now()
        return [now + timedelta(minutes=minutes * index) for index in range(1, count + 1)]

    def scheduler_status(self):
        """Describe every known job (enabled state, cron, next runs) for the UI."""
        with SessionLocal() as db:
            settings = settings_service.get_or_create(db)
            return {
                'timezone': app_settings.timezone,
                'running': self.started,
                'jobs': [
                    self._describe_job(
                        key='auto_export',
                        label='settings.schedulerAutoExportLabel',
                        enabled=settings.enable_auto_export,
                        cron=settings.export_cron,
                    ),
                    self._describe_job(
                        key='auto_binary',
                        label='settings.schedulerBinaryLabel',
                        enabled=bool(settings.binary_cron),
                        cron=settings.binary_cron,
                    ),
                    self._describe_job(
                        key='retention',
                        label='settings.schedulerRetentionLabel',
                        enabled=bool(settings.retention_cron),
                        cron=settings.retention_cron,
                    ),
                    self._describe_interval_job(
                        key='connection_probe',
                        label='settings.schedulerConnectionLabel',
                        minutes=settings.connection_test_interval_minutes,
                    ),
                    {
                        # The log-cleanup job is always on and has no cron to describe.
                        'key': 'log_cleanup',
                        'label': 'settings.schedulerLogsLabel',
                        'enabled': True,
                        'cron': None,
                        'description': 'settings.schedulerLogsDescription',
                        'description_params': None,
                        'valid': True,
                        'next_runs': [],
                        'error': None,
                    },
                ],
            }

    def _describe_job(self, key: str, label: str, enabled: bool, cron: str | None):
        """Describe one cron-driven job; reports invalid crons instead of raising."""
        cron = (cron or '').strip()
        if not enabled or not cron:
            return {
                'key': key,
                'label': label,
                'enabled': False,
                'cron': cron or None,
                'description': 'settings.scheduleDisabledHint',
                'description_params': None,
                'valid': True,
                'next_runs': [],
                'error': None,
            }
        try:
            next_runs = preview_next_runs(cron, app_settings.timezone, count=3)
            return {
                'key': key,
                'label': label,
                'enabled': True,
                'cron': cron,
                'description': 'settings.schedulerCronDescription',
                'description_params': {'description': describe_cron_expression(cron)},
                'valid': True,
                'next_runs': next_runs,
                'error': None,
            }
        except CronValidationError as exc:
            return {
                'key': key,
                'label': label,
                'enabled': True,
                'cron': cron,
                'description': 'settings.schedulerInvalidCron',
                'description_params': None,
                'valid': False,
                'next_runs': [],
                'error': str(exc),
            }

    def _describe_interval_job(self, key: str, label: str, minutes: int):
        """Describe one interval-driven job; minutes <= 0 means disabled."""
        minutes = int(minutes or 0)
        if minutes <= 0:
            return {
                'key': key,
                'label': label,
                'enabled': False,
                'cron': None,
                'description': 'settings.connectionTestsDisabledHint',
                'description_params': None,
                'valid': True,
                'next_runs': [],
                'error': None,
            }
        return {
            'key': key,
            'label': label,
            'enabled': True,
            'cron': None,
            'description': 'settings.connectionTestsEverySummary',
            'description_params': {'minutes': minutes},
            'valid': True,
            'next_runs': self._interval_next_runs(minutes),
            'error': None,
        }

    def reschedule(self):
        """Drop all jobs and rebuild them from the current persisted settings.

        Invalid cron expressions are skipped and logged rather than raised, so
        one bad schedule cannot disable the others.
        """
        self.scheduler.remove_all_jobs()
        with SessionLocal() as db:
            settings = settings_service.get_or_create(db)
            job_definitions = [
                ('auto_export', settings.enable_auto_export, settings.export_cron, self._run_auto_export, 'auto export'),
                ('auto_binary', bool(settings.binary_cron), settings.binary_cron, self._run_binary_backup, 'binary backup'),
                ('retention', bool(settings.retention_cron), settings.retention_cron, self._run_retention, 'retention cleanup'),
            ]

            pending_logs: list[str] = []
            for job_id, enabled, cron, callback, label in job_definitions:
                cron = (cron or '').strip()
                if not enabled or not cron:
                    continue
                try:
                    trigger = self._parse_cron(cron)
                    self.scheduler.add_job(
                        callback,
                        trigger=trigger,
                        id=job_id,
                        replace_existing=True,
                        coalesce=True,
                        max_instances=1,
                        misfire_grace_time=300,
                    )
                except Exception as exc:
                    pending_logs.append(f'Scheduler skipped invalid {label} cron ({cron}): {exc}')

            if int(settings.connection_test_interval_minutes or 0) > 0:
                self.scheduler.add_job(
                    self._run_connection_probes,
                    trigger='interval',
                    minutes=int(settings.connection_test_interval_minutes),
                    id='connection_probe',
                    replace_existing=True,
                    coalesce=True,
                    max_instances=1,
                    misfire_grace_time=300,
                )

            # Log retention cleanup is unconditional: once a day.
            self.scheduler.add_job(
                self._run_log_cleanup,
                trigger='interval',
                days=1,
                id='log_cleanup',
                replace_existing=True,
                coalesce=True,
                max_instances=1,
                misfire_grace_time=300,
            )

            for message in pending_logs:
                log_service.add(db, message, commit=False)
            if pending_logs:
                db.commit()

    def _run_auto_export(self):
        """Scheduled job: text-export every router on behalf of its owner."""
        with SessionLocal() as db:
            routers = db.query(Router).all()
            for router in routers:
                try:
                    # backup_service only reads user.id, so a lightweight stand-in
                    # for the owner is sufficient here.
                    backup_service.export_router(db, type('U', (), {'id': router.owner_id})(), router.id)
                except Exception as exc:
                    log_service.add(db, f'Scheduled export failed for {router.name}: {exc}')

    def _run_binary_backup(self):
        """Scheduled job: binary-backup every router on behalf of its owner."""
        with SessionLocal() as db:
            routers = db.query(Router).all()
            for router in routers:
                try:
                    backup_service.binary_backup(db, type('U', (), {'id': router.owner_id})(), router.id)
                except Exception as exc:
                    log_service.add(db, f'Scheduled binary backup failed for {router.name}: {exc}')

    def _run_retention(self):
        """Scheduled job: delete backups past the retention window."""
        with SessionLocal() as db:
            backup_service.cleanup_old_backups(db)

    def _run_connection_probes(self):
        """Scheduled job: test connectivity of every router and log failures."""
        with SessionLocal() as db:
            settings = settings_service.get_or_create(db)
            routers = db.query(Router).all()
            for router in routers:
                # BUG FIX: test_connection expects the GlobalSettings object
                # (it reads .global_ssh_key itself); passing the key string
                # raised AttributeError on every scheduled probe.
                result = router_service.test_connection(db, router, settings)
                if not result['success']:
                    log_service.add(db, f'Scheduled connection test failed for {router.name}: {result.get("error") or "Unknown error"}')

    def _run_log_cleanup(self):
        """Scheduled job: prune operation-log entries past the retention window."""
        with SessionLocal() as db:
            settings = settings_service.get_or_create(db)
            deleted = log_service.delete_older_than(db, settings.log_retention_days)
            log_service.add(db, f'Log retention cleanup removed {deleted} entries older than {settings.log_retention_days} days')
|
||||
|
||||
|
||||
# Shared module-level instance started from the application lifespan hooks.
scheduler_service = SchedulerService()
|
||||
32
backend/app/services/settings_service.py
Normal file
32
backend/app/services/settings_service.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.models.settings import GlobalSettings
|
||||
from app.schemas.settings import SettingsUpdate
|
||||
|
||||
|
||||
class SettingsService:
    """CRUD helper for the single GlobalSettings row."""

    def get_or_create(self, db: Session) -> GlobalSettings:
        """Return the settings row, creating it with defaults on first use."""
        record = db.query(GlobalSettings).first()
        if not record:
            record = GlobalSettings()
            db.add(record)
            db.commit()
            db.refresh(record)
        return record

    def update(self, db: Session, payload: SettingsUpdate) -> GlobalSettings:
        """Apply *payload* onto the settings row and persist it."""
        record = self.get_or_create(db)
        updates = payload.model_dump(exclude={'clear_global_ssh_key'})
        for field, value in updates.items():
            # A None SSH key means "leave unchanged" unless clearing was requested.
            if field == 'global_ssh_key' and value is None and not payload.clear_global_ssh_key:
                continue
            setattr(record, field, value)
        if payload.clear_global_ssh_key:
            record.global_ssh_key = None
        db.add(record)
        db.commit()
        db.refresh(record)
        return record
|
||||
|
||||
|
||||
# Module-level singleton shared by API routes and scheduled jobs.
settings_service = SettingsService()
|
||||
174
backend/app/services/swos_beta_service.py
Normal file
174
backend/app/services/swos_beta_service.py
Normal file
@@ -0,0 +1,174 @@
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import requests
|
||||
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
|
||||
|
||||
from app.schemas.swos_beta import SwosBetaCredentials, SwosBetaProbeResponse
|
||||
|
||||
|
||||
@dataclass
class DownloadedSwosBackup:
    """In-memory result of fetching ``/backup.swb`` from a SwitchOS device."""

    # Suggested filename, e.g. "<label>-switchos-<timestamp>.swb".
    filename: str
    # Raw backup bytes exactly as returned by the device.
    content: bytes
    # Content-Type header from the device, or "application/octet-stream".
    content_type: str
    # Auth variant that succeeded: 'digest', 'basic' or 'none'.
    auth_mode: str
    # Normalized http(s)://host[:port] the backup was fetched from.
    base_url: str
|
||||
|
||||
|
||||
class SwosBetaService:
    """HTTP(S) client for MikroTik SwitchOS (SwOS) devices.

    SwOS exposes no API, so this service talks plain HTTP: it probes the web
    UI, checks the ``/backup.swb`` endpoint and downloads binary backups,
    trying Digest then Basic authentication (optionally no auth) in turn.
    """

    # Per-request timeout (seconds) for every HTTP call made by this service.
    timeout_seconds = 12

    def probe(self, payload: SwosBetaCredentials) -> SwosBetaProbeResponse:
        """Check reachability of a SwOS device and of its backup endpoint."""
        base_url = self._build_base_url(payload.host, payload.port)
        response, auth_mode = self._request_with_fallback('GET', base_url, payload)
        # Only inspect the body when the device returned a text content type.
        html = response.text if 'text' in (response.headers.get('content-type') or '').lower() else ''
        title = self._extract_title(html)

        # Separate request for the backup endpoint; the unauthenticated
        # fallback is disabled so backups are never fetched anonymously.
        backup_response, _ = self._request_with_fallback('GET', f'{base_url}/backup.swb', payload, allow_text_fallback=False)
        backup_ok = backup_response.status_code == 200 and len(backup_response.content) > 0

        return SwosBetaProbeResponse(
            success=response.ok,
            base_url=base_url,
            status_code=response.status_code,
            auth_mode=auth_mode,
            page_title=title,
            content_type=response.headers.get('content-type'),
            server=response.headers.get('server'),
            save_backup_visible='save backup' in html.lower(),
            backup_endpoint_ok=backup_ok,
            note='SwitchOS jest obsługiwany bezpośrednio w liście urządzeń.'
        )

    def probe_router(self, router, global_settings) -> dict:
        """Probe a stored router record; return a RouterOS-shaped result dict.

        Never raises — failures are reported via ``success=False`` and the
        ``error`` field so callers can treat SwOS and RouterOS devices alike.
        """
        payload = self.credentials_from_router(router, global_settings)
        # NOTE(review): datetime.utcnow() is deprecated since Python 3.12 —
        # consider datetime.now(timezone.utc); confirm downstream consumers
        # accept timezone-aware datetimes before changing.
        tested_at = datetime.utcnow()
        try:
            result = self.probe(payload)
            return {
                'success': result.success,
                'tested_at': tested_at,
                'model': 'SwitchOS',
                # Repurposed field: HTTP status stands in for device uptime.
                'uptime': f'HTTP {result.status_code}',
                'hostname': result.page_title or router.name,
                'version': None,
                'error': None,
                'transport': 'http',
                'server': result.server,
                'auth_mode': result.auth_mode,
                'http_status': str(result.status_code),
                'backup_available': result.backup_endpoint_ok,
            }
        except Exception as exc:
            return {
                'success': False,
                'tested_at': tested_at,
                'model': 'SwitchOS',
                'uptime': 'HTTP',
                'hostname': router.name,
                'version': None,
                'error': str(exc),
                'transport': 'http',
                'server': None,
                'auth_mode': None,
                'http_status': None,
                'backup_available': None,
            }

    def credentials_from_router(self, router, global_settings) -> SwosBetaCredentials:
        """Build SwOS credentials from a router record, falling back to globals.

        Raises ValueError when no username is configured anywhere.
        """
        # Device-level SSH credentials double as SwOS web credentials; the
        # global SwitchOS defaults are the fallback.
        username = (getattr(router, 'ssh_user', None) or '').strip() or (getattr(global_settings, 'default_switchos_username', None) or '').strip()
        password = (getattr(router, 'ssh_password', None) or '').strip() or (getattr(global_settings, 'default_switchos_password', None) or '').strip()
        if not username:
            raise ValueError('Brak użytkownika SwitchOS. Ustaw dane w urządzeniu albo w ustawieniach globalnych.')
        return SwosBetaCredentials(
            host=router.host,
            port=router.port or 80,
            username=username,
            password=password,
            label=router.name,
        )

    def download_backup(self, payload: SwosBetaCredentials) -> DownloadedSwosBackup:
        """Fetch ``/backup.swb`` and return the binary backup.

        Raises ValueError on non-200 responses or an empty body.
        """
        base_url = self._build_base_url(payload.host, payload.port)
        response, auth_mode = self._request_with_fallback('GET', f'{base_url}/backup.swb', payload, allow_text_fallback=False)
        if response.status_code != 200:
            raise ValueError(f'Urządzenie zwróciło kod HTTP {response.status_code} dla /backup.swb.')
        if not response.content:
            raise ValueError('Urządzenie zwróciło pusty plik backupu.')

        filename = self._build_filename(payload)
        content_type = response.headers.get('content-type') or 'application/octet-stream'
        return DownloadedSwosBackup(
            filename=filename,
            content=response.content,
            content_type=content_type,
            auth_mode=auth_mode,
            base_url=base_url,
        )

    def download_backup_for_router(self, router, global_settings) -> DownloadedSwosBackup:
        """Convenience wrapper: resolve credentials, then download the backup."""
        return self.download_backup(self.credentials_from_router(router, global_settings))

    def _request_with_fallback(self, method: str, url: str, payload: SwosBetaCredentials, allow_text_fallback: bool = True):
        """Issue *method* *url*, trying Digest, then Basic, then optionally no auth.

        Returns ``(response, auth_label)`` for the first attempt with status
        < 400; raises ValueError listing all failed attempts otherwise.
        """
        attempts = []
        auth_variants = [
            ('digest', HTTPDigestAuth(payload.username, payload.password)),
            ('basic', HTTPBasicAuth(payload.username, payload.password)),
        ]
        if allow_text_fallback:
            # Some devices serve the UI without auth; never used for backups.
            auth_variants.append(('none', None))

        last_response = None
        for label, auth in auth_variants:
            try:
                response = requests.request(
                    method,
                    url,
                    auth=auth,
                    timeout=self.timeout_seconds,
                    allow_redirects=True,
                )
                last_response = response
                if response.status_code < 400:
                    return response, label
                attempts.append(f'{label}:{response.status_code}')
            except requests.RequestException as exc:
                # Record the failure class (timeout, connection error, ...)
                # and continue with the next auth variant.
                attempts.append(f'{label}:{exc.__class__.__name__}')

        if last_response is not None:
            raise ValueError(f'Nie udało się połączyć ze SwitchOS ({", ".join(attempts)}).')
        raise ValueError('Nie udało się połączyć ze SwitchOS.')

    def _build_base_url(self, host: str, port: int) -> str:
        """Normalize *host* (+ optional scheme/port) into ``scheme://host[:port]``.

        Raises ValueError for non-HTTP(S) schemes or an unparsable host.
        """
        raw = host.strip()
        parsed = urlparse(raw if '://' in raw else f'http://{raw}')
        scheme = parsed.scheme or 'http'
        if scheme not in {'http', 'https'}:
            raise ValueError('Dozwolone są tylko adresy HTTP lub HTTPS.')
        if not parsed.hostname:
            raise ValueError('Nieprawidłowy adres hosta.')
        # A port embedded in the host string wins over the separate argument.
        resolved_port = parsed.port or port
        base = f'{scheme}://{parsed.hostname}'
        # Omit the port only for the scheme's default (http:80 / https:443).
        # NOTE(review): the first clause is subsumed by the other two and
        # could be dropped without changing behavior.
        if resolved_port not in {80, 443} or (scheme == 'http' and resolved_port != 80) or (scheme == 'https' and resolved_port != 443):
            base = f'{base}:{resolved_port}'
        return base.rstrip('/')

    def _extract_title(self, html: str) -> str | None:
        """Return the whitespace-collapsed <title> text, or None when absent."""
        if not html:
            return None
        match = re.search(r'<title>(.*?)</title>', html, flags=re.IGNORECASE | re.DOTALL)
        if not match:
            return None
        return re.sub(r'\s+', ' ', match.group(1)).strip() or None

    def _build_filename(self, payload: SwosBetaCredentials) -> str:
        """Build a filesystem-safe, timestamped filename for a downloaded backup."""
        label = payload.label or payload.host
        # Collapse anything outside [A-Za-z0-9._-] so the name is path-safe.
        safe = re.sub(r'[^A-Za-z0-9._-]+', '-', label).strip('-') or 'switchos'
        timestamp = datetime.now().strftime('%Y%m%d-%H%M%S')
        return f'{safe}-switchos-{timestamp}.swb'
|
||||
|
||||
|
||||
# Module-level singleton used by the API routes and the backup scheduler.
swos_beta_service = SwosBetaService()
|
||||
14
backend/requirements.txt
Normal file
14
backend/requirements.txt
Normal file
@@ -0,0 +1,14 @@
|
||||
fastapi[standard]==0.135.2
|
||||
sqlalchemy==2.0.49
|
||||
pydantic==2.12.5
|
||||
pydantic-settings==2.13.1
|
||||
python-jose[cryptography]==3.5.0
|
||||
passlib==1.7.4
|
||||
python-multipart==0.0.20
|
||||
paramiko==3.5.1
|
||||
apscheduler==3.11.0
|
||||
requests==2.32.3
|
||||
alembic==1.15.2
|
||||
email-validator==2.2.0
|
||||
pytest==8.3.5
|
||||
httpx==0.28.1
|
||||
117
backend/scripts/migrate_legacy_sqlite.py
Executable file
117
backend/scripts/migrate_legacy_sqlite.py
Executable file
@@ -0,0 +1,117 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Import data from the original Flask SQLite database into the new schema.
|
||||
|
||||
Usage:
|
||||
python backend/scripts/migrate_legacy_sqlite.py /path/to/backup_routeros.db
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[1]
|
||||
sys.path.insert(0, str(ROOT))
|
||||
|
||||
from app.core.security import get_password_hash # noqa: E402
|
||||
from app.db.session import SessionLocal, init_db # noqa: E402
|
||||
from app.models.backup import Backup # noqa: E402
|
||||
from app.models.log import OperationLog # noqa: E402
|
||||
from app.models.router import Router # noqa: E402
|
||||
from app.models.settings import GlobalSettings # noqa: E402
|
||||
from app.models.user import User # noqa: E402
|
||||
|
||||
|
||||
def parse_dt(value):
    """Convert a legacy SQLite timestamp string to ``datetime`` (None on failure).

    The old database stored timestamps either with or without fractional
    seconds; both layouts are accepted. Empty / None input yields None.
    """
    if not value:
        return None
    try:
        return datetime.strptime(value, "%Y-%m-%d %H:%M:%S.%f")
    except ValueError:
        pass
    try:
        return datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        return None
|
||||
|
||||
|
||||
def main() -> int:
    """Copy users, routers, backups, logs and settings from the legacy DB.

    Returns a process exit code: 0 on success, 1 on usage/input errors.
    """
    if len(sys.argv) != 2:
        print("Usage: python backend/scripts/migrate_legacy_sqlite.py /path/to/legacy.db")
        return 1

    source_path = Path(sys.argv[1]).resolve()
    if not source_path.exists():
        print(f"Legacy DB not found: {source_path}")
        return 1

    # Ensure the destination schema exists before inserting anything.
    init_db()
    source = sqlite3.connect(str(source_path))
    # Row factory gives dict-style access to legacy columns by name.
    source.row_factory = sqlite3.Row
    dest = SessionLocal()

    try:
        # Map legacy user ids -> new ids so foreign keys can be rewritten.
        user_map: dict[int, int] = {}
        for row in source.execute("SELECT id, username, password_hash FROM users ORDER BY id"):
            existing = dest.query(User).filter(User.username == row["username"]).first()
            if existing:
                # Re-running the migration must not duplicate users.
                user_map[row["id"]] = existing.id
                continue
            # Legacy rows without a hash get a placeholder "admin" password.
            user = User(username=row["username"], password_hash=row["password_hash"] or get_password_hash("admin"))
            dest.add(user)
            # flush() assigns the new primary key without committing yet.
            dest.flush()
            user_map[row["id"]] = user.id

        # Map legacy router ids -> new ids for the backups pass below.
        router_map: dict[int, int] = {}
        for row in source.execute(
            "SELECT id, owner_id, name, host, port, ssh_user, ssh_key, ssh_password, created_at FROM routers ORDER BY id"
        ):
            router = Router(
                # Orphaned routers fall back to the first migrated user (or id 1).
                owner_id=user_map.get(row["owner_id"], next(iter(user_map.values()), 1)),
                name=row["name"],
                host=row["host"],
                port=row["port"] or 22,
                ssh_user=row["ssh_user"] or "admin",
                ssh_key=row["ssh_key"],
                ssh_password=row["ssh_password"],
                created_at=parse_dt(row["created_at"]),
            )
            dest.add(router)
            dest.flush()
            router_map[row["id"]] = router.id

        for row in source.execute(
            "SELECT router_id, file_path, backup_type, created_at, checksum FROM backups ORDER BY id"
        ):
            # The legacy schema stored only a path; derive the display name.
            file_name = Path(row["file_path"] or "backup").name
            backup = Backup(
                router_id=router_map[row["router_id"]],
                file_path=row["file_path"],
                file_name=file_name,
                backup_type=row["backup_type"] or "export",
                created_at=parse_dt(row["created_at"]),
                checksum=row["checksum"],
            )
            dest.add(backup)

        for row in source.execute("SELECT message, timestamp FROM operation_logs ORDER BY id"):
            dest.add(OperationLog(message=row["message"], timestamp=parse_dt(row["timestamp"])))

        # Copy any matching columns from the (single) legacy settings row.
        legacy_settings = source.execute("SELECT * FROM global_settings ORDER BY id LIMIT 1").fetchone()
        if legacy_settings:
            settings = dest.query(GlobalSettings).first() or GlobalSettings()
            for key in legacy_settings.keys():
                # Only copy columns that still exist in the new model.
                if hasattr(settings, key):
                    setattr(settings, key, legacy_settings[key])
            dest.add(settings)

        # Single commit: the whole migration is applied atomically.
        dest.commit()
        print("Migration completed")
        return 0
    finally:
        source.close()
        dest.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
35
backend/tests/test_auth.py
Normal file
35
backend/tests/test_auth.py
Normal file
@@ -0,0 +1,35 @@
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from app.main import app
|
||||
|
||||
|
||||
def test_login_accepts_form_and_json(monkeypatch, tmp_path):
    """The login endpoint must accept both form-encoded and JSON credentials."""
    env = {
        "DATABASE_URL": f"sqlite:///{tmp_path / 'auth.db'}",
        "DATA_DIR": str(tmp_path / 'data'),
        "SECRET_KEY": "test-secret",
        "DEFAULT_ADMIN_USERNAME": "admin",
        "DEFAULT_ADMIN_PASSWORD": "admin",
    }
    for name, value in env.items():
        monkeypatch.setenv(name, value)

    # Entering the context runs the app lifespan (DB init, admin seeding).
    with TestClient(app) as client:
        via_form = client.post("/api/auth/login", data={"username": "admin", "password": "admin"})
        assert via_form.status_code == 200
        assert "access_token" in via_form.json()

        via_json = client.post("/api/auth/login", json={"username": "admin", "password": "admin"})
        assert via_json.status_code == 200
        assert "access_token" in via_json.json()
|
||||
|
||||
|
||||
def test_auth_me(monkeypatch, tmp_path):
    """/api/auth/me must return the profile of the bearer-token owner."""
    env = {
        "DATABASE_URL": f"sqlite:///{tmp_path / 'me.db'}",
        "DATA_DIR": str(tmp_path / 'data'),
        "SECRET_KEY": "test-secret",
        "DEFAULT_ADMIN_USERNAME": "admin",
        "DEFAULT_ADMIN_PASSWORD": "admin",
    }
    for name, value in env.items():
        monkeypatch.setenv(name, value)

    with TestClient(app) as client:
        token = client.post(
            "/api/auth/login", data={"username": "admin", "password": "admin"}
        ).json()["access_token"]
        profile = client.get("/api/auth/me", headers={"Authorization": f"Bearer {token}"})
        assert profile.status_code == 200
        assert profile.json()["username"] == "admin"
|
||||
10
backend/tests/test_health.py
Normal file
10
backend/tests/test_health.py
Normal file
@@ -0,0 +1,10 @@
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from app.main import app
|
||||
|
||||
|
||||
def test_health_endpoint():
    """GET /api/health responds 200 with a recognised status value."""
    # Plain construction (no context manager): health must work even
    # without the lifespan startup having run.
    api = TestClient(app)
    reply = api.get("/api/health")
    assert reply.status_code == 200
    assert reply.json()["status"] in {"ok", "error"}
|
||||
125
backend/tests/test_routeros_logging_and_files_filters.py
Normal file
125
backend/tests/test_routeros_logging_and_files_filters.py
Normal file
@@ -0,0 +1,125 @@
|
||||
from datetime import datetime
|
||||
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.backup import Backup
|
||||
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from app.main import app
|
||||
|
||||
|
||||
def _login(client: TestClient) -> dict[str, str]:
    """Authenticate as the default admin and return an Authorization header."""
    reply = client.post('/api/auth/login', data={'username': 'admin', 'password': 'admin'})
    bearer = reply.json()['access_token']
    return {'Authorization': f'Bearer {bearer}'}
|
||||
|
||||
|
||||
def test_routeros_connection_test_creates_verbose_operation_log(monkeypatch):
    """A successful connection test must leave a detailed operation-log entry."""
    from app.services import router_service as router_service_module

    # Stub the SSH probe so the test never opens a real connection; the
    # returned dict mirrors the probe_connection result shape.
    monkeypatch.setattr(
        router_service_module.router_service,
        'probe_connection',
        lambda router, global_ssh_key=None, global_settings=None: {
            'success': True,
            'tested_at': datetime(2026, 4, 13, 10, 30, 0),
            'model': 'RB5009UG+S+',
            'uptime': '1d2h',
            'hostname': 'rb5009-core',
            'version': '7.18.2',
            'error': None,
            'transport': 'ssh',
            'server': None,
            'auth_mode': 'ssh',
            'http_status': None,
            'backup_available': None,
        },
    )

    with TestClient(app) as client:
        headers = _login(client)
        # Register a device; its name/host/port/user must appear in the log.
        create_response = client.post(
            '/api/routers',
            json={
                'name': 'core01',
                'device_type': 'routeros',
                'host': '10.10.10.1',
                'port': 2222,
                'ssh_user': 'backup',
                'ssh_password': 'secret',
                'ssh_key': None,
            },
            headers=headers,
        )
        assert create_response.status_code == 200
        device_id = create_response.json()['id']

        response = client.get(f'/api/routers/{device_id}/test-connection', headers=headers)
        assert response.status_code == 200

        # The log line must mention every detail the stubbed probe returned.
        logs_response = client.get('/api/logs', headers=headers)
        assert logs_response.status_code == 200
        assert any(
            'Connection test OK for RouterOS device core01' in item['message']
            and 'via ssh' in item['message']
            and 'target=10.10.10.1:2222' in item['message']
            and 'user=backup' in item['message']
            and 'hostname=rb5009-core' in item['message']
            and 'model=RB5009UG+S+' in item['message']
            and 'version=7.18.2' in item['message']
            and 'uptime=1d2h' in item['message']
            for item in logs_response.json()
        )
|
||||
|
||||
|
||||
def test_files_endpoint_filters_backups_by_created_on_date(monkeypatch):
    """GET /api/backups?created_on=... must filter to backups from that day only."""
    from app.services import router_service as router_service_module

    # Stub the export so no SSH connection is attempted.
    monkeypatch.setattr(
        router_service_module.router_service,
        'export',
        lambda router, global_ssh_key=None: f'/system identity set name={router.name}',
    )

    with TestClient(app) as client:
        headers = _login(client)
        create_response = client.post(
            '/api/routers',
            json={
                'name': 'archive01',
                'device_type': 'routeros',
                'host': '10.10.10.2',
                'port': 22,
                'ssh_user': 'admin',
                'ssh_password': 'secret',
                'ssh_key': None,
            },
            headers=headers,
        )
        assert create_response.status_code == 200
        device_id = create_response.json()['id']

        # Create two backups, then backdate them to two different days.
        first = client.post(f'/api/backups/router/{device_id}/export', headers=headers)
        second = client.post(f'/api/backups/router/{device_id}/export', headers=headers)
        assert first.status_code == 200
        assert second.status_code == 200

        # Rewrite created_at directly in the DB — the API has no way to
        # create backups with historical timestamps.
        with SessionLocal() as db:
            first_backup = db.query(Backup).filter(Backup.id == first.json()['id']).first()
            second_backup = db.query(Backup).filter(Backup.id == second.json()['id']).first()
            first_backup.created_at = datetime(2026, 4, 12, 9, 15, 0)
            second_backup.created_at = datetime(2026, 4, 13, 11, 45, 0)
            db.add(first_backup)
            db.add(second_backup)
            db.commit()

        # Each day's filter must return exactly the backup from that day.
        filtered = client.get(f'/api/backups?router_id={device_id}&created_on=2026-04-13', headers=headers)
        assert filtered.status_code == 200
        payload = filtered.json()
        assert len(payload) == 1
        assert payload[0]['created_at'].startswith('2026-04-13T11:45:00')

        previous_day = client.get(f'/api/backups?router_id={device_id}&created_on=2026-04-12', headers=headers)
        assert previous_day.status_code == 200
        assert len(previous_day.json()) == 1
        assert previous_day.json()[0]['created_at'].startswith('2026-04-12T09:15:00')
|
||||
62
backend/tests/test_routers.py
Normal file
62
backend/tests/test_routers.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from app.main import app
|
||||
|
||||
|
||||
def test_router_list_marks_global_ssh_key_usage(monkeypatch, tmp_path):
    """Routers without their own key must be flagged as using the global SSH key."""
    monkeypatch.setenv("DATABASE_URL", f"sqlite:///{tmp_path / 'routers.db'}")
    monkeypatch.setenv("DATA_DIR", str(tmp_path / 'data'))
    monkeypatch.setenv("SECRET_KEY", "test-secret")
    monkeypatch.setenv("DEFAULT_ADMIN_USERNAME", "admin")
    monkeypatch.setenv("DEFAULT_ADMIN_PASSWORD", "admin")

    with TestClient(app) as client:
        login_response = client.post("/api/auth/login", data={"username": "admin", "password": "admin"})
        token = login_response.json()["access_token"]
        headers = {"Authorization": f"Bearer {token}"}

        # Store a global SSH key; the settings PUT requires the full payload.
        settings_response = client.put(
            "/api/settings",
            json={
                "backup_retention_days": 7,
                "log_retention_days": 7,
                "export_cron": "",
                "binary_cron": "",
                "retention_cron": "",
                "enable_auto_export": False,
                "connection_test_interval_minutes": 0,
                "global_ssh_key": "-----BEGIN OPENSSH PRIVATE KEY-----\nabc\n-----END OPENSSH PRIVATE KEY-----",
                "pushover_token": None,
                "pushover_userkey": None,
                "notify_failures_only": True,
                "smtp_host": None,
                "smtp_port": 587,
                "smtp_login": None,
                "smtp_password": None,
                "smtp_notifications_enabled": False,
                "recipient_email": None,
                "clear_global_ssh_key": False
            },
            headers=headers,
        )
        assert settings_response.status_code == 200

        # Router with neither its own key nor a password -> falls back to
        # the global key configured above.
        create_response = client.post(
            "/api/routers",
            json={
                "name": "edge01",
                "host": "10.0.0.1",
                "port": 22,
                "ssh_user": "admin",
                "ssh_password": None,
                "ssh_key": None
            },
            headers=headers,
        )
        assert create_response.status_code == 200

        list_response = client.get("/api/routers", headers=headers)
        assert list_response.status_code == 200
        payload = list_response.json()
        assert payload[0]["uses_global_ssh_key"] is True
        assert payload[0]["has_effective_ssh_key"] is True
|
||||
24
backend/tests/test_scheduler.py
Normal file
24
backend/tests/test_scheduler.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from app.core.cron_utils import CronValidationError, describe_cron_expression, preview_next_runs, validate_cron_expression
|
||||
|
||||
|
||||
def test_validate_cron_expression_accepts_daily_schedule():
    """A well-formed daily schedule must validate without raising."""
    validate_cron_expression('15 2 * * *', 'Europe/Warsaw')
|
||||
|
||||
|
||||
def test_validate_cron_expression_rejects_invalid_schedule():
    """An unparseable schedule must raise CronValidationError."""
    try:
        validate_cron_expression('bad cron', 'Europe/Warsaw')
    except CronValidationError:
        return
    # Raise instead of `assert False`: asserts are stripped under
    # `python -O`, which would make this test silently pass.
    raise AssertionError('invalid cron should raise')
|
||||
|
||||
|
||||
def test_preview_next_runs_returns_future_datetimes():
    """preview_next_runs yields the requested number of increasing datetimes."""
    upcoming = preview_next_runs('0 3 * * 1', 'Europe/Warsaw', count=2)
    assert len(upcoming) == 2
    # Consecutive runs must be strictly ordered in time.
    assert upcoming[0] < upcoming[1]
|
||||
|
||||
|
||||
def test_describe_cron_expression_humanizes_common_patterns():
    """A daily schedule is rendered as a human-readable English sentence."""
    assert describe_cron_expression('0 2 * * *') == 'Every day at 02:00'
|
||||
Reference in New Issue
Block a user