first commit
This commit is contained in:
13
backend/.dockerignore
Normal file
13
backend/.dockerignore
Normal file
@@ -0,0 +1,13 @@
|
||||
.venv
|
||||
venv
|
||||
__pycache__
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
*.log
|
||||
.env
|
||||
data
|
||||
*.sqlite3
|
||||
*.db
|
||||
*.db-shm
|
||||
*.db-wal
|
||||
15
backend/Dockerfile
Normal file
15
backend/Dockerfile
Normal file
@@ -0,0 +1,15 @@
|
||||
# Production image for the PV Insight backend.
FROM python:3.14-slim

# Don't write .pyc files; flush stdout/stderr immediately (container-friendly logging).
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

WORKDIR /app

# Install dependencies first so this layer is cached independently of app code changes.
COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -r /app/requirements.txt

COPY . /app

# Backend HTTP port.
EXPOSE 8105

# Production entry point.
CMD ["python", "run_prod.py"]
|
||||
12
backend/Dockerfile.dev
Normal file
12
backend/Dockerfile.dev
Normal file
@@ -0,0 +1,12 @@
|
||||
# Development image for the PV Insight backend (runs run.py, the dev entry point).
FROM python:3.14-slim

WORKDIR /app

# Install dependencies first so this layer is cached independently of app code changes.
COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -r /app/requirements.txt

COPY . /app

# Backend HTTP port.
EXPOSE 8105

# Development entry point.
CMD ["python", "run.py"]
|
||||
3
backend/app/__init__.py
Normal file
3
backend/app/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# Re-export the Flask application object so it can be imported directly
# from the package (e.g. `from app import app`).
from app.main import app

__all__ = ["app"]
|
||||
193
backend/app/app_factory.py
Normal file
193
backend/app/app_factory.py
Normal file
@@ -0,0 +1,193 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from logging.config import dictConfig
|
||||
|
||||
import click
|
||||
from flask import Flask, jsonify, make_response, request, session
|
||||
from werkzeug.exceptions import HTTPException
|
||||
|
||||
from app.core_settings import get_settings
|
||||
from app.routes import (
|
||||
analytics_blueprint,
|
||||
auth_blueprint,
|
||||
dashboard_blueprint,
|
||||
health_blueprint,
|
||||
historical_blueprint,
|
||||
realtime_blueprint,
|
||||
)
|
||||
from app.services.auth import get_auth_service
|
||||
from app.services.historical_sync import get_historical_sync_service
|
||||
|
||||
|
||||
def configure_logging(debug: bool) -> None:
    """Install a console-only logging configuration on the root logger.

    The level is DEBUG when *debug* is true, INFO otherwise; existing
    loggers are left enabled.
    """
    if debug:
        level = "DEBUG"
    else:
        level = "INFO"

    console_handler = {
        "class": "logging.StreamHandler",
        "formatter": "default",
        "level": level,
    }
    logging_config = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "default": {"format": "%(asctime)s | %(levelname)s | %(name)s | %(message)s"}
        },
        "handlers": {"console": console_handler},
        "root": {"handlers": ["console"], "level": level},
    }
    dictConfig(logging_config)
|
||||
|
||||
|
||||
def create_app() -> Flask:
    """Build and configure the Flask application.

    Wires settings, logging, session-based auth, CORS handling, the API
    blueprints, JSON error handlers, CLI commands and background services.
    """
    settings = get_settings()
    configure_logging(settings.debug)

    app = Flask(__name__)
    # Preserve insertion order of keys in JSON responses.
    app.config["JSON_SORT_KEYS"] = False
    get_auth_service().configure_app(app)

    # Health endpoint stays unprefixed; all API blueprints share the prefix.
    app.register_blueprint(health_blueprint)
    app.register_blueprint(auth_blueprint, url_prefix=settings.api_prefix)
    app.register_blueprint(dashboard_blueprint, url_prefix=settings.api_prefix)
    app.register_blueprint(realtime_blueprint, url_prefix=settings.api_prefix)
    app.register_blueprint(analytics_blueprint, url_prefix=settings.api_prefix)
    app.register_blueprint(historical_blueprint, url_prefix=settings.api_prefix)

    @app.get("/")
    def index():
        # Minimal service banner for the root URL.
        return {
            "app": settings.app_name,
            "version": settings.version,
            "api_prefix": settings.api_prefix,
            "message": "PV Insight backend is running",
        }

    @app.before_request
    def handle_preflight_and_auth():
        # Answer CORS preflight requests without touching authentication.
        if request.method == "OPTIONS":
            response = make_response("", 204)
            return _apply_cors(response)

        # Auth can be disabled globally via configuration.
        if not settings.auth["enabled"]:
            return None

        # Always-public endpoints.
        if request.path in {"/", "/health", "/favicon.ico"}:
            return None

        # Login/logout/status must be reachable without a session.
        if request.path.startswith(f"{settings.api_prefix}/auth/"):
            return None

        # Kiosk mode (?publicKiosk=1) exposes a fixed read-only subset of GET endpoints.
        public_kiosk = request.args.get("publicKiosk") == "1"
        public_kiosk_allowed_paths = {
            f"{settings.api_prefix}/dashboard/config",
            f"{settings.api_prefix}/dashboard/kiosk-settings",
            f"{settings.api_prefix}/realtime/snapshot",
            f"{settings.api_prefix}/realtime/history",
            f"{settings.api_prefix}/analytics/production",
            f"{settings.api_prefix}/analytics/distribution",
        }

        if public_kiosk and request.method == "GET" and request.path in public_kiosk_allowed_paths:
            return None

        # Everything else under the API prefix requires a logged-in session.
        if request.path.startswith(settings.api_prefix) and "auth_user" not in session:
            return _apply_cors(make_response(jsonify({"detail": "Authentication required"}), 401))

        return None

    @app.after_request
    def append_cors_headers(response):
        # Attach CORS headers to every outgoing response.
        return _apply_cors(response)

    @app.errorhandler(HTTPException)
    def handle_http_exception(exc: HTTPException):
        # Convert Werkzeug HTTP errors into the JSON error envelope.
        response = jsonify({"detail": exc.description})
        return _apply_cors(make_response(response, exc.code or 500))

    @app.errorhandler(Exception)
    def handle_exception(exc: Exception):
        # Last-resort handler: log the traceback, hide details unless debugging.
        logging.getLogger(__name__).exception("Unhandled application error")
        response = {"detail": str(exc) if settings.debug else "Internal server error"}
        return _apply_cors(make_response(response, 500))

    _register_cli_commands(app)
    _bootstrap_background_services(settings.debug)
    return app
|
||||
|
||||
|
||||
def _register_cli_commands(app: Flask) -> None:
    """Register Flask CLI commands for managing user accounts.

    Adds ``create-admin``, ``create-user`` and ``reset-password`` commands
    that delegate to the auth service and surface failures as CLI errors.
    """
    auth_service = get_auth_service()

    def _create_account(role: str, username: str, password: str, display_name: str | None) -> None:
        # Shared implementation for the two account-creation commands.
        try:
            user = auth_service.create_user(
                username=username,
                password=password,
                role=role,
                display_name=display_name,
            )
            click.echo(f"{role.capitalize()} account created: {user.username}")
        except ValueError as exc:
            # Validation failures (duplicate username, weak password, ...) become CLI errors.
            raise click.ClickException(str(exc)) from exc
        except Exception as exc:  # pragma: no cover
            raise click.ClickException(f"Can't create {role} account: {exc}") from exc

    @app.cli.command("create-admin")
    @click.option("--username", required=True, help="Login")
    @click.option("--password", required=True, hide_input=True, confirmation_prompt=True, help="Password")
    @click.option("--display-name", default=None, help="Name")
    def create_admin_command(username: str, password: str, display_name: str | None):
        """Create a new account with the admin role."""
        # Message fix: the success text was previously in Polish while the rest
        # of the CLI output is English.
        _create_account("admin", username, password, display_name)

    @app.cli.command("create-user")
    @click.option("--username", required=True, help="Login")
    @click.option("--password", required=True, hide_input=True, confirmation_prompt=True, help="Password")
    @click.option("--display-name", default=None, help="Name")
    def create_user_command(username: str, password: str, display_name: str | None):
        """Create a new account with the regular user role."""
        # Bug fix: the success message previously claimed an admin was created.
        _create_account("user", username, password, display_name)

    @app.cli.command("reset-password")
    @click.option("--username", required=True, help="Login")
    @click.option("--password", required=True, hide_input=True, confirmation_prompt=True, help="Password")
    def reset_password_command(username: str, password: str):
        """Set a new password for an existing account."""
        try:
            user = auth_service.reset_password(username=username, new_password=password)
            # Message fix: was "Passowrd reseted for".
            click.echo(f"Password reset for: {user.username}")
        except ValueError as exc:
            raise click.ClickException(str(exc)) from exc
        except Exception as exc:  # pragma: no cover
            raise click.ClickException(f"Can't reset password: {exc}") from exc
|
||||
|
||||
|
||||
def _bootstrap_background_services(debug: bool) -> None:
    """Start background services exactly once per running server.

    Under the Werkzeug debug reloader two processes import the app; only the
    child process (WERKZEUG_RUN_MAIN=true) may start the scheduler, otherwise
    it would run twice.
    """
    if debug and os.environ.get("WERKZEUG_RUN_MAIN") != "true":
        return
    get_historical_sync_service().start_scheduler_if_enabled()
|
||||
|
||||
|
||||
def _apply_cors(response):
    """Attach CORS headers to *response* based on the configured origins.

    Echoes the request's Origin header when it is explicitly allowed (or a
    wildcard is configured); falls back to a literal "*" when no Origin is
    present but the wildcard is configured.
    """
    settings = get_settings()
    origin = request.headers.get("Origin")
    allowed = settings.cors_origins
    if origin and (origin in allowed or "*" in allowed):
        # Echo the caller's origin and mark the response as origin-dependent
        # so shared caches don't serve it to other origins.
        response.headers["Access-Control-Allow-Origin"] = origin
        response.headers.add("Vary", "Origin")
    elif "*" in allowed:
        response.headers["Access-Control-Allow-Origin"] = "*"
    response.headers["Access-Control-Allow-Headers"] = "Content-Type, Authorization"
    response.headers["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS, PUT"
    # NOTE(review): Allow-Credentials is always "true", but browsers reject
    # credentialed requests when Allow-Origin is the literal "*" — confirm the
    # wildcard configuration is never combined with cookie-based sessions.
    response.headers["Access-Control-Allow-Credentials"] = "true"
    return response
|
||||
71
backend/app/core_settings.py
Normal file
71
backend/app/core_settings.py
Normal file
@@ -0,0 +1,71 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from functools import lru_cache
|
||||
|
||||
import config
|
||||
from app.models.definitions import MetricDefinition
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class AppSettings:
    """Immutable application settings assembled from the ``config`` module.

    A single shared instance is built by :func:`get_settings`.
    """

    # Application identity / runtime
    app_name: str
    version: str
    debug: bool
    api_prefix: str
    timezone: str
    host: str
    port: int
    # Site / installation parameters
    site_name: str
    installed_power_kwp: float
    co2_factor: float
    # Subsystem configuration blobs, passed through from config as-is
    influx: dict
    storage: dict
    cors_origins: list[str]
    modules: dict
    realtime: dict
    time_ranges: dict
    analytics: dict
    history: dict
    strings: list[dict]
    status_metrics: list[str]
    visible_entity_table: list[str]
    frontend_defaults: dict
    auth: dict
    i18n: dict
    # Typed metric catalog built from config.METRICS, keyed by metric id
    metrics: dict[str, MetricDefinition]
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
def get_settings() -> AppSettings:
    """Build the process-wide :class:`AppSettings` (cached after the first call)."""
    # Materialize config.METRICS into typed MetricDefinition objects keyed by id.
    metric_catalog = {
        metric_id: MetricDefinition(id=metric_id, **payload)
        for metric_id, payload in config.METRICS.items()
    }
    return AppSettings(
        app_name=config.APP_CONFIG["name"],
        version=config.APP_CONFIG["version"],
        debug=config.APP_CONFIG["debug"],
        api_prefix=config.APP_CONFIG["api_prefix"],
        timezone=config.APP_CONFIG["timezone"],
        host=config.APP_CONFIG["host"],
        port=config.APP_CONFIG["port"],
        site_name=config.SITE_CONFIG["site_name"],
        installed_power_kwp=config.SITE_CONFIG["installed_power_kwp"],
        co2_factor=config.SITE_CONFIG["co2_factor_kg_per_kwh"],
        influx=config.INFLUXDB_CONFIG,
        storage=config.STORAGE_CONFIG,
        cors_origins=config.CORS_ORIGINS,
        modules=config.MODULES,
        realtime=config.REALTIME,
        time_ranges=config.TIME_RANGES,
        analytics=config.ANALYTICS,
        history=config.HISTORY,
        strings=config.STRINGS,
        status_metrics=config.STATUS_METRICS,
        visible_entity_table=config.VISIBLE_ENTITY_TABLE,
        frontend_defaults=config.FRONTEND_DEFAULTS,
        auth=config.AUTH_CONFIG,
        i18n=config.I18N,
        metrics=metric_catalog,
    )
|
||||
5
backend/app/main.py
Normal file
5
backend/app/main.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from __future__ import annotations

from app.app_factory import create_app

# WSGI entry point: the module-level app object served in production.
app = create_app()
|
||||
33
backend/app/models/__init__.py
Normal file
33
backend/app/models/__init__.py
Normal file
@@ -0,0 +1,33 @@
|
||||
# Public re-exports of the model dataclasses defined in .definitions.
from .definitions import (
    AnalyticsSummary,
    BucketPoint,
    DailyEnergyRecord,
    HeroCard,
    HistoricalActivityEvent,
    HistoricalChunkProgress,
    HistoricalCoverage,
    HistoricalImportStatus,
    MetricDefinition,
    MetricValue,
    SeriesPayload,
    SeriesPoint,
    SnapshotGroupRow,
    SnapshotPayload,
)

__all__ = [
    "AnalyticsSummary",
    "BucketPoint",
    "DailyEnergyRecord",
    "HeroCard",
    "HistoricalActivityEvent",
    "HistoricalChunkProgress",
    "HistoricalCoverage",
    "HistoricalImportStatus",
    "MetricDefinition",
    "MetricValue",
    "SeriesPayload",
    "SeriesPoint",
    "SnapshotGroupRow",
    "SnapshotPayload",
]
|
||||
174
backend/app/models/definitions.py
Normal file
174
backend/app/models/definitions.py
Normal file
@@ -0,0 +1,174 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import date, datetime
|
||||
from typing import Any, Literal
|
||||
|
||||
MetricKind = Literal["gauge", "counter", "text"]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class MetricDefinition:
    """Static configuration of one metric: identity, data source and presentation."""

    id: str  # catalog key of the metric
    entity_id: str
    measurement: str
    unit: str = ""
    label: str = ""
    kind: MetricKind = "gauge"
    precision: int = 2  # decimal places used for display
    enabled: bool = True
|
||||
|
||||
|
||||
@dataclass
class MetricValue:
    """A single metric reading prepared for presentation."""

    metric_id: str
    label: str
    unit: str = ""
    value: float | str | None = None  # None when no reading is available
    timestamp: datetime | None = None
    precision: int = 2
    kind: MetricKind = "gauge"
    status: str = "neutral"
|
||||
|
||||
|
||||
@dataclass
class SeriesPoint:
    """One timestamped value in a time series (value None for gaps)."""

    timestamp: datetime
    value: float | None = None
|
||||
|
||||
|
||||
@dataclass
class SeriesPayload:
    """A labelled time series for one metric."""

    metric_id: str
    label: str
    unit: str
    points: list[SeriesPoint] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
class SnapshotGroupRow:
    """One row in a grouped snapshot table, holding per-metric values."""

    id: str
    label: str
    values: dict[str, MetricValue] = field(default_factory=dict)  # keyed by metric id
    meta: dict[str, Any] = field(default_factory=dict)  # free-form extra row data
|
||||
|
||||
|
||||
@dataclass
class HeroCard:
    """Headline summary card shown on the dashboard."""

    metric_id: str
    label: str
    value: float | str | None = None
    unit: str = ""
    accent: str = "neutral"  # visual accent hint for the frontend
    subtitle: str = ""
|
||||
|
||||
|
||||
@dataclass
class SnapshotPayload:
    """Complete realtime snapshot: hero cards, KPIs, grouped rows, status and faults."""

    updated_at: datetime | None = None
    hero_cards: list[HeroCard] = field(default_factory=list)
    kpis: dict[str, MetricValue] = field(default_factory=dict)
    strings: list[SnapshotGroupRow] = field(default_factory=list)
    phases: list[SnapshotGroupRow] = field(default_factory=list)
    status: list[MetricValue] = field(default_factory=list)
    faults: list[str] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
class BucketPoint:
    """Aggregated value for one labelled time bucket."""

    label: str
    start: datetime
    end: datetime
    value: float
|
||||
|
||||
|
||||
@dataclass
class AnalyticsSummary:
    """Summary statistics for a production analytics query."""

    total: float = 0.0
    unit: str = "kWh"
    average_bucket: float = 0.0
    best_bucket_label: str = ""
    best_bucket_value: float = 0.0
    co2_saved_kg: float = 0.0
    # Comparison fields stay None when no comparison range was requested.
    comparison_total: float | None = None
    comparison_delta_pct: float | None = None
|
||||
|
||||
|
||||
@dataclass
class DailyEnergyRecord:
    """Energy total for a single day, with provenance of the value."""

    day: date
    energy_kwh: float
    source: str  # where the value came from
    samples_count: int
    imported_at: datetime | None = None
|
||||
|
||||
|
||||
@dataclass
class HistoricalCoverage:
    """Coverage statistics of imported historical daily records."""

    imported_days: int = 0
    first_day: date | None = None
    last_day: date | None = None
    total_energy_kwh: float = 0.0
    available_days: int = 0
    missing_days: int = 0
    coverage_pct: float | None = None  # None when coverage cannot be computed
|
||||
|
||||
|
||||
@dataclass
class HistoricalChunkProgress:
    """Progress of one chunk within a historical import job."""

    chunk_index: int
    total_chunks: int
    start_date: date
    end_date: date
    processed_days: int = 0
    imported_days: int = 0
    skipped_days: int = 0
    energy_kwh: float = 0.0
    state: str = "pending"
    started_at: datetime | None = None
    finished_at: datetime | None = None
    duration_seconds: float | None = None
    note: str = ""
|
||||
|
||||
|
||||
@dataclass
class HistoricalActivityEvent:
    """A log-style event emitted during historical import."""

    timestamp: datetime
    level: str = "info"
    title: str = ""
    message: str = ""
    day: date | None = None  # day being processed, if the event concerns one
    chunk_index: int | None = None
|
||||
|
||||
|
||||
@dataclass
class HistoricalImportStatus:
    """Full status of the historical import subsystem.

    Combines current job progress, timing estimates, the last error,
    overall coverage and recent chunk/event history.
    """

    enabled: bool = True
    running: bool = False
    state: str = "idle"
    job_id: str | None = None
    started_at: datetime | None = None
    finished_at: datetime | None = None
    # Range requested for the current/last job
    requested_start_date: date | None = None
    requested_end_date: date | None = None
    # Day-level progress counters
    total_days: int = 0
    processed_days: int = 0
    imported_days: int = 0
    skipped_days: int = 0
    # Chunking progress
    chunk_days: int = 1
    total_chunks: int = 0
    active_chunk_index: int = 0
    current_date: date | None = None
    current_chunk_start: date | None = None
    current_chunk_end: date | None = None
    # Timing / throughput estimates
    elapsed_seconds: float | None = None
    estimated_remaining_seconds: float | None = None
    avg_days_per_minute: float | None = None
    last_error: str | None = None
    message: str = ""
    coverage: HistoricalCoverage = field(default_factory=HistoricalCoverage)
    available_start_date: date | None = None
    available_end_date: date | None = None
    default_chunk_days: int = 1
    recent_chunks: list[HistoricalChunkProgress] = field(default_factory=list)
    recent_events: list[HistoricalActivityEvent] = field(default_factory=list)
|
||||
15
backend/app/routes/__init__.py
Normal file
15
backend/app/routes/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
||||
# Blueprint re-exports consumed by app_factory.create_app().
from .analytics import analytics_blueprint
from .auth import auth_blueprint
from .dashboard import dashboard_blueprint
from .health import health_blueprint
from .historical import historical_blueprint
from .realtime import realtime_blueprint

__all__ = [
    "auth_blueprint",
    "analytics_blueprint",
    "dashboard_blueprint",
    "health_blueprint",
    "historical_blueprint",
    "realtime_blueprint",
]
|
||||
63
backend/app/routes/analytics.py
Normal file
63
backend/app/routes/analytics.py
Normal file
@@ -0,0 +1,63 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
|
||||
from flask import Blueprint, jsonify, request
|
||||
|
||||
from app.services.analytics import AnalyticsService
|
||||
from app.utils.serialization import to_plain
|
||||
|
||||
analytics_blueprint = Blueprint("analytics", __name__)
|
||||
service = AnalyticsService()
|
||||
|
||||
|
||||
@analytics_blueprint.get("/analytics/production")
def production_analytics():
    """Return production analytics for a time range.

    Query params: ``range``, ``bucket``, ``compare``, ``start``, ``end`` and
    an optional JSON-encoded ``compare_ranges`` list. Returns 400 on any
    invalid input.
    """
    range_key = request.args.get("range", "30d")
    bucket = request.args.get("bucket", "day")
    compare = request.args.get("compare", "none")
    start = request.args.get("start")
    end = request.args.get("end")
    compare_ranges_raw = request.args.get("compare_ranges", "")
    compare_ranges = []
    if compare_ranges_raw:
        try:
            compare_ranges = json.loads(compare_ranges_raw)
        except json.JSONDecodeError as exc:
            return jsonify({"detail": f"Invalid compare_ranges payload: {exc}"}), 400
        # Robustness fix: downstream code iterates the decoded value as a list
        # of dicts, so any other valid-JSON shape (string, number, object)
        # must be rejected here with a clean 400 instead of a 500 later.
        if not isinstance(compare_ranges, list):
            return jsonify({"detail": "Invalid compare_ranges payload: expected a JSON list"}), 400
    try:
        return jsonify(
            to_plain(
                service.production(
                    range_key=range_key,
                    bucket=bucket,
                    compare_mode=compare,
                    start=start,
                    end=end,
                    compare_ranges=compare_ranges,
                )
            )
        )
    except ValueError as exc:
        # Validation errors from the service (bad range/bucket/dates).
        return jsonify({"detail": str(exc)}), 400
|
||||
|
||||
|
||||
@analytics_blueprint.get("/analytics/distribution")
def production_distribution():
    """Return production distribution for a time range; 400 on invalid input."""
    range_key = request.args.get("range", "30d")
    bucket = request.args.get("bucket", "day")
    start = request.args.get("start")
    end = request.args.get("end")
    try:
        return jsonify(
            to_plain(
                service.distribution(
                    range_key=range_key,
                    bucket=bucket,
                    start=start,
                    end=end,
                )
            )
        )
    except ValueError as exc:
        # Validation errors from the service (bad range/bucket/dates).
        return jsonify({"detail": str(exc)}), 400
|
||||
80
backend/app/routes/auth.py
Normal file
80
backend/app/routes/auth.py
Normal file
@@ -0,0 +1,80 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from flask import Blueprint, jsonify, request
|
||||
|
||||
from app.services.auth import get_auth_service
|
||||
from app.utils.serialization import to_plain
|
||||
|
||||
|
||||
auth_blueprint = Blueprint("auth", __name__)
|
||||
service = get_auth_service()
|
||||
|
||||
|
||||
@auth_blueprint.get("/auth/status")
def auth_status():
    """Return the current authentication status as JSON."""
    return jsonify(to_plain(service.status()))
|
||||
|
||||
|
||||
@auth_blueprint.post("/auth/login")
def auth_login():
    """Log in with a JSON body of ``username``/``password``; 401 on failure."""
    payload = request.get_json(silent=True) or {}
    try:
        status = service.login(payload.get("username", ""), payload.get("password", ""))
        return jsonify(to_plain(status))
    except ValueError as exc:
        # The auth service reports invalid credentials as ValueError.
        return jsonify({"detail": str(exc)}), 401
|
||||
|
||||
|
||||
@auth_blueprint.post("/auth/logout")
def auth_logout():
    """End the current session and return the resulting auth status."""
    return jsonify(to_plain(service.logout()))
|
||||
|
||||
|
||||
@auth_blueprint.get("/auth/users")
def list_users():
    """List all user accounts (admin only; 403 otherwise)."""
    try:
        service.require_admin()
        return jsonify(to_plain({"items": service.list_users()}))
    except PermissionError as exc:
        return jsonify({"detail": str(exc)}), 403
|
||||
|
||||
|
||||
@auth_blueprint.post("/auth/users")
def create_user():
    """Create a new account (admin only).

    JSON body: ``username``, ``password``, ``role`` (default "user"),
    ``display_name``. Returns 403 without admin rights, 400 on validation
    errors.
    """
    payload = request.get_json(silent=True) or {}
    try:
        service.require_admin()
        user = service.create_user(
            username=payload.get("username", ""),
            password=payload.get("password", ""),
            role=payload.get("role", "user"),
            # Fall back to the username when no display name was provided.
            display_name=payload.get("display_name") or payload.get("username") or "",
        )
        return jsonify(to_plain({
            "username": user.username,
            "display_name": user.display_name,
            "role": user.role,
            "is_active": user.is_active,
        }))
    except PermissionError as exc:
        return jsonify({"detail": str(exc)}), 403
    except ValueError as exc:
        return jsonify({"detail": str(exc)}), 400
|
||||
|
||||
|
||||
@auth_blueprint.post("/auth/users/<username>/reset-password")
def reset_password(username: str):
    """Set a new password for *username* (admin only; 403/400 on failure)."""
    payload = request.get_json(silent=True) or {}
    try:
        service.require_admin()
        user = service.reset_password(username=username, new_password=payload.get("password", ""))
        return jsonify(to_plain({
            "username": user.username,
            "display_name": user.display_name,
            "role": user.role,
            "is_active": user.is_active,
        }))
    except PermissionError as exc:
        return jsonify({"detail": str(exc)}), 403
    except ValueError as exc:
        return jsonify({"detail": str(exc)}), 400
|
||||
80
backend/app/routes/dashboard.py
Normal file
80
backend/app/routes/dashboard.py
Normal file
@@ -0,0 +1,80 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from flask import Blueprint, jsonify, request
|
||||
|
||||
from app.core_settings import get_settings
|
||||
from app.services.capabilities import build_capabilities
|
||||
from app.services.catalog import get_catalog
|
||||
from app.services.kiosk_settings import get_kiosk_settings_service
|
||||
from app.services.auth import get_auth_service
|
||||
from app.utils.serialization import to_plain
|
||||
|
||||
|
||||
dashboard_blueprint = Blueprint("dashboard", __name__)
|
||||
|
||||
|
||||
@dashboard_blueprint.get("/dashboard/config")
def dashboard_config():
    """Return the static dashboard configuration consumed by the frontend.

    Combines app/site identity, initial UI defaults, the auth flag, i18n
    strings, computed capabilities and the list of visible entity metrics.
    """
    settings = get_settings()
    catalog = get_catalog()
    capabilities = build_capabilities(catalog)

    payload = {
        "app": {
            "name": settings.app_name,
            "version": settings.version,
            "site_name": settings.site_name,
            "timezone": settings.timezone,
            "installed_power_kwp": settings.installed_power_kwp,
        },
        # Initial UI state defaults.
        "defaults": {
            "realtime_range": settings.realtime["history_default_range"],
            "analytics_range": settings.analytics["default_range"],
            "analytics_bucket": settings.analytics["default_bucket"],
            "tab": settings.frontend_defaults["tab"],
            "theme": settings.frontend_defaults["theme"],
            "language": settings.frontend_defaults["language"],
        },
        "auth": {
            "enabled": settings.auth["enabled"],
        },
        "i18n": settings.i18n,
        "capabilities": capabilities,
        # Metrics the frontend shows in the entity table.
        "visible_entities": [
            {
                "metric_id": metric.id,
                "label": metric.label,
                "entity_id": metric.entity_id,
                "measurement": metric.measurement,
                "unit": metric.unit,
                "kind": metric.kind,
            }
            for metric in catalog.visible_entities()
        ],
    }
    return jsonify(to_plain(payload))
|
||||
|
||||
|
||||
@dashboard_blueprint.get("/dashboard/kiosk-settings")
def dashboard_kiosk_settings():
    """Return kiosk display settings for the requested mode.

    Mode comes from ``?mode=...``; without it, ``publicKiosk=1`` selects
    "public", anything else "private". Returns 400 for an unknown mode.
    """
    requested_mode = request.args.get("mode") or ("public" if request.args.get("publicKiosk") == "1" else "private")
    try:
        payload = get_kiosk_settings_service().get(requested_mode)
        return jsonify(to_plain(payload))
    except ValueError as exc:
        return jsonify({"detail": str(exc)}), 400
|
||||
|
||||
|
||||
@dashboard_blueprint.put("/dashboard/kiosk-settings")
def update_dashboard_kiosk_settings():
    """Update kiosk settings for a mode (admin only; 403/400 on failure)."""
    payload = request.get_json(silent=True) or {}
    mode = payload.get("mode", "private")
    auth_service = get_auth_service()
    try:
        auth_service.require_admin()
        updated = get_kiosk_settings_service().update_from_session(mode, payload)
        return jsonify(to_plain(updated))
    except PermissionError as exc:
        return jsonify({"detail": str(exc)}), 403
    except ValueError as exc:
        return jsonify({"detail": str(exc)}), 400
|
||||
17
backend/app/routes/health.py
Normal file
17
backend/app/routes/health.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from flask import Blueprint, jsonify
|
||||
|
||||
from app.core_settings import get_settings
|
||||
|
||||
health_blueprint = Blueprint("health", __name__)
|
||||
|
||||
|
||||
@health_blueprint.get("/health")
def health():
    """Liveness probe: report status "ok" plus app name and version."""
    settings = get_settings()
    return jsonify({
        "status": "ok",
        "app": settings.app_name,
        "version": settings.version,
    })
|
||||
54
backend/app/routes/historical.py
Normal file
54
backend/app/routes/historical.py
Normal file
@@ -0,0 +1,54 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date
|
||||
|
||||
from flask import Blueprint, jsonify, request
|
||||
|
||||
from app.services.historical_sync import get_historical_sync_service
|
||||
from app.utils.serialization import to_plain
|
||||
|
||||
historical_blueprint = Blueprint("historical", __name__)
|
||||
service = get_historical_sync_service()
|
||||
|
||||
|
||||
@historical_blueprint.get("/historical/status")
def historical_status():
    """Return the current historical import status as JSON."""
    return jsonify(to_plain(service.status()))
|
||||
|
||||
|
||||
@historical_blueprint.post("/historical/start")
def historical_start():
    """Start a historical import job from a JSON body.

    Body keys: ``start_date``/``end_date`` (ISO dates), ``chunk_days``,
    ``force``. Returns the job status, or 400 when validation fails or the
    service refuses to start.
    """
    payload = request.get_json(silent=True) or {}
    try:
        status = service.start(
            start_date=_parse_date(payload.get("start_date")),
            end_date=_parse_date(payload.get("end_date")),
            chunk_days=payload.get("chunk_days"),
            force=bool(payload.get("force", False)),
        )
        return jsonify(to_plain(status))
    except (ValueError, RuntimeError) as exc:
        # Both exception types previously had identical, duplicated handlers;
        # they map to the same 400 response.
        return jsonify({"detail": str(exc)}), 400
|
||||
|
||||
|
||||
@historical_blueprint.post("/historical/sync-now")
def historical_sync_now():
    """Trigger an automatic sync run now; 400 when the service cannot start one."""
    try:
        status = service.start(auto=True)
        return jsonify(to_plain(status))
    except RuntimeError as exc:
        return jsonify({"detail": str(exc)}), 400
|
||||
|
||||
|
||||
@historical_blueprint.post("/historical/cancel")
def historical_cancel():
    """Request cancellation of the running import and return the new status."""
    return jsonify(to_plain(service.cancel()))
|
||||
|
||||
|
||||
|
||||
def _parse_date(value: str | None) -> date | None:
|
||||
if not value:
|
||||
return None
|
||||
return date.fromisoformat(value)
|
||||
26
backend/app/routes/realtime.py
Normal file
26
backend/app/routes/realtime.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from flask import Blueprint, jsonify, request
|
||||
|
||||
from app.services.realtime import RealtimeService
|
||||
from app.utils.serialization import to_plain
|
||||
|
||||
realtime_blueprint = Blueprint("realtime", __name__)
|
||||
service = RealtimeService()
|
||||
|
||||
|
||||
@realtime_blueprint.get("/realtime/snapshot")
def realtime_snapshot():
    """Return the current realtime snapshot as JSON."""
    return jsonify(to_plain(service.snapshot()))
|
||||
|
||||
|
||||
@realtime_blueprint.get("/realtime/history")
def realtime_history():
    """Return realtime history for a range, optionally limited to chosen metrics."""
    range_key = request.args.get("range", "6h")
    start = request.args.get("start")
    end = request.args.get("end")
    # "metrics" is a comma-separated list; blank entries are dropped.
    metrics = [item.strip() for item in request.args.get("metrics", "").split(",") if item.strip()]
    try:
        return jsonify(to_plain(service.history(range_key=range_key, start=start, end=end, metric_ids=metrics or None)))
    except ValueError as exc:
        return jsonify({"detail": str(exc)}), 400
|
||||
5
backend/app/services/__init__.py
Normal file
5
backend/app/services/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
# Public re-exports of the service classes.
from .analytics import AnalyticsService
from .historical_sync import HistoricalSyncService
from .realtime import RealtimeService

__all__ = ["AnalyticsService", "HistoricalSyncService", "RealtimeService"]
|
||||
140
backend/app/services/analytics.py
Normal file
140
backend/app/services/analytics.py
Normal file
@@ -0,0 +1,140 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from app.core_settings import AppSettings, get_settings
|
||||
from app.services.catalog import MetricCatalog, get_catalog
|
||||
from app.services.energy import EnergyService
|
||||
from app.services.influx_http import InfluxHTTPService
|
||||
from app.services.metrics import compare_delta_pct
|
||||
from app.utils.time import resolve_window, shift_window
|
||||
|
||||
|
||||
class AnalyticsService:
|
||||
    def __init__(
        self,
        settings: AppSettings | None = None,
        catalog: MetricCatalog | None = None,
        influx: InfluxHTTPService | None = None,
        energy: EnergyService | None = None,
    ) -> None:
        """Wire the service's collaborators.

        Each dependency can be injected (e.g. for testing); otherwise the
        shared settings/catalog singletons are used and the Influx and
        energy services are constructed from them.
        """
        self.settings = settings or get_settings()
        self.catalog = catalog or get_catalog()
        self.influx = influx or InfluxHTTPService(self.settings)
        self.energy = energy or EnergyService(self.settings, self.catalog, self.influx)
|
||||
|
||||
def production(
|
||||
self,
|
||||
range_key: str | None = None,
|
||||
bucket: str | None = None,
|
||||
compare_mode: str = "none",
|
||||
start: str | None = None,
|
||||
end: str | None = None,
|
||||
compare_ranges: list[dict] | None = None,
|
||||
) -> dict:
|
||||
bucket = bucket or self.settings.analytics["default_bucket"]
|
||||
if bucket not in self.settings.analytics["bucket_labels"]:
|
||||
raise ValueError(f"Unsupported bucket: {bucket}")
|
||||
|
||||
window = resolve_window(range_key=range_key, start=start, end=end)
|
||||
current_days = self.energy.daily_records_for_window(window.start, window.end, persist_missing=True)
|
||||
current = self.energy.bucketize_daily(current_days, bucket)
|
||||
total = round(sum(item.value for item in current), 2)
|
||||
|
||||
comparison = []
|
||||
comparison_total = None
|
||||
comparison_delta_pct = None
|
||||
comparisons = []
|
||||
if compare_mode == "custom_multi":
|
||||
for index, item in enumerate(compare_ranges or []):
|
||||
compare_start = item.get("start")
|
||||
compare_end = item.get("end")
|
||||
if not compare_start or not compare_end:
|
||||
continue
|
||||
compare_window = resolve_window(start=compare_start, end=compare_end)
|
||||
comparison_days = self.energy.daily_records_for_window(compare_window.start, compare_window.end, persist_missing=True)
|
||||
comparison_series = self.energy.bucketize_daily(comparison_days, bucket)
|
||||
comparison_total_value = round(sum(point.value for point in comparison_series), 2)
|
||||
comparisons.append({
|
||||
"key": item.get("key") or f"custom_{index + 1}",
|
||||
"label": item.get("label") or f"Custom {index + 1}",
|
||||
"start": compare_window.start,
|
||||
"end": compare_window.end,
|
||||
"total": comparison_total_value,
|
||||
"delta_pct": compare_delta_pct(total, comparison_total_value),
|
||||
"points": comparison_series,
|
||||
})
|
||||
if comparisons:
|
||||
comparison = comparisons[0]["points"]
|
||||
comparison_total = comparisons[0]["total"]
|
||||
comparison_delta_pct = comparisons[0]["delta_pct"]
|
||||
elif compare_mode != "none":
|
||||
compare_window = shift_window(window, compare_mode)
|
||||
comparison_days = self.energy.daily_records_for_window(compare_window.start, compare_window.end, persist_missing=True)
|
||||
comparison = self.energy.bucketize_daily(comparison_days, bucket)
|
||||
comparison_total = round(sum(item.value for item in comparison), 2)
|
||||
comparison_delta_pct = compare_delta_pct(total, comparison_total)
|
||||
comparisons.append({
|
||||
"key": compare_mode,
|
||||
"label": compare_mode,
|
||||
"start": compare_window.start,
|
||||
"end": compare_window.end,
|
||||
"total": comparison_total,
|
||||
"delta_pct": comparison_delta_pct,
|
||||
"points": comparison,
|
||||
})
|
||||
|
||||
average_bucket = round(total / len(current), 2) if current else 0.0
|
||||
best_bucket = max(current, key=lambda item: item.value, default=None)
|
||||
|
||||
return {
|
||||
"unit": "kWh",
|
||||
"bucket": bucket,
|
||||
"compare_mode": compare_mode,
|
||||
"current": current,
|
||||
"comparison": comparison,
|
||||
"comparisons": comparisons,
|
||||
"summary": {
|
||||
"total": total,
|
||||
"unit": "kWh",
|
||||
"average_bucket": average_bucket,
|
||||
"best_bucket_label": best_bucket.label if best_bucket else "",
|
||||
"best_bucket_value": best_bucket.value if best_bucket else 0.0,
|
||||
"co2_saved_kg": round(total * self.settings.co2_factor, 2),
|
||||
"comparison_total": comparison_total,
|
||||
"comparison_delta_pct": comparison_delta_pct,
|
||||
},
|
||||
"meta": {
|
||||
"window": {
|
||||
"start": window.start,
|
||||
"end": window.end,
|
||||
"range_key": window.key,
|
||||
},
|
||||
"source": "sqlite_cache_plus_live_influx",
|
||||
},
|
||||
}
|
||||
|
||||
def distribution(
|
||||
self,
|
||||
range_key: str | None = None,
|
||||
bucket: str | None = None,
|
||||
start: str | None = None,
|
||||
end: str | None = None,
|
||||
) -> dict:
|
||||
payload = self.production(range_key=range_key, bucket=bucket, compare_mode="none", start=start, end=end)
|
||||
current = payload["current"]
|
||||
total = round(sum(item.value for item in current), 2)
|
||||
denominator = total or 1.0
|
||||
return {
|
||||
"unit": payload["unit"],
|
||||
"bucket": payload["bucket"],
|
||||
"total": total,
|
||||
"slices": [
|
||||
{
|
||||
"label": item.label,
|
||||
"value": item.value,
|
||||
"share": round((item.value / denominator) * 100.0, 2),
|
||||
}
|
||||
for item in current
|
||||
if item.value > 0
|
||||
],
|
||||
"meta": payload["meta"],
|
||||
}
|
||||
179
backend/app/services/auth.py
Normal file
179
backend/app/services/auth.py
Normal file
@@ -0,0 +1,179 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
|
||||
from flask import session
|
||||
from werkzeug.security import check_password_hash, generate_password_hash
|
||||
|
||||
from app.core_settings import AppSettings, get_settings
|
||||
from app.storage.auth_users import AuthUser, SQLiteAuthUserRepository
|
||||
|
||||
|
||||
SESSION_USER_KEY = "auth_user"
|
||||
SESSION_DISPLAY_NAME_KEY = "auth_display_name"
|
||||
SESSION_ROLE_KEY = "auth_role"
|
||||
VALID_ROLES = {"admin", "user"}
|
||||
|
||||
|
||||
class AuthService:
    """Session-based authentication: login/logout, user management, app config.

    Users live in SQLite (``SQLiteAuthUserRepository``); when a username is
    not found there, login falls back to a single legacy account defined in
    settings. All validation errors are raised as ValueError with
    user-facing (Polish) messages.
    """

    def __init__(self, settings: AppSettings | None = None) -> None:
        self.settings = settings or get_settings()
        self.user_repository = SQLiteAuthUserRepository(self.settings.storage["sqlite_path"])

    @property
    def enabled(self) -> bool:
        """Whether authentication is turned on in settings."""
        return bool(self.settings.auth["enabled"])

    def status(self) -> dict[str, Any]:
        """Return the auth state for the active session.

        When auth is disabled, everyone is reported as authenticated with
        no user identity attached.
        """
        if not self.enabled:
            return {
                "enabled": False,
                "authenticated": True,
                "user": None,
                "display_name": None,
                "role": None,
            }

        return {
            "enabled": True,
            "authenticated": SESSION_USER_KEY in session,
            "user": session.get(SESSION_USER_KEY),
            "display_name": session.get(SESSION_DISPLAY_NAME_KEY),
            "role": session.get(SESSION_ROLE_KEY),
        }

    def login(self, username: str, password: str) -> dict[str, Any]:
        """Validate credentials and establish a session.

        Repository users are checked first; an unknown username falls back
        to the legacy settings-defined account. Raises ValueError on bad
        credentials or an inactive account.
        """
        if not self.enabled:
            return self.status()

        username = (username or "").strip()
        password = password or ""
        user = self.user_repository.get_by_username(username)

        if user is None:
            # Not in the DB: try the single legacy settings account.
            self._login_legacy_user(username, password)
        else:
            if not user.is_active:
                raise ValueError("Konto jest nieaktywne")
            if not check_password_hash(user.password_hash, password):
                raise ValueError("Niepoprawny login lub haslo")
            self._set_session(user.username, user.display_name, user.role)

        return self.status()

    def logout(self) -> dict[str, Any]:
        """Drop the whole session and report the resulting auth state."""
        session.clear()
        return self.status()

    def list_users(self) -> list[dict[str, Any]]:
        """Return all users as plain dicts (password hashes excluded)."""
        users = self.user_repository.list_users()
        return [
            {
                "username": user.username,
                "display_name": user.display_name,
                "role": user.role,
                "is_active": user.is_active,
                "created_at": user.created_at,
                "updated_at": user.updated_at,
            }
            for user in users
        ]

    def require_admin(self) -> None:
        """Raise PermissionError unless the session holds the admin role.

        No-op when authentication is disabled.
        """
        if not self.enabled:
            return
        if session.get(SESSION_ROLE_KEY) != "admin":
            raise PermissionError("Brak uprawnien administratora")

    def configure_app(self, app) -> None:
        """Apply session/cookie settings from configuration to the Flask app."""
        max_age = int(self.settings.auth["session_max_age_seconds"])
        app.secret_key = self.settings.auth["secret_key"]
        app.config["PERMANENT_SESSION_LIFETIME"] = timedelta(seconds=max_age)
        app.config["SESSION_COOKIE_NAME"] = self.settings.auth["session_cookie_name"]
        app.config["SESSION_COOKIE_HTTPONLY"] = True
        app.config["SESSION_COOKIE_SAMESITE"] = self.settings.auth.get("cookie_samesite", "Lax")
        app.config["SESSION_COOKIE_SECURE"] = bool(self.settings.auth.get("cookie_secure", False))

    def create_user(self, *, username: str, password: str, role: str, display_name: str | None = None) -> AuthUser:
        """Create or overwrite a user after validating all fields.

        Raises ValueError on an empty username/display name, an invalid
        role, or a too-short password.
        """
        normalized_username = self._normalize_username(username)
        normalized_role = self._normalize_role(role)
        clean_password = self._validate_password(password)
        resolved_display_name = (display_name or normalized_username).strip()
        if not resolved_display_name:
            raise ValueError("Display name nie moze byc pusty")
        return self.user_repository.upsert_user(
            username=normalized_username,
            password_hash=generate_password_hash(clean_password),
            role=normalized_role,
            display_name=resolved_display_name,
            is_active=True,
        )

    def reset_password(self, *, username: str, new_password: str) -> AuthUser:
        """Set a new password for an existing user.

        Raises ValueError when the user does not exist or the password is
        too short.
        """
        normalized_username = self._normalize_username(username)
        clean_password = self._validate_password(new_password)
        user = self.user_repository.update_password(
            normalized_username,
            generate_password_hash(clean_password),
        )
        if user is None:
            raise ValueError(f"Uzytkownik '{normalized_username}' nie istnieje")
        return user

    def _login_legacy_user(self, username: str, password: str) -> None:
        """Authenticate against the single settings-defined account.

        Prefers a configured password hash; falls back to a plain-text
        comparison when only a raw password is configured.
        """
        expected_username = self.settings.auth["username"]
        expected_password = self.settings.auth["password"]
        expected_password_hash = self.settings.auth.get("password_hash")

        if username != expected_username:
            raise ValueError("Niepoprawny login lub haslo")

        if expected_password_hash:
            password_ok = check_password_hash(expected_password_hash, password)
        else:
            password_ok = password == expected_password

        if not password_ok:
            raise ValueError("Niepoprawny login lub haslo")

        self._set_session(
            expected_username,
            self.settings.auth.get("display_name") or expected_username,
            self.settings.auth.get("role", "admin"),
        )

    def _set_session(self, username: str, display_name: str, role: str) -> None:
        """Replace the session contents with the given identity."""
        # clear() first so no stale keys from a previous login survive.
        session.clear()
        session.permanent = True
        session[SESSION_USER_KEY] = username
        session[SESSION_DISPLAY_NAME_KEY] = display_name
        session[SESSION_ROLE_KEY] = role

    def _normalize_username(self, username: str) -> str:
        """Strip whitespace; raise ValueError when the result is empty."""
        normalized = (username or "").strip()
        if not normalized:
            raise ValueError("Username nie moze byc pusty")
        return normalized

    def _normalize_role(self, role: str) -> str:
        """Lower-case the role and ensure it is one of VALID_ROLES."""
        normalized = (role or "").strip().lower()
        if normalized not in VALID_ROLES:
            raise ValueError("Rola musi byc jedna z: admin, user")
        return normalized

    def _validate_password(self, password: str) -> str:
        """Enforce the minimum password length (8 characters)."""
        clean_password = password or ""
        if len(clean_password) < 8:
            raise ValueError("Haslo musi miec co najmniej 8 znakow")
        return clean_password
||||
|
||||
|
||||
# Process-wide AuthService singleton, created lazily on first access.
_auth_service: AuthService | None = None


def get_auth_service() -> AuthService:
    """Return the shared AuthService instance, creating it on first call."""
    global _auth_service
    if _auth_service is None:
        _auth_service = AuthService()
    return _auth_service
|
||||
43
backend/app/services/capabilities.py
Normal file
43
backend/app/services/capabilities.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from app.services.catalog import MetricCatalog, get_catalog
|
||||
|
||||
|
||||
|
||||
def build_capabilities(catalog: MetricCatalog | None = None) -> dict:
    """Describe which dashboard features the current configuration supports.

    Returns a plain dict of feature flags, counts and enumerations (ranges,
    buckets, comparison modes, history options) derived from settings.
    """
    resolved = catalog if catalog else get_catalog()
    cfg = resolved.settings

    # A configured string row is usable when at least one of its metric ids
    # resolves to a known metric definition.
    active_strings = [
        row
        for row in cfg.strings
        if any(resolved.safe_get(mid) for mid in row.get("metrics", {}).values())
    ]

    history_cfg = cfg.history
    return {
        "modules": cfg.modules,
        "strings_enabled": cfg.modules.get("strings", False) and len(active_strings) > 0,
        "strings_count": len(active_strings),
        # Per-phase data is not wired up yet; advertise it as unavailable.
        "phases_enabled": False,
        "phases_count": 0,
        "analytics_enabled": cfg.modules.get("analytics", False),
        "realtime_enabled": cfg.modules.get("realtime_overview", False),
        "comparison_modes": list(cfg.analytics["compare_modes"].keys()),
        "ranges": [
            {"key": key, "label": spec["label"]}
            for key, spec in cfg.time_ranges.items()
        ],
        "buckets": [
            {"key": key, "label": label}
            for key, label in cfg.analytics["bucket_labels"].items()
        ],
        "historical_import_enabled": cfg.modules.get("historical_import", False),
        "history": {
            "enabled": history_cfg.get("enabled", True),
            "default_chunk_days": history_cfg.get("default_chunk_days", 7),
            "auto_sync_enabled": history_cfg.get("auto_sync_enabled", False),
            "auto_sync_interval_minutes": history_cfg.get("auto_sync_interval_minutes", 30),
        },
    }
|
||||
27
backend/app/services/catalog.py
Normal file
27
backend/app/services/catalog.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from app.core_settings import AppSettings, get_settings
|
||||
from app.models.definitions import MetricDefinition
|
||||
|
||||
|
||||
@dataclass
class MetricCatalog:
    """Lookup helper over the metric definitions declared in settings."""

    settings: AppSettings

    def get(self, metric_id: str) -> MetricDefinition:
        """Return the definition for *metric_id*; raise KeyError if unknown."""
        try:
            return self.settings.metrics[metric_id]
        except KeyError:
            raise KeyError(f"Unknown metric: {metric_id}") from None

    def safe_get(self, metric_id: str) -> MetricDefinition | None:
        """Return the definition for *metric_id*, or None when not declared."""
        return self.settings.metrics.get(metric_id)

    def visible_entities(self) -> list[MetricDefinition]:
        """Return definitions for visible-table ids that are actually declared.

        Unknown ids in the visible-entity table are skipped silently.
        """
        known = self.settings.metrics
        rows: list[MetricDefinition] = []
        for metric_id in self.settings.visible_entity_table:
            if metric_id in known:
                rows.append(self.get(metric_id))
        return rows
|
||||
|
||||
|
||||
|
||||
def get_catalog() -> MetricCatalog:
    """Build a MetricCatalog bound to the application settings singleton."""
    return MetricCatalog(settings=get_settings())
|
||||
220
backend/app/services/energy.py
Normal file
220
backend/app/services/energy.py
Normal file
@@ -0,0 +1,220 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass
|
||||
from datetime import date, datetime, time, timedelta
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from app.core_settings import AppSettings, get_settings
|
||||
from app.models.definitions import BucketPoint, DailyEnergyRecord, MetricDefinition, SeriesPoint
|
||||
from app.services.catalog import MetricCatalog, get_catalog
|
||||
from app.services.influx_http import InfluxHTTPService
|
||||
from app.services.metrics import to_float
|
||||
from app.storage import SQLiteEnergyRepository
|
||||
from app.utils.time import (
|
||||
choose_counter_interval,
|
||||
choose_power_interval,
|
||||
duration_to_seconds,
|
||||
now_local,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
class EnergySample:
    """One incremental energy observation inside a query window."""

    # Timestamp of the observation (taken from the Influx series point).
    timestamp: datetime
    # Energy delta in kWh attributed to this observation; producers skip
    # non-positive deltas, so emitted values are > 0.
    delta_kwh: float
|
||||
|
||||
|
||||
class EnergyService:
    """Turns Influx counter/power series into per-window and per-day kWh totals.

    Prefers a cumulative production counter metric; falls back to
    integrating an instantaneous power metric. Full past days are cached in
    SQLite; partial or current days are always recomputed from Influx.
    """

    def __init__(
        self,
        settings: AppSettings | None = None,
        catalog: MetricCatalog | None = None,
        influx: InfluxHTTPService | None = None,
        repository: SQLiteEnergyRepository | None = None,
    ) -> None:
        self.settings = settings or get_settings()
        self.catalog = catalog or get_catalog()
        self.influx = influx or InfluxHTTPService(self.settings)
        self.repository = repository or SQLiteEnergyRepository(self.settings.storage["sqlite_path"])
        # All day boundaries are computed in the configured local timezone.
        self.tz = ZoneInfo(self.settings.timezone)

    def total_for_window(self, start: datetime, end: datetime) -> float:
        """Total produced energy (kWh, rounded to 2 decimals) in [start, end]."""
        total, _, _ = self.window_total_with_meta(start, end)
        return total

    def window_total_with_meta(self, start: datetime, end: datetime) -> tuple[float, str, int]:
        """Return ``(total_kwh, source, observations_count)`` for the window.

        ``source`` is "counter", "power_estimated" or "unavailable".
        """
        samples, source, observations_count = self._samples_for_window(start, end)
        return round(sum(sample.delta_kwh for sample in samples), 2), source, observations_count

    def total_for_full_day(self, day: date) -> tuple[float, str, int]:
        """Window total for one full local calendar day."""
        start = datetime.combine(day, time.min, tzinfo=self.tz)
        end = start + timedelta(days=1)
        return self.window_total_with_meta(start, end)

    def samples(self, start: datetime, end: datetime) -> list[EnergySample]:
        """Raw energy deltas for the window (source metadata discarded)."""
        samples, _, _ = self._samples_for_window(start, end)
        return samples

    def daily_records_for_window(
        self,
        start: datetime,
        end: datetime,
        *,
        persist_missing: bool = True,
    ) -> list[DailyEnergyRecord]:
        """Produce one DailyEnergyRecord per local day overlapping [start, end).

        Full days already cached in SQLite are served from the cache;
        everything else is computed from Influx. When ``persist_missing``
        is True, freshly computed *complete past* days with at least one
        observation are written back to the cache.
        """
        start_local = start.astimezone(self.tz)
        end_local = end.astimezone(self.tz)
        if end_local <= start_local:
            return []

        start_day = start_local.date()
        end_day = end_local.date()
        cached = self.repository.fetch_daily_energy(start_day, end_day)
        today_local = now_local().date()
        rows: list[DailyEnergyRecord] = []

        current = start_day
        while current <= end_day:
            day_start = datetime.combine(current, time.min, tzinfo=self.tz)
            day_end = day_start + timedelta(days=1)
            # Clip the day to the requested window (first/last day may be partial).
            segment_start = max(start_local, day_start)
            segment_end = min(end_local, day_end)
            if segment_end <= segment_start:
                current = current + timedelta(days=1)
                continue

            is_full_day = segment_start == day_start and segment_end == day_end
            cached_row = cached.get(current)
            if is_full_day and cached_row is not None:
                # Cache hit: reuse the stored full-day total.
                rows.append(cached_row)
            else:
                total, source, observations_count = self.window_total_with_meta(segment_start, segment_end)
                record = DailyEnergyRecord(
                    day=current,
                    energy_kwh=total,
                    source=source,
                    samples_count=observations_count,
                )
                rows.append(record)
                # Only persist complete days strictly in the past that had data;
                # today's total is still changing and must not be frozen.
                if is_full_day and persist_missing and current < today_local and observations_count > 0:
                    self.repository.upsert_daily_energy(record)
            current = current + timedelta(days=1)

        return rows

    def bucketize_daily(self, records: list[DailyEnergyRecord], bucket: str) -> list[BucketPoint]:
        """Group daily records into day/week/month/year buckets.

        Returns BucketPoints sorted by bucket key with values rounded to 2
        decimals. Raises ValueError for an unsupported ``bucket``.
        """
        grouped: dict[str, dict] = defaultdict(lambda: {"value": 0.0, "start": None, "end": None, "label": ""})

        for record in records:
            start = datetime.combine(record.day, time.min, tzinfo=self.tz)
            if bucket == "day":
                bucket_start = start
                bucket_end = bucket_start + timedelta(days=1)
                key = bucket_start.strftime("%Y-%m-%d")
                label = bucket_start.strftime("%d.%m")
            elif bucket == "week":
                # ISO week: bucket starts on Monday.
                bucket_start = start - timedelta(days=start.weekday())
                bucket_end = bucket_start + timedelta(days=7)
                iso = bucket_start.isocalendar()
                key = f"{iso.year}-W{iso.week:02d}"
                label = key
            elif bucket == "month":
                bucket_start = start.replace(day=1)
                # Month arithmetic by hand: December rolls over into January.
                if bucket_start.month == 12:
                    bucket_end = bucket_start.replace(year=bucket_start.year + 1, month=1)
                else:
                    bucket_end = bucket_start.replace(month=bucket_start.month + 1)
                key = bucket_start.strftime("%Y-%m")
                label = key
            elif bucket == "year":
                bucket_start = start.replace(month=1, day=1)
                bucket_end = bucket_start.replace(year=bucket_start.year + 1)
                key = bucket_start.strftime("%Y")
                label = key
            else:
                raise ValueError(f"Unsupported bucket: {bucket}")

            current = grouped[key]
            current["label"] = label
            current["value"] += record.energy_kwh
            # Track the widest span seen for this bucket key.
            current["start"] = bucket_start if current["start"] is None else min(current["start"], bucket_start)
            current["end"] = bucket_end if current["end"] is None else max(current["end"], bucket_end)

        rows = []
        for key in sorted(grouped.keys()):
            item = grouped[key]
            rows.append(
                BucketPoint(
                    label=item["label"],
                    start=item["start"],
                    end=item["end"],
                    value=round(item["value"], 2),
                )
            )
        return rows

    def _samples_for_window(self, start: datetime, end: datetime) -> tuple[list[EnergySample], str, int]:
        """Pick the best available data source and return its samples.

        Counter metric wins when configured and known to the catalog;
        otherwise the fallback power metric is integrated; otherwise no data.
        """
        counter_metric = self.catalog.safe_get(self.settings.analytics["production_metric_id"])
        if counter_metric is not None:
            samples, observations_count = self._samples_from_counter(counter_metric, start, end)
            return samples, "counter", observations_count

        power_metric = self.catalog.safe_get(self.settings.analytics["fallback_power_metric_id"])
        if power_metric is not None:
            samples, observations_count = self._samples_from_power(power_metric, start, end)
            return samples, "power_estimated", observations_count

        return [], "unavailable", 0

    def _samples_from_counter(self, metric: MetricDefinition, start: datetime, end: datetime) -> tuple[list[EnergySample], int]:
        """Derive energy deltas from a cumulative counter series.

        Seeds the series with the last reading before ``start`` (or the
        first in-window reading) so the first delta is well defined, then
        emits only positive deltas — non-positive ones are dropped, which
        discards counter resets.
        """
        interval = choose_counter_interval(start, end)
        baseline = self.influx.last_before(metric, start)
        series = self.influx.grouped_last_series(metric, start, end, interval)

        points: list[SeriesPoint] = []
        if baseline and baseline.value is not None:
            points.append(SeriesPoint(timestamp=start, value=baseline.value))
        else:
            # No reading before the window: seed with the first in-window
            # value so it contributes a zero delta rather than a huge jump.
            first_value = next((point.value for point in series if point.value is not None), None)
            if first_value is not None:
                points.append(SeriesPoint(timestamp=start, value=first_value))
        points.extend(series)

        samples: list[EnergySample] = []
        previous_value = None
        for point in points:
            current_value = to_float(point.value)
            if current_value is None:
                continue
            if previous_value is None:
                previous_value = current_value
                continue

            delta = current_value - previous_value
            previous_value = current_value
            if delta <= 0:
                continue
            if point.timestamp < start or point.timestamp > end:
                continue
            samples.append(EnergySample(timestamp=point.timestamp, delta_kwh=round(delta, 6)))

        # Count only real (numeric) in-series observations, excluding the seed.
        observations_count = sum(1 for point in series if to_float(point.value) is not None)
        return samples, observations_count

    def _samples_from_power(self, metric: MetricDefinition, start: datetime, end: datetime) -> tuple[list[EnergySample], int]:
        """Estimate energy by integrating mean power over fixed intervals.

        Each mean-power point (watts) is converted to kWh over the interval
        length; non-positive power readings are counted as observations but
        contribute no energy.
        """
        interval = choose_power_interval(start, end)
        interval_seconds = duration_to_seconds(interval)
        points = self.influx.gauge_history(metric, start, end, interval, aggregate="mean")
        samples: list[EnergySample] = []
        observations_count = 0
        for point in points:
            watts = to_float(point.value)
            if watts is None:
                continue
            observations_count += 1
            if watts <= 0:
                continue
            # W * (seconds / 3600) gives Wh; divide by 1000 for kWh.
            delta_kwh = watts * (interval_seconds / 3600.0) / 1000.0
            samples.append(EnergySample(timestamp=point.timestamp, delta_kwh=round(delta_kwh, 6)))
        return samples, observations_count
|
||||
605
backend/app/services/historical_sync.py
Normal file
605
backend/app/services/historical_sync.py
Normal file
@@ -0,0 +1,605 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import threading
|
||||
import uuid
|
||||
from datetime import date, datetime, timedelta
|
||||
from functools import lru_cache
|
||||
from math import ceil
|
||||
from typing import Iterable
|
||||
|
||||
from app.core_settings import AppSettings, get_settings
|
||||
from app.models import (
|
||||
DailyEnergyRecord,
|
||||
HistoricalActivityEvent,
|
||||
HistoricalChunkProgress,
|
||||
HistoricalImportStatus,
|
||||
)
|
||||
from app.services.catalog import MetricCatalog, get_catalog
|
||||
from app.services.energy import EnergyService
|
||||
from app.services.influx_http import InfluxHTTPService
|
||||
from app.storage import SQLiteEnergyRepository
|
||||
from app.utils.time import now_local
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HistoricalSyncService:
|
||||
MAX_RECENT_CHUNKS = 18
|
||||
MAX_RECENT_EVENTS = 40
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
settings: AppSettings | None = None,
|
||||
catalog: MetricCatalog | None = None,
|
||||
influx: InfluxHTTPService | None = None,
|
||||
energy: EnergyService | None = None,
|
||||
repository: SQLiteEnergyRepository | None = None,
|
||||
) -> None:
|
||||
self.settings = settings or get_settings()
|
||||
self.catalog = catalog or get_catalog()
|
||||
self.influx = influx or InfluxHTTPService(self.settings)
|
||||
self.energy = energy or EnergyService(self.settings, self.catalog, self.influx)
|
||||
self.repository = repository or SQLiteEnergyRepository(self.settings.storage["sqlite_path"])
|
||||
self._state_lock = threading.Lock()
|
||||
self._worker: threading.Thread | None = None
|
||||
self._cancel_event = threading.Event()
|
||||
self._scheduler_stop = threading.Event()
|
||||
self._scheduler: threading.Thread | None = None
|
||||
self._available_bounds_cache: tuple[datetime, date | None, date | None] | None = None
|
||||
self._state = HistoricalImportStatus(
|
||||
enabled=self.settings.history.get("enabled", True),
|
||||
state="idle",
|
||||
default_chunk_days=self.settings.history.get("default_chunk_days", 7),
|
||||
)
|
||||
self._refresh_coverage()
|
||||
self._refresh_available_bounds()
|
||||
self._refresh_runtime_metrics()
|
||||
|
||||
def status(self) -> HistoricalImportStatus:
|
||||
with self._state_lock:
|
||||
self._refresh_coverage(lock_held=True)
|
||||
self._refresh_available_bounds(lock_held=True)
|
||||
self._refresh_runtime_metrics(lock_held=True)
|
||||
return copy.deepcopy(self._state)
|
||||
|
||||
def start(
|
||||
self,
|
||||
*,
|
||||
start_date: date | None = None,
|
||||
end_date: date | None = None,
|
||||
chunk_days: int | None = None,
|
||||
force: bool = False,
|
||||
auto: bool = False,
|
||||
) -> HistoricalImportStatus:
|
||||
if not self.settings.history.get("enabled", True):
|
||||
raise RuntimeError("Historical import is disabled")
|
||||
|
||||
chunk_days = max(int(chunk_days or self.settings.history.get("default_chunk_days", 7)), 1)
|
||||
resolved = self._resolve_range(start_date=start_date, end_date=end_date)
|
||||
if resolved is None:
|
||||
with self._state_lock:
|
||||
self._state.running = False
|
||||
self._state.state = "idle"
|
||||
self._state.message = "Brak brakujacych dni do importu."
|
||||
self._state.finished_at = datetime.utcnow()
|
||||
self._refresh_coverage(lock_held=True)
|
||||
self._refresh_available_bounds(lock_held=True)
|
||||
self._refresh_runtime_metrics(lock_held=True)
|
||||
return copy.deepcopy(self._state)
|
||||
|
||||
resolved_start, resolved_end = resolved
|
||||
total_days = (resolved_end - resolved_start).days + 1
|
||||
total_chunks = max(ceil(total_days / chunk_days), 1)
|
||||
start_message = "Start importu archiwalnego" if not auto else "Start automatycznej synchronizacji archiwum"
|
||||
|
||||
with self._state_lock:
|
||||
if self._worker and self._worker.is_alive():
|
||||
return copy.deepcopy(self._state)
|
||||
|
||||
self._cancel_event = threading.Event()
|
||||
self._state = HistoricalImportStatus(
|
||||
enabled=True,
|
||||
running=True,
|
||||
state="running",
|
||||
job_id=uuid.uuid4().hex[:12],
|
||||
started_at=datetime.utcnow(),
|
||||
requested_start_date=resolved_start,
|
||||
requested_end_date=resolved_end,
|
||||
total_days=total_days,
|
||||
chunk_days=chunk_days,
|
||||
total_chunks=total_chunks,
|
||||
active_chunk_index=1,
|
||||
current_chunk_start=resolved_start,
|
||||
current_chunk_end=min(resolved_start + timedelta(days=chunk_days - 1), resolved_end),
|
||||
message=start_message,
|
||||
default_chunk_days=self.settings.history.get("default_chunk_days", 7),
|
||||
recent_chunks=[],
|
||||
recent_events=[],
|
||||
)
|
||||
self._refresh_coverage(lock_held=True)
|
||||
self._refresh_available_bounds(lock_held=True)
|
||||
self._refresh_runtime_metrics(lock_held=True)
|
||||
|
||||
self._worker = threading.Thread(
|
||||
target=self._run_worker,
|
||||
kwargs={
|
||||
"start_date": resolved_start,
|
||||
"end_date": resolved_end,
|
||||
"chunk_days": chunk_days,
|
||||
"force": force,
|
||||
"auto": auto,
|
||||
},
|
||||
name="pv-historical-backfill",
|
||||
daemon=True,
|
||||
)
|
||||
self._worker.start()
|
||||
|
||||
self._record_event(
|
||||
level="info",
|
||||
title="Uruchomiono zadanie",
|
||||
message=f"Zakres {resolved_start.isoformat()} -> {resolved_end.isoformat()}, chunk {chunk_days} dni",
|
||||
)
|
||||
return self.status()
|
||||
|
||||
def cancel(self) -> HistoricalImportStatus:
|
||||
self._cancel_event.set()
|
||||
with self._state_lock:
|
||||
self._state.message = "Anulowanie zadania..."
|
||||
self._refresh_runtime_metrics(lock_held=True)
|
||||
snapshot = copy.deepcopy(self._state)
|
||||
self._record_event(level="warn", title="Anulowanie", message="Uzytkownik poprosil o zatrzymanie zadania.")
|
||||
return snapshot
|
||||
|
||||
def run_blocking(
|
||||
self,
|
||||
*,
|
||||
start_date: date | None = None,
|
||||
end_date: date | None = None,
|
||||
chunk_days: int | None = None,
|
||||
force: bool = False,
|
||||
) -> HistoricalImportStatus:
|
||||
resolved = self._resolve_range(start_date=start_date, end_date=end_date)
|
||||
if resolved is None:
|
||||
return self.status()
|
||||
resolved_start, resolved_end = resolved
|
||||
chunk_days = max(int(chunk_days or self.settings.history.get("default_chunk_days", 7)), 1)
|
||||
total_days = (resolved_end - resolved_start).days + 1
|
||||
total_chunks = max(ceil(total_days / chunk_days), 1)
|
||||
with self._state_lock:
|
||||
self._state = HistoricalImportStatus(
|
||||
enabled=True,
|
||||
running=True,
|
||||
state="running",
|
||||
job_id=uuid.uuid4().hex[:12],
|
||||
started_at=datetime.utcnow(),
|
||||
requested_start_date=resolved_start,
|
||||
requested_end_date=resolved_end,
|
||||
total_days=total_days,
|
||||
chunk_days=chunk_days,
|
||||
total_chunks=total_chunks,
|
||||
default_chunk_days=self.settings.history.get("default_chunk_days", 7),
|
||||
recent_chunks=[],
|
||||
recent_events=[],
|
||||
)
|
||||
self._record_event(
|
||||
level="info",
|
||||
title="Uruchomiono zadanie",
|
||||
message=f"Zakres {resolved_start.isoformat()} -> {resolved_end.isoformat()}, chunk {chunk_days} dni",
|
||||
)
|
||||
self._run_worker(
|
||||
start_date=resolved_start,
|
||||
end_date=resolved_end,
|
||||
chunk_days=chunk_days,
|
||||
force=force,
|
||||
auto=False,
|
||||
)
|
||||
return self.status()
|
||||
|
||||
def start_scheduler_if_enabled(self) -> None:
|
||||
if not self.settings.history.get("enabled", True):
|
||||
return
|
||||
if not self.settings.history.get("auto_sync_enabled", False):
|
||||
return
|
||||
if self._scheduler and self._scheduler.is_alive():
|
||||
return
|
||||
self._scheduler_stop.clear()
|
||||
self._scheduler = threading.Thread(target=self._scheduler_loop, name="pv-history-scheduler", daemon=True)
|
||||
self._scheduler.start()
|
||||
|
||||
def _scheduler_loop(self) -> None:
|
||||
interval_seconds = max(int(self.settings.history.get("auto_sync_interval_minutes", 30)), 1) * 60
|
||||
if self.settings.history.get("auto_sync_on_start", False):
|
||||
try:
|
||||
self.start(auto=True)
|
||||
except Exception as exc:
|
||||
logger.warning("Unable to auto-start historical sync: %s", exc)
|
||||
|
||||
while not self._scheduler_stop.wait(interval_seconds):
|
||||
try:
|
||||
if self._worker and self._worker.is_alive():
|
||||
continue
|
||||
self.start(auto=True)
|
||||
except Exception as exc:
|
||||
logger.warning("Historical scheduler cycle failed: %s", exc)
|
||||
|
||||
    def _run_worker(
        self,
        *,
        start_date: date,
        end_date: date,
        chunk_days: int,
        force: bool,
        auto: bool,
    ) -> None:
        """Process the requested date range chunk by chunk on the worker thread.

        Walks [start_date, end_date] in inclusive windows of ``chunk_days``
        days, importing each via :meth:`_process_chunk`.  Honours the cancel
        event before and inside each chunk, records progress events
        (user-facing strings are Polish) and finishes the run in a terminal
        state: completed / cancelled / failed.
        """
        total_chunks = max(ceil(((end_date - start_date).days + 1) / chunk_days), 1)
        try:
            chunk_index = 0
            chunk_start = start_date
            while chunk_start <= end_date:
                # Cancellation requested before this chunk started.
                if self._cancel_event.is_set():
                    self._record_event(level="warn", title="Anulowano", message="Import archiwalny anulowany przez uzytkownika.")
                    self._finish("cancelled", running=False, message="Import archiwalny anulowany przez uzytkownika.")
                    return

                chunk_index += 1
                # Windows are inclusive on both ends, hence chunk_days - 1.
                chunk_end = min(chunk_start + timedelta(days=chunk_days - 1), end_date)
                self._update_chunk(chunk_index, total_chunks, chunk_start, chunk_end)
                imported, skipped, energy_kwh, cancelled = self._process_chunk(
                    chunk_index=chunk_index,
                    start_day=chunk_start,
                    end_day=chunk_end,
                    force=force,
                )
                if cancelled:
                    # Cancellation hit mid-chunk: close the chunk with partial stats.
                    self._close_chunk(
                        chunk_index,
                        imported_days=imported,
                        skipped_days=skipped,
                        energy_kwh=energy_kwh,
                        state="cancelled",
                        note="Chunk zatrzymany podczas przetwarzania",
                    )
                    self._record_event(level="warn", title="Anulowano", message="Import archiwalny anulowany przez uzytkownika.")
                    self._finish("cancelled", running=False, message="Import archiwalny anulowany przez uzytkownika.")
                    return

                self._close_chunk(
                    chunk_index,
                    imported_days=imported,
                    skipped_days=skipped,
                    energy_kwh=energy_kwh,
                    state="completed",
                    note=f"Chunk zakonczony: import {imported}, pominiete {skipped}",
                )
                self._record_event(
                    level="success",
                    title=f"Chunk {chunk_index}/{total_chunks} zakonczony",
                    message=f"Zakres {chunk_start.isoformat()} -> {chunk_end.isoformat()}, import {imported}, pominiete {skipped}, energia {energy_kwh:.2f} kWh",
                    chunk_index=chunk_index,
                )
                chunk_start = chunk_end + timedelta(days=1)

            final_message = "Synchronizacja archiwalna zakonczona" if auto else "Import archiwalny zakonczony"
            self._record_event(level="success", title="Zakonczono", message=final_message)
            self._finish("completed", running=False, message=final_message)
        except Exception as exc:
            # The worker runs on a daemon thread: failures must be surfaced
            # through state + events, not allowed to die silently.
            logger.exception("Historical import failed")
            self._record_event(level="error", title="Blad importu", message=str(exc))
            self._finish("failed", running=False, message="Import archiwalny zakonczyl sie bledem.", last_error=str(exc))
|
||||
|
||||
    def _process_chunk(self, *, chunk_index: int, start_day: date, end_day: date, force: bool) -> tuple[int, int, float, bool]:
        """Import each day in [start_day, end_day]; return (imported, skipped, kWh, cancelled).

        A day is skipped when it already exists in the cache (unless *force*)
        or when InfluxDB holds no samples for it.  ``cancelled`` is True when
        the cancel event interrupted the loop mid-chunk.
        """
        imported_days = 0
        skipped_days = 0
        energy_kwh = 0.0

        for day in self._date_range(start_day, end_day):
            if self._cancel_event.is_set():
                return imported_days, skipped_days, energy_kwh, True

            # Respect the existing cache unless the caller forces a re-import.
            if not force and self.repository.has_day(day):
                skipped_days += 1
                self._advance_day(
                    day,
                    imported=False,
                    message=f"Pominieto {day.isoformat()} - dzien juz istnieje w cache",
                    level="warn",
                    title="Pominieto dzien",
                    chunk_index=chunk_index,
                )
                continue

            total, source, samples_count = self.energy.total_for_full_day(day)
            if samples_count <= 0:
                # No raw samples: nothing to aggregate for this day.
                skipped_days += 1
                self._advance_day(
                    day,
                    imported=False,
                    message=f"Pominieto {day.isoformat()} - brak probek w InfluxDB",
                    level="warn",
                    title="Brak probek",
                    chunk_index=chunk_index,
                )
                continue

            self.repository.upsert_daily_energy(
                DailyEnergyRecord(
                    day=day,
                    energy_kwh=total,
                    source=source,
                    samples_count=samples_count,
                )
            )
            imported_days += 1
            energy_kwh += total
            self._advance_day(
                day,
                imported=True,
                message=f"Zaimportowano {day.isoformat()} ({total:.2f} kWh)",
                level="success",
                title="Zaimportowano dzien",
                chunk_index=chunk_index,
                energy_kwh=total,
            )

        return imported_days, skipped_days, round(energy_kwh, 3), False
|
||||
|
||||
    def _advance_day(
        self,
        day: date,
        *,
        imported: bool,
        message: str,
        level: str,
        title: str,
        chunk_index: int,
        energy_kwh: float | None = None,
    ) -> None:
        """Record per-day progress on the shared state, then emit an activity event.

        State mutation happens under the state lock; the event is recorded
        after releasing it because _record_event acquires the same lock itself
        (NOTE(review): assumes _state_lock is not reentrant — confirm in __init__).
        """
        with self._state_lock:
            self._state.processed_days += 1
            if imported:
                self._state.imported_days += 1
            else:
                self._state.skipped_days += 1
            self._state.current_date = day
            self._state.message = message
            # lock_held=True: we already own _state_lock here.
            self._refresh_coverage(lock_held=True)
            self._refresh_runtime_metrics(lock_held=True)
        suffix = f" Energia: {energy_kwh:.2f} kWh." if imported and energy_kwh is not None else ""
        self._record_event(
            level=level,
            title=title,
            # Ensure exactly one terminating period before appending the suffix.
            message=f"{message}.{suffix}" if not message.endswith(".") else f"{message}{suffix}",
            day=day,
            chunk_index=chunk_index,
        )
|
||||
|
||||
    def _update_chunk(self, chunk_index: int, total_chunks: int, chunk_start: date, chunk_end: date) -> None:
        """Open a new chunk: register it as running on the state and announce it."""
        chunk = HistoricalChunkProgress(
            chunk_index=chunk_index,
            total_chunks=total_chunks,
            start_date=chunk_start,
            end_date=chunk_end,
            state="running",
            started_at=datetime.utcnow(),
            note=f"Aktywny chunk {chunk_start.isoformat()} -> {chunk_end.isoformat()}",
        )
        with self._state_lock:
            self._state.current_chunk_start = chunk_start
            self._state.current_chunk_end = chunk_end
            self._state.active_chunk_index = chunk_index
            self._state.message = f"Przetwarzanie zakresu {chunk_start.isoformat()} -> {chunk_end.isoformat()}"
            self._upsert_chunk_locked(chunk)
            self._refresh_runtime_metrics(lock_held=True)
        # Event emission re-acquires the state lock, so it happens after the block.
        self._record_event(
            level="info",
            title=f"Chunk {chunk_index}/{total_chunks}",
            message=f"Start zakresu {chunk_start.isoformat()} -> {chunk_end.isoformat()}",
            chunk_index=chunk_index,
        )
|
||||
|
||||
    def _close_chunk(
        self,
        chunk_index: int,
        *,
        imported_days: int,
        skipped_days: int,
        energy_kwh: float,
        state: str,
        note: str,
    ) -> None:
        """Finalize a chunk's progress entry with totals, duration and terminal state."""
        with self._state_lock:
            existing = self._find_chunk_locked(chunk_index)
            # Prefer the start time captured when the chunk was opened.
            started_at = existing.started_at if existing and existing.started_at else datetime.utcnow()
            finished_at = datetime.utcnow()
            processed_days = imported_days + skipped_days
            duration_seconds = max((finished_at - started_at).total_seconds(), 0.0)
            chunk = HistoricalChunkProgress(
                chunk_index=chunk_index,
                total_chunks=self._state.total_chunks,
                # Fall back through current/requested range when the chunk was never registered.
                start_date=existing.start_date if existing else self._state.current_chunk_start or self._state.requested_start_date or date.today(),
                end_date=existing.end_date if existing else self._state.current_chunk_end or self._state.requested_end_date or date.today(),
                processed_days=processed_days,
                imported_days=imported_days,
                skipped_days=skipped_days,
                energy_kwh=round(energy_kwh, 3),
                state=state,
                started_at=started_at,
                finished_at=finished_at,
                duration_seconds=round(duration_seconds, 2),
                note=note,
            )
            self._upsert_chunk_locked(chunk)
            if state != "running":
                self._state.message = note
            self._refresh_runtime_metrics(lock_held=True)
|
||||
|
||||
    def _finish(
        self,
        state: str,
        *,
        running: bool,
        message: str,
        last_error: str | None = None,
    ) -> None:
        """Move the import into a terminal state and refresh all derived metrics."""
        with self._state_lock:
            self._state.running = running
            self._state.state = state
            self._state.finished_at = datetime.utcnow()
            self._state.last_error = last_error
            self._state.message = message
            self._state.active_chunk_index = 0  # no chunk is active any more
            self._refresh_coverage(lock_held=True)
            self._refresh_available_bounds(lock_held=True)
            self._refresh_runtime_metrics(lock_held=True)
|
||||
|
||||
    def _resolve_range(self, *, start_date: date | None, end_date: date | None) -> tuple[date, date] | None:
        """Resolve the effective import range; None when there is nothing to import.

        End defaults to today or yesterday depending on include_today_in_sync.
        A missing start falls back through: the day after cached coverage ->
        the configured bootstrap date -> the first day with data in InfluxDB
        -> the resolved end date.
        """
        today = now_local().date()
        include_today = self.settings.history.get("include_today_in_sync", False)
        default_end = today if include_today else today - timedelta(days=1)
        resolved_end = end_date or default_end

        if start_date is None:
            coverage = self.repository.coverage()
            if coverage.last_day:
                # Resume right after the newest cached day.
                resolved_start = coverage.last_day + timedelta(days=1)
            else:
                bootstrap_start = self.settings.history.get("bootstrap_start_date")
                if bootstrap_start:
                    resolved_start = date.fromisoformat(bootstrap_start)
                else:
                    available_start, _ = self._available_bounds()
                    resolved_start = available_start or resolved_end
        else:
            resolved_start = start_date

        if resolved_start > resolved_end:
            # Cache already up to date (or the caller passed an inverted range).
            return None
        return resolved_start, resolved_end
|
||||
|
||||
    def _available_bounds(self) -> tuple[date | None, date | None]:
        """Return first/last local dates with data in InfluxDB (cached for 5 minutes)."""
        now_utc = datetime.utcnow()
        cached = self._available_bounds_cache
        if cached and (now_utc - cached[0]).total_seconds() < 300:
            return cached[1], cached[2]

        available_start: date | None = None
        available_end: date | None = None
        # Prefer the cumulative energy metric; fall back to instantaneous power.
        metric = self.catalog.safe_get(self.settings.analytics.get("production_metric_id", "energy_total"))
        fallback = self.catalog.safe_get(self.settings.analytics.get("fallback_power_metric_id", "ac_power"))
        source_metric = metric or fallback
        if source_metric is not None:
            first_point = self.influx.first_value(source_metric)
            last_point = self.influx.last_value(source_metric)
            # Convert sample timestamps into local calendar days.
            available_start = first_point.timestamp.astimezone(self.energy.tz).date() if first_point else None
            available_end = last_point.timestamp.astimezone(self.energy.tz).date() if last_point else None
        self._available_bounds_cache = (now_utc, available_start, available_end)
        return available_start, available_end
|
||||
|
||||
    def _refresh_coverage(self, *, lock_held: bool = False) -> None:
        """Recompute cache coverage statistics and publish them on the shared state.

        Pass ``lock_held=True`` when the caller already owns _state_lock so the
        lock is not acquired a second time.
        """
        coverage = self.repository.coverage()
        available_start, available_end = self._available_bounds()
        if available_start and available_end and available_start <= available_end:
            available_days = (available_end - available_start).days + 1
            missing_days = self.repository.count_missing_days(available_start, available_end)
            coverage.available_days = available_days
            coverage.missing_days = missing_days
            imported_in_range = max(available_days - missing_days, 0)
            coverage.coverage_pct = round((imported_in_range / available_days) * 100, 1) if available_days > 0 else None
        else:
            # No data in InfluxDB at all: the coverage percentage is undefined.
            coverage.available_days = 0
            coverage.missing_days = 0
            coverage.coverage_pct = None

        if lock_held:
            self._state.coverage = coverage
        else:
            with self._state_lock:
                self._state.coverage = coverage
|
||||
|
||||
def _refresh_available_bounds(self, *, lock_held: bool = False) -> None:
|
||||
available_start, available_end = self._available_bounds()
|
||||
if lock_held:
|
||||
self._state.available_start_date = available_start
|
||||
self._state.available_end_date = available_end
|
||||
else:
|
||||
with self._state_lock:
|
||||
self._state.available_start_date = available_start
|
||||
self._state.available_end_date = available_end
|
||||
|
||||
    def _refresh_runtime_metrics(self, *, lock_held: bool = False) -> None:
        """Recompute elapsed time, throughput and ETA from the current state."""
        def apply() -> None:
            if self._state.started_at is None:
                # No run has ever started: clear all derived metrics.
                self._state.elapsed_seconds = None
                self._state.estimated_remaining_seconds = None
                self._state.avg_days_per_minute = None
                return

            # While running, measure against "now"; afterwards freeze at finished_at.
            end_reference = datetime.utcnow() if self._state.running or self._state.finished_at is None else self._state.finished_at
            elapsed_seconds = max((end_reference - self._state.started_at).total_seconds(), 0.0)
            self._state.elapsed_seconds = round(elapsed_seconds, 1)

            if self._state.processed_days > 0 and elapsed_seconds > 0:
                avg_days_per_minute = (self._state.processed_days / elapsed_seconds) * 60
                remaining_days = max(self._state.total_days - self._state.processed_days, 0)
                # Linear extrapolation: remaining work at the observed pace.
                estimated_remaining = (remaining_days / self._state.processed_days) * elapsed_seconds
                self._state.avg_days_per_minute = round(avg_days_per_minute, 2)
                self._state.estimated_remaining_seconds = round(estimated_remaining, 1) if self._state.running else 0.0
            else:
                self._state.avg_days_per_minute = None
                self._state.estimated_remaining_seconds = None if self._state.running else 0.0

        if lock_held:
            apply()
        else:
            with self._state_lock:
                apply()
|
||||
|
||||
    def _record_event(
        self,
        *,
        level: str,
        title: str,
        message: str,
        day: date | None = None,
        chunk_index: int | None = None,
    ) -> None:
        """Append an activity event, trimming the buffer to MAX_RECENT_EVENTS."""
        event = HistoricalActivityEvent(
            timestamp=datetime.utcnow(),
            level=level,
            title=title,
            message=message,
            day=day,
            chunk_index=chunk_index,
        )
        with self._state_lock:
            self._state.recent_events.append(event)
            # Keep only the newest events; rebinding the slice bounds memory use.
            self._state.recent_events = self._state.recent_events[-self.MAX_RECENT_EVENTS :]
|
||||
|
||||
def _find_chunk_locked(self, chunk_index: int) -> HistoricalChunkProgress | None:
|
||||
for chunk in self._state.recent_chunks:
|
||||
if chunk.chunk_index == chunk_index:
|
||||
return chunk
|
||||
return None
|
||||
|
||||
def _upsert_chunk_locked(self, chunk: HistoricalChunkProgress) -> None:
|
||||
for index, existing in enumerate(self._state.recent_chunks):
|
||||
if existing.chunk_index == chunk.chunk_index:
|
||||
self._state.recent_chunks[index] = chunk
|
||||
break
|
||||
else:
|
||||
self._state.recent_chunks.append(chunk)
|
||||
self._state.recent_chunks = self._state.recent_chunks[-self.MAX_RECENT_CHUNKS :]
|
||||
|
||||
@staticmethod
|
||||
def _date_range(start_day: date, end_day: date) -> Iterable[date]:
|
||||
current = start_day
|
||||
while current <= end_day:
|
||||
yield current
|
||||
current = current + timedelta(days=1)
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
def get_historical_sync_service() -> HistoricalSyncService:
    """Return the process-wide HistoricalSyncService singleton (created lazily)."""
    return HistoricalSyncService()
|
||||
241
backend/app/services/influx_http.py
Normal file
241
backend/app/services/influx_http.py
Normal file
@@ -0,0 +1,241 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import ssl
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from typing import Iterable
|
||||
|
||||
from app.core_settings import AppSettings, get_settings
|
||||
from app.models.definitions import MetricDefinition, SeriesPoint
|
||||
from app.services.metrics import to_float
|
||||
from app.utils.time import to_utc_iso
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
||||
def _quote_identifier(value: str) -> str:
    """Wrap an InfluxQL identifier in double quotes, escaping embedded quotes."""
    escaped = value.replace('"', '\\"')
    return f'"{escaped}"'
|
||||
|
||||
|
||||
|
||||
def _quote_literal(value: str) -> str:
    """Wrap a string literal in single quotes for InfluxQL, escaping backslashes first."""
    escaped = value.replace("\\", "\\\\").replace("'", "\\'")
    return f"'{escaped}'"
|
||||
|
||||
|
||||
class InfluxHTTPService:
    """Minimal InfluxDB 1.x ``/query`` client built on urllib (no extra deps).

    All public helpers swallow transport/query errors, log a warning and
    return an empty/None result so callers degrade gracefully.
    """

    def __init__(self, settings: AppSettings | None = None) -> None:
        self.settings = settings or get_settings()

    @property
    def base_url(self) -> str:
        """Base URL of the InfluxDB HTTP endpoint, e.g. ``https://host:8086``."""
        config = self.settings.influx
        return f"{config['scheme']}://{config['host']}:{config['port']}"

    def latest_values(self, metrics: Iterable[MetricDefinition]) -> dict[str, dict]:
        """Return {metric_id: {"value", "timestamp"}} for the newest sample of each metric.

        Metrics are grouped per measurement so each measurement needs only one
        ``LAST(...) GROUP BY entity_id`` query; results are matched back to
        metrics via an entity_id lookup table (O(1) per series).
        """
        grouped: dict[str, list[MetricDefinition]] = defaultdict(list)
        for metric in metrics:
            grouped[metric.measurement].append(metric)

        payload: dict[str, dict] = {}
        for measurement, measurement_metrics in grouped.items():
            by_entity = {metric.entity_id: metric for metric in measurement_metrics}
            conditions = " OR ".join(
                f'("entity_id" = {_quote_literal(metric.entity_id)})'
                for metric in measurement_metrics
            )
            query = (
                f'SELECT LAST("value") AS value '
                f'FROM {_quote_identifier(measurement)} '
                f'WHERE {conditions} '
                f'GROUP BY "entity_id"'
            )
            try:
                for series in self._execute(query):
                    entity_id = (series.get("tags") or {}).get("entity_id")
                    metric = by_entity.get(entity_id) if entity_id else None
                    if metric is None:
                        continue
                    row = self._row_from_series(series)
                    payload[metric.id] = {
                        "value": row.get("value"),
                        "timestamp": _parse_time(row.get("time")),
                    }
            except Exception as exc:
                logger.warning("Influx latest_values error for %s: %s", measurement, exc)
        return payload

    def latest_value(self, metric: MetricDefinition) -> SeriesPoint | None:
        """Newest sample for *metric*, or None."""
        return self._single_value(
            f'SELECT LAST("value") AS value '
            f'FROM {_quote_identifier(metric.measurement)} '
            f'WHERE "entity_id" = {_quote_literal(metric.entity_id)}'
        )

    def first_value(self, metric: MetricDefinition) -> SeriesPoint | None:
        """Oldest sample for *metric*, or None."""
        return self._single_value(
            f'SELECT FIRST("value") AS value '
            f'FROM {_quote_identifier(metric.measurement)} '
            f'WHERE "entity_id" = {_quote_literal(metric.entity_id)}'
        )

    def last_value(self, metric: MetricDefinition) -> SeriesPoint | None:
        """Alias of :meth:`latest_value`."""
        return self.latest_value(metric)

    def gauge_history(
        self,
        metric: MetricDefinition,
        start: datetime,
        end: datetime,
        interval: str,
        aggregate: str = "mean",
    ) -> list[SeriesPoint]:
        """Aggregated series for *metric* bucketed by *interval* (gaps filled with null)."""
        return self._bucketed_series(
            metric,
            start,
            end,
            interval,
            select=f'{aggregate}("value")',
            error_label="gauge_history",
        )

    def grouped_last_series(
        self,
        metric: MetricDefinition,
        start: datetime,
        end: datetime,
        interval: str,
    ) -> list[SeriesPoint]:
        """Per-bucket LAST() series for *metric* over [start, end]."""
        return self._bucketed_series(
            metric,
            start,
            end,
            interval,
            select='LAST("value")',
            error_label="grouped_last_series",
        )

    def _bucketed_series(
        self,
        metric: MetricDefinition,
        start: datetime,
        end: datetime,
        interval: str,
        *,
        select: str,
        error_label: str,
    ) -> list[SeriesPoint]:
        """Shared implementation of the GROUP BY time(...) series queries.

        Rows without a parseable timestamp are dropped; values may be None
        (fill(null) buckets are preserved as points with value None).
        """
        query = (
            f'SELECT {select} AS value '
            f'FROM {_quote_identifier(metric.measurement)} '
            f'WHERE "entity_id" = {_quote_literal(metric.entity_id)} '
            f'AND time >= {_quote_literal(to_utc_iso(start))} '
            f'AND time <= {_quote_literal(to_utc_iso(end))} '
            f'GROUP BY time({interval}) fill(null)'
        )
        points: list[SeriesPoint] = []
        try:
            for series in self._execute(query):
                for row in self._rows_from_series(series):
                    timestamp = _parse_time(row.get("time"))
                    if timestamp is None:
                        continue
                    points.append(SeriesPoint(timestamp=timestamp, value=to_float(row.get("value"))))
        except Exception as exc:
            logger.warning("Influx %s error for %s: %s", error_label, metric.id, exc)
        return points

    def last_before(self, metric: MetricDefinition, moment: datetime) -> SeriesPoint | None:
        """Newest sample strictly before *moment*, or None."""
        query = (
            f'SELECT LAST("value") AS value '
            f'FROM {_quote_identifier(metric.measurement)} '
            f'WHERE "entity_id" = {_quote_literal(metric.entity_id)} '
            f'AND time < {_quote_literal(to_utc_iso(moment))}'
        )
        try:
            return self._point_from_series(self._execute(query))
        except Exception as exc:
            logger.warning("Influx last_before error for %s: %s", metric.id, exc)
            return None

    def _single_value(self, query: str) -> SeriesPoint | None:
        """Run *query* and return its single (timestamp, value) point, or None."""
        try:
            return self._point_from_series(self._execute(query))
        except Exception as exc:
            logger.warning("Influx single value query error: %s", exc)
            return None

    @classmethod
    def _point_from_series(cls, series: list[dict]) -> SeriesPoint | None:
        """First row of the first series as a SeriesPoint; None when incomplete."""
        if not series:
            return None
        row = cls._row_from_series(series[0])
        timestamp = _parse_time(row.get("time"))
        value = to_float(row.get("value"))
        if timestamp is None or value is None:
            return None
        return SeriesPoint(timestamp=timestamp, value=value)

    def _execute(self, query: str) -> list[dict]:
        """Issue *query* against ``/query`` and return the first result's series list.

        Raises RuntimeError on transport failures or when Influx reports an
        error in the response body.
        """
        params = {
            "db": self.settings.influx["database"],
            "q": query,
        }
        url = f"{self.base_url}/query?{urllib.parse.urlencode(params)}"
        headers = {"Accept": "application/json"}
        username = self.settings.influx.get("username") or ""
        password = self.settings.influx.get("password") or ""
        if username:
            token = base64.b64encode(f"{username}:{password}".encode("utf-8")).decode("ascii")
            headers["Authorization"] = f"Basic {token}"

        request = urllib.request.Request(url, headers=headers, method="GET")
        verify_ssl = self.settings.influx.get("verify_ssl", False)
        timeout = self.settings.influx.get("timeout_seconds", 15)
        context = None
        if self.settings.influx.get("scheme") == "https" and not verify_ssl:
            # Build an unverified context through the public ssl API instead of
            # the private ssl._create_unverified_context() helper.
            context = ssl.create_default_context()
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE

        try:
            with urllib.request.urlopen(request, timeout=timeout, context=context) as response:
                payload = json.loads(response.read().decode("utf-8"))
        except urllib.error.HTTPError as exc:
            body = exc.read().decode("utf-8", errors="ignore")
            raise RuntimeError(f"Influx HTTP {exc.code}: {body}") from exc
        except urllib.error.URLError as exc:
            raise RuntimeError(f"Influx connection error: {exc}") from exc

        results = payload.get("results") or []
        if not results:
            return []
        result = results[0]
        if "error" in result:
            raise RuntimeError(result["error"])
        return result.get("series") or []

    @staticmethod
    def _rows_from_series(series: dict) -> list[dict]:
        """Zip a series' columns with each value row into dicts."""
        columns = series.get("columns") or []
        return [dict(zip(columns, values)) for values in series.get("values") or []]

    @classmethod
    def _row_from_series(cls, series: dict) -> dict:
        """First row of *series*, or an empty dict."""
        rows = cls._rows_from_series(series)
        return rows[0] if rows else {}
|
||||
|
||||
|
||||
|
||||
def _parse_time(value: str | None) -> datetime | None:
    """Parse an RFC3339 timestamp string from Influx; None when missing or malformed."""
    if not value:
        return None
    normalized = value.replace("Z", "+00:00")
    try:
        return datetime.fromisoformat(normalized)
    except ValueError:
        return None
|
||||
123
backend/app/services/kiosk_settings.py
Normal file
123
backend/app/services/kiosk_settings.py
Normal file
@@ -0,0 +1,123 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from flask import session
|
||||
|
||||
from app.core_settings import AppSettings, get_settings
|
||||
from app.storage.kiosk_settings import SQLiteKioskSettingsRepository
|
||||
|
||||
|
||||
# Kiosk display modes the API accepts.
VALID_MODES = {"public", "private"}
# Widget list (and order) used when no settings are stored or the stored list is invalid.
DEFAULT_WIDGETS = ["hero", "history", "strings", "status", "production", "comparison", "importStatus"]
# Full set of widget identifiers a payload may enable.
VALID_WIDGETS = {"hero", "quickMetrics", "history", "status", "strings", "production", "comparison", "distribution", "importStatus"}
# Accepted range selectors for the realtime and analytics views.
VALID_REALTIME_RANGES = {"today", "yesterday", "6h", "12h", "24h", "48h", "7d"}
VALID_ANALYTICS_RANGES = {"today", "yesterday", "7d", "30d", "90d", "365d", "custom"}
|
||||
|
||||
|
||||
class KioskSettingsService:
    """Validate and persist per-mode (public/private) kiosk display settings."""

    def __init__(self, settings: AppSettings | None = None) -> None:
        self.settings = settings or get_settings()
        # Settings documents are stored per mode in SQLite.
        self.repository = SQLiteKioskSettingsRepository(self.settings.storage["sqlite_path"])

    def get(self, mode: str) -> dict[str, Any]:
        """Return sanitized settings for *mode*, falling back to defaults when unset.

        Raises ValueError for an unknown mode.
        """
        normalized_mode = self._normalize_mode(mode)
        stored = self.repository.get(normalized_mode)
        if stored is None:
            return self._default_payload(normalized_mode)
        # Stored documents are re-sanitized on read so schema drift is tolerated.
        return self._sanitize_payload(normalized_mode, stored, persist_if_changed=False)

    def update(self, mode: str, payload: dict[str, Any], updated_by: str | None = None) -> dict[str, Any]:
        """Merge *payload* over the current settings, sanitize and persist the result."""
        normalized_mode = self._normalize_mode(mode)
        merged = {**self.get(normalized_mode), **(payload or {})}
        cleaned = self._sanitize_payload(normalized_mode, merged, persist_if_changed=False)
        return self.repository.upsert(normalized_mode, cleaned, updated_by=updated_by)

    def update_from_session(self, mode: str, payload: dict[str, Any]) -> dict[str, Any]:
        """Persist *payload*, attributing the change to the logged-in session user."""
        updated_by = session.get("auth_user")
        return self.update(mode, payload, updated_by=updated_by)

    def _default_payload(self, mode: str) -> dict[str, Any]:
        """Build the default settings document for *mode*."""
        return {
            "mode": mode,
            "widgets": list(DEFAULT_WIDGETS),
            "realtime_range": self._default_realtime_range(),
            "analytics_range": self._default_analytics_range(),
            "analytics_bucket": self._default_analytics_bucket(),
            "compare_mode": self._default_compare_mode(),
            "updated_at": None,
            "updated_by": None,
        }

    def _sanitize_payload(self, mode: str, payload: dict[str, Any], persist_if_changed: bool = False) -> dict[str, Any]:
        """Coerce every field of *payload* to a valid value; optionally write it back."""
        cleaned = {
            "mode": mode,
            "widgets": self._normalize_widgets(payload.get("widgets")),
            "realtime_range": self._normalize_realtime_range(payload.get("realtime_range")),
            "analytics_range": self._normalize_analytics_range(payload.get("analytics_range")),
            "analytics_bucket": self._normalize_bucket(payload.get("analytics_bucket")),
            "compare_mode": self._normalize_compare_mode(payload.get("compare_mode")),
            "updated_at": payload.get("updated_at"),
            "updated_by": payload.get("updated_by"),
        }
        if persist_if_changed:
            # NOTE(review): this persists unconditionally, not only "if changed" —
            # the parameter name overstates the check; confirm intent.
            return self.repository.upsert(mode, cleaned, updated_by=cleaned.get("updated_by"))
        return cleaned

    def _normalize_mode(self, mode: str) -> str:
        # Accepts any casing/whitespace; rejects anything outside VALID_MODES.
        normalized = (mode or "").strip().lower()
        if normalized not in VALID_MODES:
            raise ValueError("Mode musi byc jednym z: public, private")
        return normalized

    def _normalize_widgets(self, widgets: Any) -> list[str]:
        """Keep only known widgets, preserving order and dropping duplicates."""
        if not isinstance(widgets, list):
            return list(DEFAULT_WIDGETS)
        normalized: list[str] = []
        for item in widgets:
            widget = str(item or "").strip()
            if widget in VALID_WIDGETS and widget not in normalized:
                normalized.append(widget)
        # An empty result falls back to the defaults so the kiosk never renders blank.
        return normalized or list(DEFAULT_WIDGETS)

    def _normalize_realtime_range(self, value: Any) -> str:
        normalized = str(value or self._default_realtime_range()).strip()
        return normalized if normalized in VALID_REALTIME_RANGES else self._default_realtime_range()

    def _normalize_analytics_range(self, value: Any) -> str:
        normalized = str(value or self._default_analytics_range()).strip()
        return normalized if normalized in VALID_ANALYTICS_RANGES else self._default_analytics_range()

    def _normalize_bucket(self, value: Any) -> str:
        # Valid buckets come from configuration, not a module constant.
        normalized = str(value or self._default_analytics_bucket()).strip()
        return normalized if normalized in self.settings.analytics["bucket_labels"] else self._default_analytics_bucket()

    def _normalize_compare_mode(self, value: Any) -> str:
        normalized = str(value or self._default_compare_mode()).strip()
        return normalized if normalized in self.settings.analytics["compare_modes"] else self._default_compare_mode()

    def _default_realtime_range(self) -> str:
        # Configured default, guarded against invalid configuration values.
        raw = str(self.settings.realtime.get("history_default_range", "12h"))
        return raw if raw in VALID_REALTIME_RANGES else "12h"

    def _default_analytics_range(self) -> str:
        raw = str(self.settings.analytics.get("default_range", "30d"))
        return raw if raw in VALID_ANALYTICS_RANGES else "30d"

    def _default_analytics_bucket(self) -> str:
        raw = str(self.settings.analytics.get("default_bucket", "day"))
        return raw if raw in self.settings.analytics["bucket_labels"] else "day"

    def _default_compare_mode(self) -> str:
        raw = str(self.settings.analytics.get("default_compare", "none"))
        return raw if raw in self.settings.analytics["compare_modes"] else "none"
|
||||
|
||||
|
||||
# Lazily-created process-wide singleton (see get_kiosk_settings_service).
_kiosk_settings_service: KioskSettingsService | None = None


def get_kiosk_settings_service() -> KioskSettingsService:
    """Return the shared KioskSettingsService, creating it on first access."""
    global _kiosk_settings_service
    if _kiosk_settings_service is None:
        _kiosk_settings_service = KioskSettingsService()
    return _kiosk_settings_service
|
||||
99
backend/app/services/metrics.py
Normal file
99
backend/app/services/metrics.py
Normal file
@@ -0,0 +1,99 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from app.models.definitions import MetricDefinition, MetricValue
|
||||
|
||||
|
||||
|
||||
def to_float(value: float | str | None) -> float | None:
    """Coerce *value* to float, accepting a comma decimal separator; None on failure."""
    if value is None:
        return None
    if isinstance(value, (int, float)):
        return float(value)
    normalized = str(value).replace(",", ".")
    try:
        return float(normalized)
    except (TypeError, ValueError):
        return None
|
||||
|
||||
|
||||
|
||||
def round_value(value: float | None, precision: int) -> float | None:
    """Round *value* to *precision* digits; pass None through unchanged."""
    return None if value is None else round(value, precision)
|
||||
|
||||
|
||||
|
||||
def compare_delta_pct(current: float | None, previous: float | None) -> float | None:
    """Percentage change from *previous* to *current*; None when undefined.

    Undefined when either value is missing or *previous* equals zero
    (division would be meaningless).
    """
    if current is None or previous is None or previous == 0:
        return None
    delta = (current - previous) / previous * 100.0
    return round(delta, 2)
|
||||
|
||||
|
||||
|
||||
def build_status(metric_id: str, numeric: float | None) -> str:
    """Map a metric reading to a UI status string.

    Missing readings are "neutral".  Only "inverter_temp" has thresholds
    (<55 ok, <70 warn, else critical); every other metric is "ok".
    """
    if numeric is None:
        return "neutral"
    if metric_id != "inverter_temp":
        return "ok"
    if numeric < 55:
        return "ok"
    return "warn" if numeric < 70 else "critical"
|
||||
|
||||
|
||||
|
||||
def metric_value(
    metric: MetricDefinition,
    value: float | str | None,
    *,
    timestamp: datetime | None = None,
) -> MetricValue:
    """Build a MetricValue from a raw reading.

    Text metrics pass the value through untouched; every other kind is
    coerced to float and rounded to the metric's precision.  The status is
    derived from the numeric reading (None for text metrics).
    """
    numeric: float | None = None
    if metric.kind == "text":
        rendered = value
    else:
        numeric = to_float(value)
        rendered = round_value(numeric, metric.precision)

    return MetricValue(
        metric_id=metric.id,
        label=metric.label,
        unit=metric.unit,
        value=rendered,
        timestamp=timestamp,
        precision=metric.precision,
        kind=metric.kind,
        status=build_status(metric.id, numeric),
    )
|
||||
|
||||
|
||||
|
||||
def custom_metric_value(
    metric_id: str,
    label: str,
    value: float | str | None,
    *,
    unit: str = "",
    precision: int = 2,
    timestamp: datetime | None = None,
    status: str = "neutral",
    kind: str = "gauge",
) -> MetricValue:
    """Build a MetricValue for an ad-hoc metric that has no catalog definition."""
    if kind == "text":
        # Text payloads are rendered as-is (e.g. ISO timestamps, messages).
        rendered = value
    else:
        rendered = round_value(to_float(value), precision)
    return MetricValue(
        metric_id=metric_id,
        label=label,
        unit=unit,
        value=rendered,
        timestamp=timestamp,
        precision=precision,
        kind=kind,
        status=status,
    )
|
||||
231
backend/app/services/realtime.py
Normal file
231
backend/app/services/realtime.py
Normal file
@@ -0,0 +1,231 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from app.core_settings import AppSettings, get_settings
|
||||
from app.models.definitions import HeroCard, SnapshotGroupRow, SnapshotPayload
|
||||
from app.services.catalog import MetricCatalog, get_catalog
|
||||
from app.services.energy import EnergyService
|
||||
from app.services.influx_http import InfluxHTTPService
|
||||
from app.services.metrics import compare_delta_pct, custom_metric_value, metric_value, to_float
|
||||
from app.utils.time import choose_power_interval, now_local, resolve_window, start_of_local_day
|
||||
|
||||
|
||||
class RealtimeService:
    """Assembles live dashboard payloads (snapshot + history) from InfluxDB readings."""

    def __init__(
        self,
        settings: AppSettings | None = None,
        catalog: MetricCatalog | None = None,
        influx: InfluxHTTPService | None = None,
        energy: EnergyService | None = None,
    ) -> None:
        # Dependencies default to process-wide singletons; injectable for tests.
        self.settings = settings or get_settings()
        self.catalog = catalog or get_catalog()
        self.influx = influx or InfluxHTTPService(self.settings)
        self.energy = energy or EnergyService(self.settings, self.catalog, self.influx)

    def snapshot(self) -> SnapshotPayload:
        """Build the full realtime snapshot: hero cards, KPIs, string rows and status."""
        now = now_local()
        today_start = start_of_local_day(now)
        yesterday_start = today_start - timedelta(days=1)

        # Collect every metric id we need: core gauges plus per-string metrics.
        metric_ids = {"ac_power", "energy_total", "inverter_temp"}
        for group in self.settings.strings:
            metric_ids.update(group.get("metrics", {}).values())

        # Only query metrics the catalog actually knows about.
        metrics = [self.catalog.get(metric_id) for metric_id in metric_ids if self.catalog.safe_get(metric_id)]
        latest = self.influx.latest_values(metrics)

        ac_power = to_float(_value(latest, "ac_power"))
        # Total DC power = sum over configured string groups; missing readings count as 0.
        total_dc_power = round(
            sum(
                to_float(_value(latest, group.get("metrics", {}).get("power", ""))) or 0.0
                for group in self.settings.strings
            ),
            0,
        )
        energy_today = self.energy.total_for_window(today_start, now)
        energy_yesterday = self.energy.total_for_window(yesterday_start, today_start)
        total_energy = to_float(_value(latest, "energy_total"))
        inverter_temp = to_float(_value(latest, "inverter_temp"))

        hero_cards = [
            self._hero_card("ac_power", ac_power, subtitle="Aktualna moc AC"),
            self._hero_card("dc_power_total", total_dc_power, label="Moc DC laczna", unit="W", subtitle="Suma stringow DC"),
            self._hero_card("energy_today", energy_today, label="Energia dzis", unit="kWh", subtitle="Liczona z danych Influx"),
            self._hero_card("energy_total", total_energy, label="Energia laczna", unit="kWh", subtitle="Licznik calkowity"),
        ]
        # The temperature card is optional: only shown when the sensor reports a value.
        if inverter_temp is not None:
            hero_cards.append(self._hero_card("inverter_temp", inverter_temp, label="Temp. falownika", unit="°C", subtitle="Sensor opcjonalny"))

        kpis = {
            "energy_today": custom_metric_value("energy_today", "Energia dzis", energy_today, unit="kWh", precision=2, status="ok"),
            "energy_yesterday": custom_metric_value("energy_yesterday", "Energia wczoraj", energy_yesterday, unit="kWh", precision=2, status="ok"),
            "energy_total": custom_metric_value(
                "energy_total",
                "Energia laczna",
                total_energy,
                unit="kWh",
                precision=2,
                timestamp=_timestamp(latest, "energy_total"),
                status="ok",
            ),
            "dc_power_total": custom_metric_value("dc_power_total", "Moc DC laczna", total_dc_power, unit="W", precision=0, status="ok"),
        }

        # Day-over-day comparison is only added when both day totals allow it.
        comparison = compare_delta_pct(energy_today, energy_yesterday)
        if comparison is not None:
            kpis["today_vs_yesterday"] = custom_metric_value(
                "today_vs_yesterday",
                "Dzis vs wczoraj",
                comparison,
                unit="%",
                precision=2,
                status="ok" if comparison >= 0 else "warn",
            )

        strings = self._build_string_rows(latest)
        status = []
        if self.catalog.safe_get("inverter_temp"):
            status.append(
                metric_value(
                    self.catalog.get("inverter_temp"),
                    inverter_temp,
                    timestamp=_timestamp(latest, "inverter_temp"),
                )
            )
        # Freshness indicator: when the energy counter was last read.
        status.append(
            custom_metric_value(
                "data_refresh",
                "Ostatni odczyt energii",
                _timestamp(latest, "energy_total").isoformat() if _timestamp(latest, "energy_total") else None,
                status="ok" if _timestamp(latest, "energy_total") else "neutral",
                kind="text",
            )
        )

        updated_at = _max_timestamp(latest.values())
        return SnapshotPayload(
            updated_at=updated_at,
            hero_cards=hero_cards,
            kpis=kpis,
            strings=strings,
            phases=[],
            status=status,
            faults=[],
        )

    def history(self, range_key: str | None = None, start: str | None = None, end: str | None = None, metric_ids: list[str] | None = None) -> dict:
        """Return time series for the requested window, optionally filtered by metric ids."""
        window = resolve_window(range_key=range_key or self.settings.realtime["history_default_range"], start=start, end=end)
        interval = choose_power_interval(window.start, window.end)
        series = []

        # An empty selection means "include everything".
        selected = set(metric_ids or [])

        def include(metric_id: str) -> bool:
            return not selected or metric_id in selected

        ac_metric = self.catalog.safe_get("ac_power")
        if ac_metric is not None and include("ac_power"):
            series.append(
                {
                    "metric_id": ac_metric.id,
                    "label": ac_metric.label,
                    "unit": ac_metric.unit,
                    "points": self.influx.gauge_history(ac_metric, window.start, window.end, interval=interval, aggregate="mean"),
                }
            )

        for group in self.settings.strings:
            for slot, metric_id in group.get("metrics", {}).items():
                if not metric_id or not self.catalog.safe_get(metric_id) or not include(metric_id):
                    continue
                metric = self.catalog.get(metric_id)
                series.append(
                    {
                        "metric_id": metric.id,
                        # Power series are labelled with the string group's name instead
                        # of the raw metric label.
                        "label": metric.label if slot != "power" else group["label"],
                        "unit": metric.unit,
                        "points": self.influx.gauge_history(metric, window.start, window.end, interval=interval, aggregate="mean"),
                    }
                )

        temp_metric = self.catalog.safe_get("inverter_temp")
        if temp_metric is not None and include("inverter_temp"):
            temp_points = self.influx.gauge_history(temp_metric, window.start, window.end, interval=interval, aggregate="mean")
            # Forward-fill gaps so the temperature line does not break on missing samples.
            last_value = None
            filled = []
            for point in temp_points:
                value = point.value if point.value is not None else last_value
                if point.value is not None:
                    last_value = point.value
                filled.append({"timestamp": point.timestamp, "value": value})
            series.append(
                {
                    "metric_id": temp_metric.id,
                    "label": temp_metric.label,
                    "unit": temp_metric.unit,
                    "points": filled,
                }
            )

        return {
            "range_key": window.key,
            "start": window.start,
            "end": window.end,
            "series": series,
        }

    def _hero_card(self, metric_id: str, value, *, label: str | None = None, unit: str | None = None, subtitle: str = "") -> HeroCard:
        """Build one hero card, deriving its accent colour from the value."""
        accent = "slate"
        numeric = to_float(value)
        if metric_id == "inverter_temp":
            # Same thresholds as build_status: <55 good, <70 warning, else alarming.
            if numeric is not None and numeric < 55:
                accent = "emerald"
            elif numeric is not None and numeric < 70:
                accent = "amber"
            elif numeric is not None:
                accent = "rose"
        else:
            accent = "emerald" if numeric not in (None, 0) else "slate"

        # Fall back to catalog metadata (or the raw id) when label/unit not given.
        resolved_label = label or (self.catalog.get(metric_id).label if self.catalog.safe_get(metric_id) else metric_id)
        resolved_unit = unit or (self.catalog.get(metric_id).unit if self.catalog.safe_get(metric_id) else "")
        return HeroCard(
            metric_id=metric_id,
            label=resolved_label,
            value=value,
            unit=resolved_unit,
            accent=accent,
            subtitle=subtitle,
        )

    def _build_string_rows(self, latest: dict) -> list[SnapshotGroupRow]:
        """Build one row per configured string group from the latest readings."""
        rows = []
        for group in self.settings.strings:
            values = {}
            for slot, metric_id in group.get("metrics", {}).items():
                metric = self.catalog.safe_get(metric_id)
                if metric is None:
                    continue
                values[slot] = metric_value(metric, _value(latest, metric_id), timestamp=_timestamp(latest, metric_id))
            rows.append(SnapshotGroupRow(id=group["id"], label=group["label"], values=values, meta={}))
        return rows
|
||||
|
||||
|
||||
|
||||
def _value(latest: dict, metric_id: str):
|
||||
payload = latest.get(metric_id) or {}
|
||||
return payload.get("value")
|
||||
|
||||
|
||||
|
||||
def _timestamp(latest: dict, metric_id: str):
|
||||
payload = latest.get(metric_id) or {}
|
||||
return payload.get("timestamp")
|
||||
|
||||
|
||||
|
||||
def _max_timestamp(items) -> datetime | None:
|
||||
timestamps = [item.get("timestamp") for item in items if item.get("timestamp") is not None]
|
||||
return max(timestamps) if timestamps else None
|
||||
4
backend/app/storage/__init__.py
Normal file
4
backend/app/storage/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
from .sqlite_repository import SQLiteEnergyRepository
|
||||
from .auth_users import AuthUser, SQLiteAuthUserRepository
|
||||
|
||||
__all__ = ["SQLiteEnergyRepository", "AuthUser", "SQLiteAuthUserRepository"]
|
||||
132
backend/app/storage/auth_users.py
Normal file
132
backend/app/storage/auth_users.py
Normal file
@@ -0,0 +1,132 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Iterator
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class AuthUser:
    """Immutable record describing one dashboard account."""

    username: str  # primary key of the auth_users table
    password_hash: str  # already-hashed secret; hashing happens at a higher layer
    role: str
    display_name: str
    is_active: bool = True
    created_at: datetime | None = None  # parsed from ISO text stored in SQLite
    updated_at: datetime | None = None
|
||||
|
||||
|
||||
class SQLiteAuthUserRepository:
    """SQLite-backed store for dashboard login accounts."""

    def __init__(self, db_path: str) -> None:
        self.db_path = Path(db_path)
        # Make sure the data directory exists before the first connection.
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self.ensure_schema()

    @contextmanager
    def connect(self) -> Iterator[sqlite3.Connection]:
        """Yield a WAL-mode connection; commits on success and always closes.

        On an exception inside the block the commit is skipped and close()
        discards the uncommitted transaction.
        """
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        try:
            # WAL + NORMAL sync: concurrent readers with reasonable write latency.
            conn.execute("PRAGMA journal_mode=WAL")
            conn.execute("PRAGMA synchronous=NORMAL")
            yield conn
            conn.commit()
        finally:
            conn.close()

    def ensure_schema(self) -> None:
        """Create the auth_users table and its role index if missing."""
        with self.connect() as conn:
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS auth_users (
                    username TEXT PRIMARY KEY,
                    password_hash TEXT NOT NULL,
                    role TEXT NOT NULL,
                    display_name TEXT NOT NULL,
                    is_active INTEGER NOT NULL DEFAULT 1,
                    created_at TEXT NOT NULL,
                    updated_at TEXT NOT NULL
                )
                """
            )
            conn.execute(
                "CREATE INDEX IF NOT EXISTS idx_auth_users_role ON auth_users(role)"
            )

    def get_by_username(self, username: str) -> AuthUser | None:
        """Fetch a single account; None when the username is unknown."""
        with self.connect() as conn:
            row = conn.execute(
                """
                SELECT username, password_hash, role, display_name, is_active, created_at, updated_at
                FROM auth_users
                WHERE username = ?
                LIMIT 1
                """,
                (username,),
            ).fetchone()
            if row is None:
                return None
            return AuthUser(
                username=row["username"],
                password_hash=row["password_hash"],
                role=row["role"],
                display_name=row["display_name"],
                is_active=bool(row["is_active"]),
                created_at=datetime.fromisoformat(row["created_at"]),
                updated_at=datetime.fromisoformat(row["updated_at"]),
            )

    def upsert_user(self, *, username: str, password_hash: str, role: str, display_name: str, is_active: bool = True) -> AuthUser:
        """Insert or fully replace an account row and return the stored record."""
        # NOTE: datetime.utcnow() stores naive UTC timestamps; kept for
        # consistency with existing rows.
        now = datetime.utcnow().isoformat()
        with self.connect() as conn:
            conn.execute(
                """
                INSERT INTO auth_users (username, password_hash, role, display_name, is_active, created_at, updated_at)
                VALUES (?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(username) DO UPDATE SET
                    password_hash = excluded.password_hash,
                    role = excluded.role,
                    display_name = excluded.display_name,
                    is_active = excluded.is_active,
                    updated_at = excluded.updated_at
                """,
                (username, password_hash, role, display_name, 1 if is_active else 0, now, now),
            )
        return self.get_by_username(username)  # type: ignore[return-value]

    def update_password(self, username: str, password_hash: str) -> AuthUser | None:
        """Set a new password hash; returns the updated record, or None if the user is absent."""
        now = datetime.utcnow().isoformat()
        with self.connect() as conn:
            cursor = conn.execute(
                "UPDATE auth_users SET password_hash = ?, updated_at = ? WHERE username = ?",
                (password_hash, now, username),
            )
            if cursor.rowcount == 0:
                return None
        return self.get_by_username(username)

    def list_users(self) -> list[AuthUser]:
        """Return every account, ordered by role descending, then username ascending."""
        with self.connect() as conn:
            rows = conn.execute(
                """
                SELECT username, password_hash, role, display_name, is_active, created_at, updated_at
                FROM auth_users
                ORDER BY role DESC, username ASC
                """
            ).fetchall()
            return [
                AuthUser(
                    username=row["username"],
                    password_hash=row["password_hash"],
                    role=row["role"],
                    display_name=row["display_name"],
                    is_active=bool(row["is_active"]),
                    created_at=datetime.fromisoformat(row["created_at"]),
                    updated_at=datetime.fromisoformat(row["updated_at"]),
                )
                for row in rows
            ]
|
||||
87
backend/app/storage/kiosk_settings.py
Normal file
87
backend/app/storage/kiosk_settings.py
Normal file
@@ -0,0 +1,87 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Any, Iterator
|
||||
|
||||
|
||||
@dataclass
class KioskSettingsRecord:
    """Typed view of the kiosk configuration payload for one display mode."""

    mode: str  # display-mode identifier; primary key in kiosk_settings
    widgets: list[str]  # widget ids shown on the kiosk, in order
    realtime_range: str
    analytics_range: str
    analytics_bucket: str
    compare_mode: str
    updated_at: datetime | None = None  # ISO timestamp of the last save
    updated_by: str | None = None  # username that performed the last save
||||
|
||||
|
||||
class SQLiteKioskSettingsRepository:
    """SQLite-backed store of per-mode kiosk settings, persisted as JSON blobs."""

    def __init__(self, db_path: str) -> None:
        self.db_path = Path(db_path)
        # Make sure the data directory exists before the first connection.
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self.ensure_schema()

    @contextmanager
    def connect(self) -> Iterator[sqlite3.Connection]:
        """Yield a WAL-mode connection; commits on success and always closes."""
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        try:
            conn.execute("PRAGMA journal_mode=WAL")
            conn.execute("PRAGMA synchronous=NORMAL")
            yield conn
            conn.commit()
        finally:
            conn.close()

    def ensure_schema(self) -> None:
        """Create the kiosk_settings table if missing."""
        with self.connect() as conn:
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS kiosk_settings (
                    mode TEXT PRIMARY KEY,
                    payload_json TEXT NOT NULL,
                    updated_at TEXT NOT NULL,
                    updated_by TEXT
                )
                """
            )

    def get(self, mode: str) -> dict[str, Any] | None:
        """Load settings for *mode*, re-hydrating row metadata into the payload dict."""
        with self.connect() as conn:
            row = conn.execute(
                "SELECT mode, payload_json, updated_at, updated_by FROM kiosk_settings WHERE mode = ? LIMIT 1",
                (mode,),
            ).fetchone()
            if row is None:
                return None
            payload = json.loads(row["payload_json"])
            # Metadata lives in dedicated columns, not inside the JSON blob.
            payload["mode"] = row["mode"]
            payload["updated_at"] = row["updated_at"]
            payload["updated_by"] = row["updated_by"]
            return payload

    def upsert(self, mode: str, payload: dict[str, Any], updated_by: str | None = None) -> dict[str, Any]:
        """Store *payload* for *mode* and return the persisted settings dict."""
        # NOTE: naive UTC timestamp (datetime.utcnow()); kept for row consistency.
        now = datetime.utcnow().isoformat()
        stored_payload = dict(payload)
        # Strip metadata keys: they are rebuilt from columns on read.
        stored_payload.pop("mode", None)
        stored_payload.pop("updated_at", None)
        stored_payload.pop("updated_by", None)
        with self.connect() as conn:
            conn.execute(
                """
                INSERT INTO kiosk_settings (mode, payload_json, updated_at, updated_by)
                VALUES (?, ?, ?, ?)
                ON CONFLICT(mode) DO UPDATE SET
                    payload_json = excluded.payload_json,
                    updated_at = excluded.updated_at,
                    updated_by = excluded.updated_by
                """,
                (mode, json.dumps(stored_payload, ensure_ascii=False), now, updated_by),
            )
        # Fallback construction covers the unlikely case get() returns None.
        return self.get(mode) or {"mode": mode, **stored_payload, "updated_at": now, "updated_by": updated_by}
|
||||
131
backend/app/storage/sqlite_repository.py
Normal file
131
backend/app/storage/sqlite_repository.py
Normal file
@@ -0,0 +1,131 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime, date
|
||||
from pathlib import Path
|
||||
from typing import Iterator
|
||||
|
||||
from app.models import DailyEnergyRecord, HistoricalCoverage
|
||||
|
||||
|
||||
class SQLiteEnergyRepository:
    """SQLite-backed cache of per-day PV energy aggregates."""

    def __init__(self, db_path: str) -> None:
        self.db_path = Path(db_path)
        # Make sure the data directory exists before the first connection.
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self.ensure_schema()

    @contextmanager
    def connect(self) -> Iterator[sqlite3.Connection]:
        """Yield a WAL-mode connection; commits on success and always closes.

        On an exception inside the block the commit is skipped and close()
        discards the uncommitted transaction.
        """
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        try:
            # WAL + NORMAL sync: concurrent readers with reasonable write latency.
            conn.execute("PRAGMA journal_mode=WAL")
            conn.execute("PRAGMA synchronous=NORMAL")
            yield conn
            conn.commit()
        finally:
            conn.close()

    def ensure_schema(self) -> None:
        """Create the daily_energy table and its index if missing."""
        with self.connect() as conn:
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS daily_energy (
                    day TEXT PRIMARY KEY,
                    energy_kwh REAL NOT NULL,
                    source TEXT NOT NULL,
                    samples_count INTEGER NOT NULL DEFAULT 0,
                    imported_at TEXT NOT NULL
                )
                """
            )
            conn.execute(
                "CREATE INDEX IF NOT EXISTS idx_daily_energy_imported_at ON daily_energy(imported_at)"
            )

    def has_day(self, day: date) -> bool:
        """Return True when an aggregate row exists for *day*."""
        with self.connect() as conn:
            row = conn.execute("SELECT 1 FROM daily_energy WHERE day = ? LIMIT 1", (day.isoformat(),)).fetchone()
            return row is not None

    def upsert_daily_energy(self, record: DailyEnergyRecord) -> None:
        """Insert or overwrite the aggregate row for record.day."""
        # NOTE: datetime.utcnow() stores naive UTC; kept for compatibility
        # with existing rows that carry no offset.
        imported_at = record.imported_at or datetime.utcnow()
        with self.connect() as conn:
            conn.execute(
                """
                INSERT INTO daily_energy (day, energy_kwh, source, samples_count, imported_at)
                VALUES (?, ?, ?, ?, ?)
                ON CONFLICT(day) DO UPDATE SET
                    energy_kwh = excluded.energy_kwh,
                    source = excluded.source,
                    samples_count = excluded.samples_count,
                    imported_at = excluded.imported_at
                """,
                (
                    record.day.isoformat(),
                    float(record.energy_kwh),
                    record.source,
                    int(record.samples_count),
                    imported_at.isoformat(),
                ),
            )

    def fetch_daily_energy(self, start_day: date, end_day: date) -> dict[date, DailyEnergyRecord]:
        """Load aggregates for the inclusive [start_day, end_day] range, keyed by day."""
        with self.connect() as conn:
            rows = conn.execute(
                """
                SELECT day, energy_kwh, source, samples_count, imported_at
                FROM daily_energy
                WHERE day >= ? AND day <= ?
                ORDER BY day ASC
                """,
                (start_day.isoformat(), end_day.isoformat()),
            ).fetchall()

        payload: dict[date, DailyEnergyRecord] = {}
        for row in rows:
            # Parse the key once instead of twice per row.
            day = date.fromisoformat(row["day"])
            payload[day] = DailyEnergyRecord(
                day=day,
                energy_kwh=float(row["energy_kwh"]),
                source=row["source"],
                samples_count=int(row["samples_count"]),
                imported_at=datetime.fromisoformat(row["imported_at"]),
            )
        return payload

    def coverage(self) -> HistoricalCoverage:
        """Summarise cached history: day count, date bounds, and total kWh."""
        with self.connect() as conn:
            row = conn.execute(
                """
                SELECT
                    COUNT(*) AS imported_days,
                    MIN(day) AS first_day,
                    MAX(day) AS last_day,
                    COALESCE(SUM(energy_kwh), 0) AS total_energy_kwh
                FROM daily_energy
                """
            ).fetchone()

        if row is None:
            return HistoricalCoverage()

        return HistoricalCoverage(
            imported_days=int(row["imported_days"] or 0),
            first_day=date.fromisoformat(row["first_day"]) if row["first_day"] else None,
            last_day=date.fromisoformat(row["last_day"]) if row["last_day"] else None,
            total_energy_kwh=round(float(row["total_energy_kwh"] or 0.0), 2),
        )

    def latest_day(self) -> date | None:
        """Most recent cached day, or None when the cache is empty."""
        return self.coverage().last_day

    def count_missing_days(self, start_day: date, end_day: date) -> int:
        """Count days in the inclusive range that have no cached aggregate."""
        existing = self.fetch_daily_energy(start_day, end_day)
        missing = 0
        current = start_day
        while current <= end_day:
            if current not in existing:
                missing += 1
            # Advance one calendar day. Calling the classmethod on the class
            # (was `current.fromordinal(...)`, a classmethod confusingly
            # invoked through an instance).
            current = date.fromordinal(current.toordinal() + 1)
        return missing
|
||||
25
backend/app/utils/__init__.py
Normal file
25
backend/app/utils/__init__.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from .serialization import to_plain
|
||||
from .time import (
|
||||
TimeWindow,
|
||||
choose_counter_interval,
|
||||
choose_power_interval,
|
||||
duration_to_seconds,
|
||||
now_local,
|
||||
resolve_window,
|
||||
shift_window,
|
||||
start_of_local_day,
|
||||
to_utc_iso,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"TimeWindow",
|
||||
"choose_counter_interval",
|
||||
"choose_power_interval",
|
||||
"duration_to_seconds",
|
||||
"now_local",
|
||||
"resolve_window",
|
||||
"shift_window",
|
||||
"start_of_local_day",
|
||||
"to_plain",
|
||||
"to_utc_iso",
|
||||
]
|
||||
19
backend/app/utils/serialization.py
Normal file
19
backend/app/utils/serialization.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict, is_dataclass
|
||||
from datetime import date, datetime
|
||||
from typing import Any
|
||||
|
||||
|
||||
def to_plain(value: Any) -> Any:
    """Recursively convert dataclasses, dates and containers to JSON-safe plain data.

    Dataclass instances become dicts, datetimes/dates become ISO strings,
    dicts/lists/tuples/sets are converted element-wise; everything else is
    returned unchanged.
    """
    # Guard against dataclass *types*: is_dataclass() is also True for the
    # class object itself, but asdict() only accepts instances.
    if is_dataclass(value) and not isinstance(value, type):
        return to_plain(asdict(value))
    # datetime must be tested before date: datetime is a date subclass.
    if isinstance(value, datetime):
        return value.isoformat()
    if isinstance(value, date):
        return value.isoformat()
    if isinstance(value, dict):
        return {key: to_plain(item) for key, item in value.items()}
    if isinstance(value, (list, tuple, set)):
        # NOTE: set iteration order is arbitrary; callers must not rely on it.
        return [to_plain(item) for item in value]
    return value
|
||||
156
backend/app/utils/time.py
Normal file
156
backend/app/utils/time.py
Normal file
@@ -0,0 +1,156 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from config import TIME_RANGES
|
||||
from app.core_settings import get_settings
|
||||
|
||||
|
||||
@dataclass
class TimeWindow:
    """A concrete time span with a display label and range key."""

    start: datetime  # lower bound of the window
    end: datetime  # upper bound (typically "now")
    label: str  # human-readable name shown in the UI
    key: str  # range identifier, e.g. "today" or "custom"
||||
|
||||
|
||||
def now_local() -> datetime:
    """Current time in the application's configured timezone."""
    tz = ZoneInfo(get_settings().timezone)
    return datetime.now(tz)
|
||||
|
||||
|
||||
def start_of_local_day(moment: datetime | None = None) -> datetime:
    """Midnight of the local day containing *moment* (defaults to the current local time)."""
    if moment is None:
        moment = now_local()
    return moment.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
|
||||
def resolve_window(range_key: str | None = None, start: str | None = None, end: str | None = None) -> TimeWindow:
    """Resolve a named or custom time range into a concrete TimeWindow.

    Pass both *start* and *end* (ISO strings) for a custom window, or a
    *range_key* defined in settings.time_ranges. Raises ValueError for a
    half-specified custom range or an unknown key.
    """
    settings = get_settings()
    tz = ZoneInfo(settings.timezone)

    # A custom range needs both bounds; reject partial input early.
    if (start and not end) or (end and not start):
        raise ValueError("Provide both start and end for custom range")

    if start and end:
        start_dt = _parse_iso(start, tz)
        end_dt = _parse_iso(end, tz)
        return TimeWindow(start=start_dt, end=end_dt, label="Custom", key="custom")

    key = range_key or settings.analytics["default_range"]
    definition = settings.time_ranges.get(key)
    if not definition:
        raise ValueError(f"Unsupported range: {key}")

    now_dt = datetime.now(tz)
    end_dt = now_dt
    # "special" ranges are calendar-anchored; plain ranges are rolling spans
    # measured in seconds back from now.
    special = definition.get("special")
    if special == "ytd":
        start_dt = now_dt.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
    elif special == "today":
        start_dt = now_dt.replace(hour=0, minute=0, second=0, microsecond=0)
        end_dt = now_dt
    elif special == "yesterday":
        # Yesterday spans [midnight-1d, midnight) of the current local day.
        end_dt = now_dt.replace(hour=0, minute=0, second=0, microsecond=0)
        start_dt = end_dt - timedelta(days=1)
    else:
        start_dt = now_dt - timedelta(seconds=int(definition["seconds"]))

    return TimeWindow(start=start_dt, end=end_dt, label=definition["label"], key=key)
|
||||
|
||||
|
||||
def shift_window(window: TimeWindow, mode: str) -> TimeWindow:
    """Return *window* translated into the past according to compare *mode*.

    Supported modes: "previous_period", "previous_year[_2|_3]",
    "previous_month_12|_24". Raises ValueError for anything else.
    """
    if mode == "previous_period":
        # Slide the window back by its own span.
        span = window.end - window.start
        return TimeWindow(
            start=window.start - span,
            end=window.start,
            label="Previous period",
            key=f"{window.key}:previous_period",
        )

    year_modes = {"previous_year": 1, "previous_year_2": 2, "previous_year_3": 3}
    if mode in year_modes:
        years = year_modes[mode]
        return TimeWindow(
            start=_safe_replace_year(window.start, window.start.year - years),
            end=_safe_replace_year(window.end, window.end.year - years),
            label=f"Previous {years} year",
            key=f"{window.key}:{mode}",
        )

    month_modes = {"previous_month_12": 12, "previous_month_24": 24}
    if mode in month_modes:
        months = month_modes[mode]
        return TimeWindow(
            start=_shift_months(window.start, -months),
            end=_shift_months(window.end, -months),
            label=f"Previous {months} months",
            key=f"{window.key}:{mode}",
        )

    raise ValueError(f"Unsupported compare mode: {mode}")
|
||||
|
||||
|
||||
def choose_counter_interval(start: datetime, end: datetime) -> str:
    """Pick a query aggregation interval for counter metrics based on span length."""
    span = max((end - start).total_seconds(), 0)
    # Smallest interval whose span limit accommodates the window.
    thresholds = (
        (3 * 86400, "5m"),
        (14 * 86400, "15m"),
        (93 * 86400, "30m"),
        (366 * 86400, "1h"),
    )
    for limit, interval in thresholds:
        if span <= limit:
            return interval
    return "3h"
|
||||
|
||||
|
||||
def choose_power_interval(start: datetime, end: datetime) -> str:
    """Pick a query aggregation interval for power metrics based on span length."""
    span = max((end - start).total_seconds(), 0)
    # Smallest interval whose span limit accommodates the window.
    thresholds = (
        (24 * 3600, "5m"),
        (7 * 86400, "15m"),
        (31 * 86400, "30m"),
        (366 * 86400, "1h"),
    )
    for limit, interval in thresholds:
        if span <= limit:
            return interval
    return "3h"
|
||||
|
||||
|
||||
def duration_to_seconds(interval: str) -> int:
    """Convert a short duration string like "5m" or "2h" into seconds.

    Raises ValueError for unsupported unit suffixes or non-numeric amounts.
    """
    factors = {"s": 1, "m": 60, "h": 3600, "d": 86400}
    unit = interval[-1]
    amount = int(interval[:-1])
    if unit in factors:
        return amount * factors[unit]
    raise ValueError(f"Unsupported duration format: {interval}")
|
||||
|
||||
|
||||
def to_utc_iso(dt: datetime) -> str:
    """Render *dt* as an ISO-8601 string converted to UTC."""
    utc = ZoneInfo("UTC")
    return dt.astimezone(utc).isoformat()
|
||||
|
||||
|
||||
def _parse_iso(value: str, tz: ZoneInfo) -> datetime:
|
||||
parsed = datetime.fromisoformat(value.replace("Z", "+00:00"))
|
||||
if parsed.tzinfo is None:
|
||||
return parsed.replace(tzinfo=tz)
|
||||
return parsed.astimezone(tz)
|
||||
|
||||
|
||||
def _safe_replace_year(value: datetime, year: int) -> datetime:
|
||||
try:
|
||||
return value.replace(year=year)
|
||||
except ValueError:
|
||||
return value.replace(year=year, day=28)
|
||||
|
||||
|
||||
def _shift_months(value: datetime, months: int) -> datetime:
|
||||
year = value.year + ((value.month - 1 + months) // 12)
|
||||
month = ((value.month - 1 + months) % 12) + 1
|
||||
day = min(value.day, [31, 29 if year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) else 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month - 1])
|
||||
return value.replace(year=year, month=month, day=day)
|
||||
24
backend/backfill.py
Normal file
24
backend/backfill.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
from datetime import date
|
||||
|
||||
from app.services.historical_sync import get_historical_sync_service
|
||||
from app.utils.serialization import to_plain
|
||||
|
||||
|
||||
# CLI entry point: import daily PV aggregates from InfluxDB into the local
# SQLite cache. Runs at import time (plain script, no main() guard).
parser = argparse.ArgumentParser(description="Import dziennych agregatow PV z InfluxDB do lokalnego cache SQLite")
parser.add_argument("--start-date", dest="start_date", help="Data startowa YYYY-MM-DD")
parser.add_argument("--end-date", dest="end_date", help="Data koncowa YYYY-MM-DD")
parser.add_argument("--chunk-days", dest="chunk_days", type=int, default=7, help="Liczba dni na chunk")
parser.add_argument("--force", action="store_true", help="Nadpisz dni juz zapisane w cache")
args = parser.parse_args()

# run_blocking performs the sync synchronously; unspecified dates are left to
# the service's own defaults.
service = get_historical_sync_service()
status = service.run_blocking(
    start_date=date.fromisoformat(args.start_date) if args.start_date else None,
    end_date=date.fromisoformat(args.end_date) if args.end_date else None,
    chunk_days=args.chunk_days,
    force=args.force,
)
# Print the sync status as plain (JSON-safe) data for shell consumption.
print(to_plain(status))
|
||||
289
backend/config.py
Normal file
289
backend/config.py
Normal file
@@ -0,0 +1,289 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parent
|
||||
PROJECT_DIR = BASE_DIR.parent
|
||||
DATA_DIR = PROJECT_DIR / "data"
|
||||
DATA_DIR.mkdir(exist_ok=True)
|
||||
|
||||
|
||||
def _load_dotenv(path: Path) -> None:
|
||||
if not path.exists():
|
||||
return
|
||||
|
||||
for raw_line in path.read_text(encoding="utf-8").splitlines():
|
||||
line = raw_line.strip()
|
||||
if not line or line.startswith("#") or "=" not in line:
|
||||
continue
|
||||
|
||||
key, value = line.split("=", 1)
|
||||
key = key.strip()
|
||||
if not key or key in os.environ:
|
||||
continue
|
||||
|
||||
cleaned = value.strip().strip('"').strip("'")
|
||||
os.environ[key] = cleaned
|
||||
|
||||
|
||||
_load_dotenv(PROJECT_DIR / ".env")
|
||||
|
||||
|
||||
def env_bool(name: str, default: bool = False) -> bool:
    """Read an environment flag; truthy spellings are 1/true/yes/on (case-insensitive)."""
    raw = os.getenv(name, str(default))
    return raw.strip().lower() in {"1", "true", "yes", "on"}
|
||||
|
||||
|
||||
def env_int(name: str, default: int) -> int:
    """Read an integer environment variable, falling back to *default* on bad input."""
    raw = os.getenv(name, str(default))
    try:
        return int(raw)
    except (TypeError, ValueError):
        return default
|
||||
|
||||
|
||||
def env_float(name: str, default: float) -> float:
    """Read environment variable *name* as a float, or *default* on failure.

    A decimal comma is accepted (e.g. "9,99") by rewriting it to a dot
    before parsing.
    """
    raw = os.getenv(name, str(default)).replace(",", ".")
    try:
        return float(raw)
    except (TypeError, ValueError):
        return default
|
||||
|
||||
|
||||
# Core application settings, sourced from the environment with development
# defaults.
APP_CONFIG = {
    "name": os.getenv("APP_NAME", "PV Insight"),
    "version": os.getenv("APP_VERSION", "1.3.0"),
    "debug": env_bool("APP_DEBUG", False),
    "api_prefix": "/api/v1",  # HTTP routes are mounted under this prefix
    "timezone": os.getenv("APP_TIMEZONE", "Europe/Warsaw"),
    "host": os.getenv("APP_HOST", "0.0.0.0"),  # bind-all default, presumably for container use — confirm
    "port": env_int("APP_PORT", 8105),
}
|
||||
|
||||
# Installation metadata used by the UI and for derived figures.
SITE_CONFIG = {
    "site_name": os.getenv("SITE_NAME", "Domowa instalacja PV"),
    "timezone": APP_CONFIG["timezone"],  # reuse the app-wide timezone
    "installed_power_kwp": env_float("PV_INSTALLED_POWER_KWP", 9.99),  # nameplate power in kWp
    "currency": os.getenv("SITE_CURRENCY", "PLN"),
    "co2_factor_kg_per_kwh": env_float("CO2_FACTOR_KG_PER_KWH", 0.72),  # kg of CO2 per kWh — TODO confirm factor source
}
|
||||
|
||||
# Connection settings for the InfluxDB that holds the raw sensor series.
# NOTE(review): the username/password/database field set suggests the
# InfluxDB 1.x API — confirm against the client code.
INFLUXDB_CONFIG = {
    "scheme": os.getenv("INFLUXDB_SCHEME", "http"),
    "host": os.getenv("INFLUXDB_HOST", "127.0.0.1"),
    "port": env_int("INFLUXDB_PORT", 8086),
    "database": os.getenv("INFLUXDB_DATABASE", "ha"),  # "ha": presumably the Home Assistant database — confirm
    "username": os.getenv("INFLUXDB_USER", ""),  # empty -> unauthenticated access
    "password": os.getenv("INFLUXDB_PASSWORD", ""),
    "verify_ssl": env_bool("INFLUXDB_VERIFY_SSL", False),
    "timeout_seconds": env_int("INFLUXDB_TIMEOUT_SECONDS", 15),
}
|
||||
|
||||
# Local persistence: SQLite database file, kept under DATA_DIR by default.
STORAGE_CONFIG = {
    "sqlite_path": os.getenv("APP_SQLITE_PATH", str(DATA_DIR / "pv_insight.sqlite3")),
}
|
||||
|
||||
# Allowed CORS origins: comma-separated CORS_ORIGINS env var, defaulting to
# the local Vite dev/preview servers. Blank entries are dropped.
_cors_raw = os.getenv(
    "CORS_ORIGINS",
    "http://localhost:5173,http://127.0.0.1:5173,http://localhost:4173,http://127.0.0.1:4173",
)
CORS_ORIGINS = []
for _cors_entry in _cors_raw.split(","):
    _cors_entry = _cors_entry.strip()
    if _cors_entry:
        CORS_ORIGINS.append(_cors_entry)
|
||||
|
||||
# Selectable time ranges for the UI. "special" entries are resolved
# elsewhere (calendar day, year-to-date); the rest carry a fixed window
# length in seconds. Insertion order is preserved.
TIME_RANGES = {
    "today": {"label": "Dzis", "special": "today"},
    "yesterday": {"label": "Wczoraj", "special": "yesterday"},
}
for _hours in (6, 12, 24, 48):
    TIME_RANGES[f"{_hours}h"] = {"label": f"{_hours}h", "seconds": _hours * 3600}
for _days in (1, 3, 7, 14, 30, 60, 90, 365):
    _day_label = "1 dzien" if _days == 1 else f"{_days} dni"
    TIME_RANGES[f"{_days}d"] = {"label": _day_label, "seconds": _days * 86400}
TIME_RANGES["ytd"] = {"label": "YTD", "special": "ytd"}
|
||||
|
||||
# Realtime dashboard behaviour.
REALTIME = {
    "refresh_seconds": env_int("REALTIME_REFRESH_SECONDS", 8),  # poll interval for live values
    "history_default_range": os.getenv("REALTIME_HISTORY_DEFAULT_RANGE", "6h"),  # presumably a TIME_RANGES key — verify
}
|
||||
|
||||
# Human-readable labels for aggregation buckets (Polish UI strings).
_BUCKET_LABELS = {
    "day": "Dzien",
    "week": "Tydzien",
    "month": "Miesiac",
    "year": "Rok",
}

# Comparison-mode identifiers mapped to their UI labels.
_COMPARE_MODES = {
    "none": "Porownanie",
    "previous_period": "Poprzedni okres",
    "previous_year": "Poprzedni rok",
    "previous_year_2": "2 lata wstecz",
    "previous_year_3": "3 lata wstecz",
    "previous_month_12": "12 miesiecy wstecz",
    "previous_month_24": "24 miesiace wstecz",
    "custom_multi": "Wlasne zakresy",
}

# Analytics view configuration: which metric drives production figures and
# which defaults the frontend starts with.
ANALYTICS = {
    "production_metric_id": "energy_total",
    "fallback_power_metric_id": "ac_power",
    "default_range": os.getenv("ANALYTICS_DEFAULT_RANGE", "30d"),
    "default_bucket": os.getenv("ANALYTICS_DEFAULT_BUCKET", "day"),
    "bucket_labels": _BUCKET_LABELS,
    "compare_modes": _COMPARE_MODES,
}
|
||||
|
||||
# Historical data synchronisation settings (Influx -> local cache).
HISTORY = {
    "enabled": env_bool("HISTORY_ENABLED", True),
    "chunk_days": env_int("HISTORY_CHUNK_DAYS", 7),  # days fetched per sync chunk
    "default_chunk_days": env_int("HISTORY_DEFAULT_CHUNK_DAYS", 7),
    "auto_sync_enabled": env_bool("HISTORY_AUTO_SYNC_ENABLED", True),
    "auto_sync_on_start": env_bool("HISTORY_AUTO_SYNC_ON_START", False),
    "auto_sync_interval_minutes": env_int("HISTORY_AUTO_SYNC_INTERVAL_MINUTES", 30),
    "include_today_in_sync": env_bool("HISTORY_INCLUDE_TODAY_IN_SYNC", False),
    "bootstrap_start_date": os.getenv("HISTORY_BOOTSTRAP_START_DATE", "").strip(),  # presumably YYYY-MM-DD, empty = no explicit start — confirm against sync service
}
|
||||
|
||||
|
||||
# Session-based authentication settings.
# NOTE(review): the plaintext AUTH_PASSWORD default ("change-me") and the
# dev APP_SECRET_KEY must be overridden in any real deployment; prefer
# setting AUTH_PASSWORD_HASH over AUTH_PASSWORD.
AUTH_CONFIG = {
    "enabled": env_bool("AUTH_ENABLED", True),
    "username": os.getenv("AUTH_USERNAME", "admin"),
    "password": os.getenv("AUTH_PASSWORD", "change-me"),  # plaintext fallback; presumably ignored when password_hash is set — confirm
    "password_hash": os.getenv("AUTH_PASSWORD_HASH", "").strip(),
    "display_name": os.getenv("AUTH_DISPLAY_NAME", "Operator"),
    "secret_key": os.getenv("APP_SECRET_KEY", "pv-insight-dev-secret-change-me"),
    "session_cookie_name": os.getenv("APP_SESSION_COOKIE_NAME", "pv_insight_session"),
    "session_max_age_seconds": env_int("AUTH_SESSION_MAX_AGE_SECONDS", 60 * 60 * 12),  # 12 hours
    "cookie_secure": env_bool("AUTH_COOKIE_SECURE", False),  # set True when served over HTTPS
    "cookie_samesite": os.getenv("AUTH_COOKIE_SAMESITE", "Lax"),
}
|
||||
|
||||
# UI language configuration; the default language comes from the
# APP_DEFAULT_LANGUAGE environment variable.
_SUPPORTED_LANGUAGES = ["pl", "en"]
I18N = {
    "default_language": os.getenv("APP_DEFAULT_LANGUAGE", "pl"),
    "supported_languages": _SUPPORTED_LANGUAGES,
}
|
||||
|
||||
# Initial frontend state handed to the client.
FRONTEND_DEFAULTS = {
    "tab": os.getenv("FRONTEND_DEFAULT_TAB", "realtime"),
    "theme": os.getenv("FRONTEND_THEME", "dark"),
    "language": os.getenv("FRONTEND_LANGUAGE", I18N["default_language"]),  # falls back to the I18N default
}
|
||||
|
||||
METRICS: dict[str, dict] = {}
|
||||
STRINGS: list[dict] = []
|
||||
|
||||
|
||||
def register_metric(
|
||||
metric_id: str,
|
||||
*,
|
||||
entity_id: str,
|
||||
measurement: str,
|
||||
unit: str,
|
||||
label: str,
|
||||
kind: str = "gauge",
|
||||
precision: int = 2,
|
||||
) -> str | None:
|
||||
entity_id = (entity_id or "").strip()
|
||||
measurement = (measurement or "").strip()
|
||||
if not entity_id or not measurement:
|
||||
return None
|
||||
|
||||
METRICS[metric_id] = {
|
||||
"entity_id": entity_id,
|
||||
"measurement": measurement,
|
||||
"unit": unit,
|
||||
"label": label,
|
||||
"kind": kind,
|
||||
"precision": precision,
|
||||
"enabled": True,
|
||||
}
|
||||
return metric_id
|
||||
|
||||
|
||||
# Core inverter metrics. Each entry pairs a metric id with the
# register_metric keyword arguments; entity/measurement names come from the
# environment so the installation-specific entity ids can be overridden.
_CORE_METRICS = (
    (
        "ac_power",
        {
            "entity_id": os.getenv("PV_AC_POWER_ENTITY", "sofarsolar_ac_power"),
            "measurement": os.getenv("PV_AC_POWER_MEASUREMENT", "W"),
            "unit": "W",
            "label": "Moc AC",
            "precision": 0,
        },
    ),
    (
        "energy_total",
        {
            "entity_id": os.getenv("PV_TOTAL_ENERGY_ENTITY", "sofarsolar_energy_total"),
            "measurement": os.getenv("PV_TOTAL_ENERGY_MEASUREMENT", "kWh"),
            "unit": "kWh",
            "label": "Energia laczna",
            "kind": "counter",
            "precision": 2,
        },
    ),
    (
        "inverter_temp",
        {
            "entity_id": os.getenv("PV_INVERTER_TEMP_ENTITY", "sofarsolar_temprature_inverter"),
            "measurement": os.getenv("PV_INVERTER_TEMP_MEASUREMENT", "°C"),
            "unit": "°C",
            "label": "Temperatura falownika",
            "precision": 1,
        },
    ),
)

for _metric_id, _metric_kwargs in _CORE_METRICS:
    register_metric(_metric_id, **_metric_kwargs)
|
||||
|
||||
# Default entity names for the DC string inputs; strings 3 and 4 are blank
# by default and only become active when configured via the environment.
STRING_DEFAULTS = {
    1: {"label": "DC1", "power": "sofarsolar_dc1_power", "voltage": "sofarsolar_dc1_voltage"},
    2: {"label": "DC2", "power": "sofarsolar_dc2_power", "voltage": "sofarsolar_dc2_voltage"},
    3: {"label": "DC3", "power": "", "voltage": ""},
    4: {"label": "DC4", "power": "", "voltage": ""},
}

for _string_no, _string_defaults in STRING_DEFAULTS.items():
    # Per-string display label, overridable via PV_STRING_<n>_LABEL.
    _string_label = (
        os.getenv(f"PV_STRING_{_string_no}_LABEL", _string_defaults["label"]).strip()
        or _string_defaults["label"]
    )

    _string_metrics: dict[str, str] = {}
    _power_id = register_metric(
        f"string_{_string_no}_power",
        entity_id=os.getenv(f"PV_STRING_{_string_no}_POWER_ENTITY", _string_defaults["power"]),
        measurement=os.getenv(f"PV_STRING_{_string_no}_POWER_MEASUREMENT", "W"),
        unit="W",
        label=f"{_string_label} moc",
        precision=0,
    )
    if _power_id:
        _string_metrics["power"] = _power_id

    _voltage_id = register_metric(
        f"string_{_string_no}_voltage",
        entity_id=os.getenv(f"PV_STRING_{_string_no}_VOLTAGE_ENTITY", _string_defaults["voltage"]),
        measurement=os.getenv(f"PV_STRING_{_string_no}_VOLTAGE_MEASUREMENT", "V"),
        unit="V",
        label=f"{_string_label} napiecie",
        precision=1,
    )
    if _voltage_id:
        _string_metrics["voltage"] = _voltage_id

    # A string is surfaced only when at least one of its metrics registered.
    if _string_metrics:
        STRINGS.append(
            {
                "id": f"string_{_string_no}",
                "label": _string_label,
                "metrics": _string_metrics,
            }
        )
|
||||
|
||||
# Feature flags derived from the metric registry: UI modules are enabled
# only when the metrics they depend on were successfully registered.
MODULES = {
    "realtime_overview": True,
    "realtime_history": True,
    "analytics": "energy_total" in METRICS or "ac_power" in METRICS,
    "comparison": "energy_total" in METRICS or "ac_power" in METRICS,
    "distribution_pie": "energy_total" in METRICS or "ac_power" in METRICS,
    "strings": len(STRINGS) > 0,  # per-string DC panels
    "temperatures": "inverter_temp" in METRICS,
    "historical_import": HISTORY["enabled"],
    "phases": False,  # hard-disabled
    "faults": False,  # hard-disabled
    "settings_panel": True,
}
|
||||
|
||||
# Metric ids surfaced as status indicators (only those actually registered).
STATUS_METRICS = [metric_id for metric_id in ["inverter_temp"] if metric_id in METRICS]
# All registered metric ids, in registration order.
VISIBLE_ENTITY_TABLE = list(METRICS.keys())
|
||||
2
backend/requirements.txt
Normal file
2
backend/requirements.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
Flask>=3.1,<4
|
||||
waitress>=3.0.2,<4
|
||||
15
backend/run.py
Normal file
15
backend/run.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from __future__ import annotations

from app.main import app
from app.core_settings import get_settings


if __name__ == "__main__":
    # Development entry point: Flask's built-in server, with the reloader
    # enabled only in debug mode. Production deployments use run_prod.py.
    settings = get_settings()
    debug_mode = settings.debug
    app.run(
        host=settings.host,
        port=settings.port,
        debug=debug_mode,
        use_reloader=debug_mode,
        threaded=True,
    )
|
||||
18
backend/run_prod.py
Normal file
18
backend/run_prod.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from __future__ import annotations

import os

from waitress import serve

from app.main import app
from app.core_settings import get_settings


def _waitress_threads(default: int = 8) -> int:
    """Worker thread count from WAITRESS_THREADS, or *default* when unset
    or not parseable."""
    raw = os.getenv("WAITRESS_THREADS", str(default))
    try:
        return int(raw)
    except (TypeError, ValueError):
        return default


if __name__ == "__main__":
    # Production entry point: waitress WSGI server (no reloader, no debug).
    settings = get_settings()
    serve(app, host=settings.host, port=settings.port, threads=_waitress_threads())
|
||||
Reference in New Issue
Block a user