first commit
5
backend/app/services/__init__.py
Normal file
@@ -0,0 +1,5 @@
from .analytics import AnalyticsService
from .historical_sync import HistoricalSyncService
from .realtime import RealtimeService

__all__ = ["AnalyticsService", "HistoricalSyncService", "RealtimeService"]
140
backend/app/services/analytics.py
Normal file
@@ -0,0 +1,140 @@
from __future__ import annotations

from app.core_settings import AppSettings, get_settings
from app.services.catalog import MetricCatalog, get_catalog
from app.services.energy import EnergyService
from app.services.influx_http import InfluxHTTPService
from app.services.metrics import compare_delta_pct
from app.utils.time import resolve_window, shift_window


class AnalyticsService:
    def __init__(
        self,
        settings: AppSettings | None = None,
        catalog: MetricCatalog | None = None,
        influx: InfluxHTTPService | None = None,
        energy: EnergyService | None = None,
    ) -> None:
        self.settings = settings or get_settings()
        self.catalog = catalog or get_catalog()
        self.influx = influx or InfluxHTTPService(self.settings)
        self.energy = energy or EnergyService(self.settings, self.catalog, self.influx)

    def production(
        self,
        range_key: str | None = None,
        bucket: str | None = None,
        compare_mode: str = "none",
        start: str | None = None,
        end: str | None = None,
        compare_ranges: list[dict] | None = None,
    ) -> dict:
        bucket = bucket or self.settings.analytics["default_bucket"]
        if bucket not in self.settings.analytics["bucket_labels"]:
            raise ValueError(f"Unsupported bucket: {bucket}")

        window = resolve_window(range_key=range_key, start=start, end=end)
        current_days = self.energy.daily_records_for_window(window.start, window.end, persist_missing=True)
        current = self.energy.bucketize_daily(current_days, bucket)
        total = round(sum(item.value for item in current), 2)

        comparison = []
        comparison_total = None
        comparison_delta_pct = None
        comparisons = []
        if compare_mode == "custom_multi":
            for index, item in enumerate(compare_ranges or []):
                compare_start = item.get("start")
                compare_end = item.get("end")
                if not compare_start or not compare_end:
                    continue
                compare_window = resolve_window(start=compare_start, end=compare_end)
                comparison_days = self.energy.daily_records_for_window(compare_window.start, compare_window.end, persist_missing=True)
                comparison_series = self.energy.bucketize_daily(comparison_days, bucket)
                comparison_total_value = round(sum(point.value for point in comparison_series), 2)
                comparisons.append({
                    "key": item.get("key") or f"custom_{index + 1}",
                    "label": item.get("label") or f"Custom {index + 1}",
                    "start": compare_window.start,
                    "end": compare_window.end,
                    "total": comparison_total_value,
                    "delta_pct": compare_delta_pct(total, comparison_total_value),
                    "points": comparison_series,
                })
            if comparisons:
                comparison = comparisons[0]["points"]
                comparison_total = comparisons[0]["total"]
                comparison_delta_pct = comparisons[0]["delta_pct"]
        elif compare_mode != "none":
            compare_window = shift_window(window, compare_mode)
            comparison_days = self.energy.daily_records_for_window(compare_window.start, compare_window.end, persist_missing=True)
            comparison = self.energy.bucketize_daily(comparison_days, bucket)
            comparison_total = round(sum(item.value for item in comparison), 2)
            comparison_delta_pct = compare_delta_pct(total, comparison_total)
            comparisons.append({
                "key": compare_mode,
                "label": compare_mode,
                "start": compare_window.start,
                "end": compare_window.end,
                "total": comparison_total,
                "delta_pct": comparison_delta_pct,
                "points": comparison,
            })

        average_bucket = round(total / len(current), 2) if current else 0.0
        best_bucket = max(current, key=lambda item: item.value, default=None)

        return {
            "unit": "kWh",
            "bucket": bucket,
            "compare_mode": compare_mode,
            "current": current,
            "comparison": comparison,
            "comparisons": comparisons,
            "summary": {
                "total": total,
                "unit": "kWh",
                "average_bucket": average_bucket,
                "best_bucket_label": best_bucket.label if best_bucket else "",
                "best_bucket_value": best_bucket.value if best_bucket else 0.0,
                "co2_saved_kg": round(total * self.settings.co2_factor, 2),
                "comparison_total": comparison_total,
                "comparison_delta_pct": comparison_delta_pct,
            },
            "meta": {
                "window": {
                    "start": window.start,
                    "end": window.end,
                    "range_key": window.key,
                },
                "source": "sqlite_cache_plus_live_influx",
            },
        }

    def distribution(
        self,
        range_key: str | None = None,
        bucket: str | None = None,
        start: str | None = None,
        end: str | None = None,
    ) -> dict:
        payload = self.production(range_key=range_key, bucket=bucket, compare_mode="none", start=start, end=end)
        current = payload["current"]
        total = round(sum(item.value for item in current), 2)
        denominator = total or 1.0
        return {
            "unit": payload["unit"],
            "bucket": payload["bucket"],
            "total": total,
            "slices": [
                {
                    "label": item.label,
                    "value": item.value,
                    "share": round((item.value / denominator) * 100.0, 2),
                }
                for item in current
                if item.value > 0
            ],
            "meta": payload["meta"],
        }
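
Editor's note: a minimal usage sketch for the service above. It assumes settings, catalog, and Influx connectivity are already configured; the "7d" range key and "prev_period" compare mode are illustrative placeholders for whatever keys settings.time_ranges and settings.analytics["compare_modes"] actually define, not values confirmed by this commit.

# Sketch: fetch bucketed production with a period-over-period comparison.
from app.services.analytics import AnalyticsService

service = AnalyticsService()
payload = service.production(range_key="7d", bucket="day", compare_mode="prev_period")
print(payload["summary"]["total"], payload["unit"])  # e.g. 123.45 kWh
for point in payload["current"]:
    print(point.label, point.value)  # one row per day bucket
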
179
backend/app/services/auth.py
Normal file
@@ -0,0 +1,179 @@
from __future__ import annotations

from datetime import timedelta
from typing import Any

from flask import session
from werkzeug.security import check_password_hash, generate_password_hash

from app.core_settings import AppSettings, get_settings
from app.storage.auth_users import AuthUser, SQLiteAuthUserRepository


SESSION_USER_KEY = "auth_user"
SESSION_DISPLAY_NAME_KEY = "auth_display_name"
SESSION_ROLE_KEY = "auth_role"
VALID_ROLES = {"admin", "user"}


class AuthService:
    def __init__(self, settings: AppSettings | None = None) -> None:
        self.settings = settings or get_settings()
        self.user_repository = SQLiteAuthUserRepository(self.settings.storage["sqlite_path"])

    @property
    def enabled(self) -> bool:
        return bool(self.settings.auth["enabled"])

    def status(self) -> dict[str, Any]:
        if not self.enabled:
            return {
                "enabled": False,
                "authenticated": True,
                "user": None,
                "display_name": None,
                "role": None,
            }

        return {
            "enabled": True,
            "authenticated": SESSION_USER_KEY in session,
            "user": session.get(SESSION_USER_KEY),
            "display_name": session.get(SESSION_DISPLAY_NAME_KEY),
            "role": session.get(SESSION_ROLE_KEY),
        }

    def login(self, username: str, password: str) -> dict[str, Any]:
        if not self.enabled:
            return self.status()

        username = (username or "").strip()
        password = password or ""
        user = self.user_repository.get_by_username(username)

        if user is None:
            self._login_legacy_user(username, password)
        else:
            if not user.is_active:
                raise ValueError("Konto jest nieaktywne")
            if not check_password_hash(user.password_hash, password):
                raise ValueError("Niepoprawny login lub haslo")
            self._set_session(user.username, user.display_name, user.role)

        return self.status()

    def logout(self) -> dict[str, Any]:
        session.clear()
        return self.status()

    def list_users(self) -> list[dict[str, Any]]:
        users = self.user_repository.list_users()
        return [
            {
                "username": user.username,
                "display_name": user.display_name,
                "role": user.role,
                "is_active": user.is_active,
                "created_at": user.created_at,
                "updated_at": user.updated_at,
            }
            for user in users
        ]

    def require_admin(self) -> None:
        if not self.enabled:
            return
        if session.get(SESSION_ROLE_KEY) != "admin":
            raise PermissionError("Brak uprawnien administratora")

    def configure_app(self, app) -> None:
        max_age = int(self.settings.auth["session_max_age_seconds"])
        app.secret_key = self.settings.auth["secret_key"]
        app.config["PERMANENT_SESSION_LIFETIME"] = timedelta(seconds=max_age)
        app.config["SESSION_COOKIE_NAME"] = self.settings.auth["session_cookie_name"]
        app.config["SESSION_COOKIE_HTTPONLY"] = True
        app.config["SESSION_COOKIE_SAMESITE"] = self.settings.auth.get("cookie_samesite", "Lax")
        app.config["SESSION_COOKIE_SECURE"] = bool(self.settings.auth.get("cookie_secure", False))

    def create_user(self, *, username: str, password: str, role: str, display_name: str | None = None) -> AuthUser:
        normalized_username = self._normalize_username(username)
        normalized_role = self._normalize_role(role)
        clean_password = self._validate_password(password)
        resolved_display_name = (display_name or normalized_username).strip()
        if not resolved_display_name:
            raise ValueError("Display name nie moze byc pusty")
        return self.user_repository.upsert_user(
            username=normalized_username,
            password_hash=generate_password_hash(clean_password),
            role=normalized_role,
            display_name=resolved_display_name,
            is_active=True,
        )

    def reset_password(self, *, username: str, new_password: str) -> AuthUser:
        normalized_username = self._normalize_username(username)
        clean_password = self._validate_password(new_password)
        user = self.user_repository.update_password(
            normalized_username,
            generate_password_hash(clean_password),
        )
        if user is None:
            raise ValueError(f"Uzytkownik '{normalized_username}' nie istnieje")
        return user

    def _login_legacy_user(self, username: str, password: str) -> None:
        expected_username = self.settings.auth["username"]
        expected_password = self.settings.auth["password"]
        expected_password_hash = self.settings.auth.get("password_hash")

        if username != expected_username:
            raise ValueError("Niepoprawny login lub haslo")

        if expected_password_hash:
            password_ok = check_password_hash(expected_password_hash, password)
        else:
            password_ok = password == expected_password

        if not password_ok:
            raise ValueError("Niepoprawny login lub haslo")

        self._set_session(
            expected_username,
            self.settings.auth.get("display_name") or expected_username,
            self.settings.auth.get("role", "admin"),
        )

    def _set_session(self, username: str, display_name: str, role: str) -> None:
        session.clear()
        session.permanent = True
        session[SESSION_USER_KEY] = username
        session[SESSION_DISPLAY_NAME_KEY] = display_name
        session[SESSION_ROLE_KEY] = role

    def _normalize_username(self, username: str) -> str:
        normalized = (username or "").strip()
        if not normalized:
            raise ValueError("Username nie moze byc pusty")
        return normalized

    def _normalize_role(self, role: str) -> str:
        normalized = (role or "").strip().lower()
        if normalized not in VALID_ROLES:
            raise ValueError("Rola musi byc jedna z: admin, user")
        return normalized

    def _validate_password(self, password: str) -> str:
        clean_password = password or ""
        if len(clean_password) < 8:
            raise ValueError("Haslo musi miec co najmniej 8 znakow")
        return clean_password


_auth_service: AuthService | None = None


def get_auth_service() -> AuthService:
    global _auth_service
    if _auth_service is None:
        _auth_service = AuthService()
    return _auth_service
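
Editor's note: a minimal integration sketch. login() and status() touch flask.session, so they must run inside a request context; the /api/auth/login route path is illustrative, not taken from this commit.

# Sketch: wiring AuthService into a Flask app.
from flask import Flask, jsonify, request
from app.services.auth import get_auth_service

app = Flask(__name__)
auth = get_auth_service()
auth.configure_app(app)  # secret key, cookie flags, session lifetime

@app.post("/api/auth/login")
def login_route():
    body = request.get_json(force=True) or {}
    try:
        return jsonify(auth.login(body.get("username", ""), body.get("password", "")))
    except ValueError as exc:
        # Credential / inactive-account errors surface as ValueError above.
        return jsonify({"error": str(exc)}), 401
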
43
backend/app/services/capabilities.py
Normal file
@@ -0,0 +1,43 @@
from __future__ import annotations

from app.services.catalog import MetricCatalog, get_catalog


def build_capabilities(catalog: MetricCatalog | None = None) -> dict:
    catalog = catalog or get_catalog()
    settings = catalog.settings

    string_rows = []
    for item in settings.strings:
        metric_ids = list(item.get("metrics", {}).values())
        if any(catalog.safe_get(metric_id) for metric_id in metric_ids):
            string_rows.append(item)

    analytics_enabled = settings.modules.get("analytics", False)

    return {
        "modules": settings.modules,
        "strings_enabled": settings.modules.get("strings", False) and len(string_rows) > 0,
        "strings_count": len(string_rows),
        "phases_enabled": False,
        "phases_count": 0,
        "analytics_enabled": analytics_enabled,
        "realtime_enabled": settings.modules.get("realtime_overview", False),
        "comparison_modes": list(settings.analytics["compare_modes"].keys()),
        "ranges": [
            {"key": key, "label": definition["label"]}
            for key, definition in settings.time_ranges.items()
        ],
        "buckets": [
            {"key": key, "label": label}
            for key, label in settings.analytics["bucket_labels"].items()
        ],
        "historical_import_enabled": settings.modules.get("historical_import", False),
        "history": {
            "enabled": settings.history.get("enabled", True),
            "default_chunk_days": settings.history.get("default_chunk_days", 7),
            "auto_sync_enabled": settings.history.get("auto_sync_enabled", False),
            "auto_sync_interval_minutes": settings.history.get("auto_sync_interval_minutes", 30),
        },
    }
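
Editor's note: a sketch of how the capabilities payload might be served to the frontend; the route path is illustrative, not taken from this commit.

# Sketch: exposing feature flags / ranges / buckets to the UI.
from flask import Flask, jsonify
from app.services.capabilities import build_capabilities

app = Flask(__name__)

@app.get("/api/capabilities")
def capabilities_route():
    return jsonify(build_capabilities())
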
27
backend/app/services/catalog.py
Normal file
@@ -0,0 +1,27 @@
from __future__ import annotations

from dataclasses import dataclass

from app.core_settings import AppSettings, get_settings
from app.models.definitions import MetricDefinition


@dataclass
class MetricCatalog:
    settings: AppSettings

    def get(self, metric_id: str) -> MetricDefinition:
        if metric_id not in self.settings.metrics:
            raise KeyError(f"Unknown metric: {metric_id}")
        return self.settings.metrics[metric_id]

    def safe_get(self, metric_id: str) -> MetricDefinition | None:
        return self.settings.metrics.get(metric_id)

    def visible_entities(self) -> list[MetricDefinition]:
        return [self.get(metric_id) for metric_id in self.settings.visible_entity_table if metric_id in self.settings.metrics]


def get_catalog() -> MetricCatalog:
    return MetricCatalog(get_settings())
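
Editor's note: the catalog splits lookup into a raising get() and a None-returning safe_get(), which the other services lean on for optional metrics. A quick sketch; "ac_power" is an illustrative metric id, not confirmed by this commit.

# Sketch: optional vs. required metric lookup.
from app.services.catalog import get_catalog

catalog = get_catalog()
metric = catalog.safe_get("ac_power")  # None if the id is not configured
if metric is not None:
    print(metric.measurement, metric.entity_id)
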
220
backend/app/services/energy.py
Normal file
@@ -0,0 +1,220 @@
from __future__ import annotations

from collections import defaultdict
from dataclasses import dataclass
from datetime import date, datetime, time, timedelta
from zoneinfo import ZoneInfo

from app.core_settings import AppSettings, get_settings
from app.models.definitions import BucketPoint, DailyEnergyRecord, MetricDefinition, SeriesPoint
from app.services.catalog import MetricCatalog, get_catalog
from app.services.influx_http import InfluxHTTPService
from app.services.metrics import to_float
from app.storage import SQLiteEnergyRepository
from app.utils.time import (
    choose_counter_interval,
    choose_power_interval,
    duration_to_seconds,
    now_local,
)


@dataclass
class EnergySample:
    timestamp: datetime
    delta_kwh: float


class EnergyService:
    def __init__(
        self,
        settings: AppSettings | None = None,
        catalog: MetricCatalog | None = None,
        influx: InfluxHTTPService | None = None,
        repository: SQLiteEnergyRepository | None = None,
    ) -> None:
        self.settings = settings or get_settings()
        self.catalog = catalog or get_catalog()
        self.influx = influx or InfluxHTTPService(self.settings)
        self.repository = repository or SQLiteEnergyRepository(self.settings.storage["sqlite_path"])
        self.tz = ZoneInfo(self.settings.timezone)

    def total_for_window(self, start: datetime, end: datetime) -> float:
        total, _, _ = self.window_total_with_meta(start, end)
        return total

    def window_total_with_meta(self, start: datetime, end: datetime) -> tuple[float, str, int]:
        samples, source, observations_count = self._samples_for_window(start, end)
        return round(sum(sample.delta_kwh for sample in samples), 2), source, observations_count

    def total_for_full_day(self, day: date) -> tuple[float, str, int]:
        start = datetime.combine(day, time.min, tzinfo=self.tz)
        end = start + timedelta(days=1)
        return self.window_total_with_meta(start, end)

    def samples(self, start: datetime, end: datetime) -> list[EnergySample]:
        samples, _, _ = self._samples_for_window(start, end)
        return samples

    def daily_records_for_window(
        self,
        start: datetime,
        end: datetime,
        *,
        persist_missing: bool = True,
    ) -> list[DailyEnergyRecord]:
        start_local = start.astimezone(self.tz)
        end_local = end.astimezone(self.tz)
        if end_local <= start_local:
            return []

        start_day = start_local.date()
        end_day = end_local.date()
        cached = self.repository.fetch_daily_energy(start_day, end_day)
        today_local = now_local().date()
        rows: list[DailyEnergyRecord] = []

        current = start_day
        while current <= end_day:
            day_start = datetime.combine(current, time.min, tzinfo=self.tz)
            day_end = day_start + timedelta(days=1)
            segment_start = max(start_local, day_start)
            segment_end = min(end_local, day_end)
            if segment_end <= segment_start:
                current = current + timedelta(days=1)
                continue

            is_full_day = segment_start == day_start and segment_end == day_end
            cached_row = cached.get(current)
            if is_full_day and cached_row is not None:
                rows.append(cached_row)
            else:
                total, source, observations_count = self.window_total_with_meta(segment_start, segment_end)
                record = DailyEnergyRecord(
                    day=current,
                    energy_kwh=total,
                    source=source,
                    samples_count=observations_count,
                )
                rows.append(record)
                if is_full_day and persist_missing and current < today_local and observations_count > 0:
                    self.repository.upsert_daily_energy(record)
            current = current + timedelta(days=1)

        return rows

    def bucketize_daily(self, records: list[DailyEnergyRecord], bucket: str) -> list[BucketPoint]:
        grouped: dict[str, dict] = defaultdict(lambda: {"value": 0.0, "start": None, "end": None, "label": ""})

        for record in records:
            start = datetime.combine(record.day, time.min, tzinfo=self.tz)
            if bucket == "day":
                bucket_start = start
                bucket_end = bucket_start + timedelta(days=1)
                key = bucket_start.strftime("%Y-%m-%d")
                label = bucket_start.strftime("%d.%m")
            elif bucket == "week":
                bucket_start = start - timedelta(days=start.weekday())
                bucket_end = bucket_start + timedelta(days=7)
                iso = bucket_start.isocalendar()
                key = f"{iso.year}-W{iso.week:02d}"
                label = key
            elif bucket == "month":
                bucket_start = start.replace(day=1)
                if bucket_start.month == 12:
                    bucket_end = bucket_start.replace(year=bucket_start.year + 1, month=1)
                else:
                    bucket_end = bucket_start.replace(month=bucket_start.month + 1)
                key = bucket_start.strftime("%Y-%m")
                label = key
            elif bucket == "year":
                bucket_start = start.replace(month=1, day=1)
                bucket_end = bucket_start.replace(year=bucket_start.year + 1)
                key = bucket_start.strftime("%Y")
                label = key
            else:
                raise ValueError(f"Unsupported bucket: {bucket}")

            current = grouped[key]
            current["label"] = label
            current["value"] += record.energy_kwh
            current["start"] = bucket_start if current["start"] is None else min(current["start"], bucket_start)
            current["end"] = bucket_end if current["end"] is None else max(current["end"], bucket_end)

        rows = []
        for key in sorted(grouped.keys()):
            item = grouped[key]
            rows.append(
                BucketPoint(
                    label=item["label"],
                    start=item["start"],
                    end=item["end"],
                    value=round(item["value"], 2),
                )
            )
        return rows

    def _samples_for_window(self, start: datetime, end: datetime) -> tuple[list[EnergySample], str, int]:
        counter_metric = self.catalog.safe_get(self.settings.analytics["production_metric_id"])
        if counter_metric is not None:
            samples, observations_count = self._samples_from_counter(counter_metric, start, end)
            return samples, "counter", observations_count

        power_metric = self.catalog.safe_get(self.settings.analytics["fallback_power_metric_id"])
        if power_metric is not None:
            samples, observations_count = self._samples_from_power(power_metric, start, end)
            return samples, "power_estimated", observations_count

        return [], "unavailable", 0

    def _samples_from_counter(self, metric: MetricDefinition, start: datetime, end: datetime) -> tuple[list[EnergySample], int]:
        interval = choose_counter_interval(start, end)
        baseline = self.influx.last_before(metric, start)
        series = self.influx.grouped_last_series(metric, start, end, interval)

        points: list[SeriesPoint] = []
        if baseline and baseline.value is not None:
            points.append(SeriesPoint(timestamp=start, value=baseline.value))
        else:
            first_value = next((point.value for point in series if point.value is not None), None)
            if first_value is not None:
                points.append(SeriesPoint(timestamp=start, value=first_value))
        points.extend(series)

        samples: list[EnergySample] = []
        previous_value = None
        for point in points:
            current_value = to_float(point.value)
            if current_value is None:
                continue
            if previous_value is None:
                previous_value = current_value
                continue

            delta = current_value - previous_value
            previous_value = current_value
            if delta <= 0:
                continue
            if point.timestamp < start or point.timestamp > end:
                continue
            samples.append(EnergySample(timestamp=point.timestamp, delta_kwh=round(delta, 6)))

        observations_count = sum(1 for point in series if to_float(point.value) is not None)
        return samples, observations_count

    def _samples_from_power(self, metric: MetricDefinition, start: datetime, end: datetime) -> tuple[list[EnergySample], int]:
        interval = choose_power_interval(start, end)
        interval_seconds = duration_to_seconds(interval)
        points = self.influx.gauge_history(metric, start, end, interval, aggregate="mean")
        samples: list[EnergySample] = []
        observations_count = 0
        for point in points:
            watts = to_float(point.value)
            if watts is None:
                continue
            observations_count += 1
            if watts <= 0:
                continue
            delta_kwh = watts * (interval_seconds / 3600.0) / 1000.0
            samples.append(EnergySample(timestamp=point.timestamp, delta_kwh=round(delta_kwh, 6)))
        return samples, observations_count
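
Editor's note: the fallback path in _samples_from_power integrates mean power over each interval. A worked check of that conversion with illustrative numbers:

# Worked check of the W -> kWh conversion used in _samples_from_power:
# a 300 s bucket averaging 2400 W contributes 2400 * (300 / 3600) / 1000 = 0.2 kWh.
watts = 2400.0
interval_seconds = 300
delta_kwh = watts * (interval_seconds / 3600.0) / 1000.0
assert abs(delta_kwh - 0.2) < 1e-9
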
605
backend/app/services/historical_sync.py
Normal file
@@ -0,0 +1,605 @@
from __future__ import annotations

import copy
import logging
import threading
import uuid
from datetime import date, datetime, timedelta
from functools import lru_cache
from math import ceil
from typing import Iterable

from app.core_settings import AppSettings, get_settings
from app.models import (
    DailyEnergyRecord,
    HistoricalActivityEvent,
    HistoricalChunkProgress,
    HistoricalImportStatus,
)
from app.services.catalog import MetricCatalog, get_catalog
from app.services.energy import EnergyService
from app.services.influx_http import InfluxHTTPService
from app.storage import SQLiteEnergyRepository
from app.utils.time import now_local

logger = logging.getLogger(__name__)


class HistoricalSyncService:
    MAX_RECENT_CHUNKS = 18
    MAX_RECENT_EVENTS = 40

    def __init__(
        self,
        settings: AppSettings | None = None,
        catalog: MetricCatalog | None = None,
        influx: InfluxHTTPService | None = None,
        energy: EnergyService | None = None,
        repository: SQLiteEnergyRepository | None = None,
    ) -> None:
        self.settings = settings or get_settings()
        self.catalog = catalog or get_catalog()
        self.influx = influx or InfluxHTTPService(self.settings)
        self.energy = energy or EnergyService(self.settings, self.catalog, self.influx)
        self.repository = repository or SQLiteEnergyRepository(self.settings.storage["sqlite_path"])
        self._state_lock = threading.Lock()
        self._worker: threading.Thread | None = None
        self._cancel_event = threading.Event()
        self._scheduler_stop = threading.Event()
        self._scheduler: threading.Thread | None = None
        self._available_bounds_cache: tuple[datetime, date | None, date | None] | None = None
        self._state = HistoricalImportStatus(
            enabled=self.settings.history.get("enabled", True),
            state="idle",
            default_chunk_days=self.settings.history.get("default_chunk_days", 7),
        )
        self._refresh_coverage()
        self._refresh_available_bounds()
        self._refresh_runtime_metrics()

    def status(self) -> HistoricalImportStatus:
        with self._state_lock:
            self._refresh_coverage(lock_held=True)
            self._refresh_available_bounds(lock_held=True)
            self._refresh_runtime_metrics(lock_held=True)
            return copy.deepcopy(self._state)

    def start(
        self,
        *,
        start_date: date | None = None,
        end_date: date | None = None,
        chunk_days: int | None = None,
        force: bool = False,
        auto: bool = False,
    ) -> HistoricalImportStatus:
        if not self.settings.history.get("enabled", True):
            raise RuntimeError("Historical import is disabled")

        chunk_days = max(int(chunk_days or self.settings.history.get("default_chunk_days", 7)), 1)
        resolved = self._resolve_range(start_date=start_date, end_date=end_date)
        if resolved is None:
            with self._state_lock:
                self._state.running = False
                self._state.state = "idle"
                self._state.message = "Brak brakujacych dni do importu."
                self._state.finished_at = datetime.utcnow()
                self._refresh_coverage(lock_held=True)
                self._refresh_available_bounds(lock_held=True)
                self._refresh_runtime_metrics(lock_held=True)
                return copy.deepcopy(self._state)

        resolved_start, resolved_end = resolved
        total_days = (resolved_end - resolved_start).days + 1
        total_chunks = max(ceil(total_days / chunk_days), 1)
        start_message = "Start importu archiwalnego" if not auto else "Start automatycznej synchronizacji archiwum"

        with self._state_lock:
            if self._worker and self._worker.is_alive():
                return copy.deepcopy(self._state)

            self._cancel_event = threading.Event()
            self._state = HistoricalImportStatus(
                enabled=True,
                running=True,
                state="running",
                job_id=uuid.uuid4().hex[:12],
                started_at=datetime.utcnow(),
                requested_start_date=resolved_start,
                requested_end_date=resolved_end,
                total_days=total_days,
                chunk_days=chunk_days,
                total_chunks=total_chunks,
                active_chunk_index=1,
                current_chunk_start=resolved_start,
                current_chunk_end=min(resolved_start + timedelta(days=chunk_days - 1), resolved_end),
                message=start_message,
                default_chunk_days=self.settings.history.get("default_chunk_days", 7),
                recent_chunks=[],
                recent_events=[],
            )
            self._refresh_coverage(lock_held=True)
            self._refresh_available_bounds(lock_held=True)
            self._refresh_runtime_metrics(lock_held=True)

            self._worker = threading.Thread(
                target=self._run_worker,
                kwargs={
                    "start_date": resolved_start,
                    "end_date": resolved_end,
                    "chunk_days": chunk_days,
                    "force": force,
                    "auto": auto,
                },
                name="pv-historical-backfill",
                daemon=True,
            )
            self._worker.start()

        self._record_event(
            level="info",
            title="Uruchomiono zadanie",
            message=f"Zakres {resolved_start.isoformat()} -> {resolved_end.isoformat()}, chunk {chunk_days} dni",
        )
        return self.status()

    def cancel(self) -> HistoricalImportStatus:
        self._cancel_event.set()
        with self._state_lock:
            self._state.message = "Anulowanie zadania..."
            self._refresh_runtime_metrics(lock_held=True)
            snapshot = copy.deepcopy(self._state)
        self._record_event(level="warn", title="Anulowanie", message="Uzytkownik poprosil o zatrzymanie zadania.")
        return snapshot

    def run_blocking(
        self,
        *,
        start_date: date | None = None,
        end_date: date | None = None,
        chunk_days: int | None = None,
        force: bool = False,
    ) -> HistoricalImportStatus:
        resolved = self._resolve_range(start_date=start_date, end_date=end_date)
        if resolved is None:
            return self.status()
        resolved_start, resolved_end = resolved
        chunk_days = max(int(chunk_days or self.settings.history.get("default_chunk_days", 7)), 1)
        total_days = (resolved_end - resolved_start).days + 1
        total_chunks = max(ceil(total_days / chunk_days), 1)
        with self._state_lock:
            self._state = HistoricalImportStatus(
                enabled=True,
                running=True,
                state="running",
                job_id=uuid.uuid4().hex[:12],
                started_at=datetime.utcnow(),
                requested_start_date=resolved_start,
                requested_end_date=resolved_end,
                total_days=total_days,
                chunk_days=chunk_days,
                total_chunks=total_chunks,
                default_chunk_days=self.settings.history.get("default_chunk_days", 7),
                recent_chunks=[],
                recent_events=[],
            )
        self._record_event(
            level="info",
            title="Uruchomiono zadanie",
            message=f"Zakres {resolved_start.isoformat()} -> {resolved_end.isoformat()}, chunk {chunk_days} dni",
        )
        self._run_worker(
            start_date=resolved_start,
            end_date=resolved_end,
            chunk_days=chunk_days,
            force=force,
            auto=False,
        )
        return self.status()

    def start_scheduler_if_enabled(self) -> None:
        if not self.settings.history.get("enabled", True):
            return
        if not self.settings.history.get("auto_sync_enabled", False):
            return
        if self._scheduler and self._scheduler.is_alive():
            return
        self._scheduler_stop.clear()
        self._scheduler = threading.Thread(target=self._scheduler_loop, name="pv-history-scheduler", daemon=True)
        self._scheduler.start()

    def _scheduler_loop(self) -> None:
        interval_seconds = max(int(self.settings.history.get("auto_sync_interval_minutes", 30)), 1) * 60
        if self.settings.history.get("auto_sync_on_start", False):
            try:
                self.start(auto=True)
            except Exception as exc:
                logger.warning("Unable to auto-start historical sync: %s", exc)

        while not self._scheduler_stop.wait(interval_seconds):
            try:
                if self._worker and self._worker.is_alive():
                    continue
                self.start(auto=True)
            except Exception as exc:
                logger.warning("Historical scheduler cycle failed: %s", exc)

    def _run_worker(
        self,
        *,
        start_date: date,
        end_date: date,
        chunk_days: int,
        force: bool,
        auto: bool,
    ) -> None:
        total_chunks = max(ceil(((end_date - start_date).days + 1) / chunk_days), 1)
        try:
            chunk_index = 0
            chunk_start = start_date
            while chunk_start <= end_date:
                if self._cancel_event.is_set():
                    self._record_event(level="warn", title="Anulowano", message="Import archiwalny anulowany przez uzytkownika.")
                    self._finish("cancelled", running=False, message="Import archiwalny anulowany przez uzytkownika.")
                    return

                chunk_index += 1
                chunk_end = min(chunk_start + timedelta(days=chunk_days - 1), end_date)
                self._update_chunk(chunk_index, total_chunks, chunk_start, chunk_end)
                imported, skipped, energy_kwh, cancelled = self._process_chunk(
                    chunk_index=chunk_index,
                    start_day=chunk_start,
                    end_day=chunk_end,
                    force=force,
                )
                if cancelled:
                    self._close_chunk(
                        chunk_index,
                        imported_days=imported,
                        skipped_days=skipped,
                        energy_kwh=energy_kwh,
                        state="cancelled",
                        note="Chunk zatrzymany podczas przetwarzania",
                    )
                    self._record_event(level="warn", title="Anulowano", message="Import archiwalny anulowany przez uzytkownika.")
                    self._finish("cancelled", running=False, message="Import archiwalny anulowany przez uzytkownika.")
                    return

                self._close_chunk(
                    chunk_index,
                    imported_days=imported,
                    skipped_days=skipped,
                    energy_kwh=energy_kwh,
                    state="completed",
                    note=f"Chunk zakonczony: import {imported}, pominiete {skipped}",
                )
                self._record_event(
                    level="success",
                    title=f"Chunk {chunk_index}/{total_chunks} zakonczony",
                    message=f"Zakres {chunk_start.isoformat()} -> {chunk_end.isoformat()}, import {imported}, pominiete {skipped}, energia {energy_kwh:.2f} kWh",
                    chunk_index=chunk_index,
                )
                chunk_start = chunk_end + timedelta(days=1)

            final_message = "Synchronizacja archiwalna zakonczona" if auto else "Import archiwalny zakonczony"
            self._record_event(level="success", title="Zakonczono", message=final_message)
            self._finish("completed", running=False, message=final_message)
        except Exception as exc:
            logger.exception("Historical import failed")
            self._record_event(level="error", title="Blad importu", message=str(exc))
            self._finish("failed", running=False, message="Import archiwalny zakonczyl sie bledem.", last_error=str(exc))

    def _process_chunk(self, *, chunk_index: int, start_day: date, end_day: date, force: bool) -> tuple[int, int, float, bool]:
        imported_days = 0
        skipped_days = 0
        energy_kwh = 0.0

        for day in self._date_range(start_day, end_day):
            if self._cancel_event.is_set():
                return imported_days, skipped_days, energy_kwh, True

            if not force and self.repository.has_day(day):
                skipped_days += 1
                self._advance_day(
                    day,
                    imported=False,
                    message=f"Pominieto {day.isoformat()} - dzien juz istnieje w cache",
                    level="warn",
                    title="Pominieto dzien",
                    chunk_index=chunk_index,
                )
                continue

            total, source, samples_count = self.energy.total_for_full_day(day)
            if samples_count <= 0:
                skipped_days += 1
                self._advance_day(
                    day,
                    imported=False,
                    message=f"Pominieto {day.isoformat()} - brak probek w InfluxDB",
                    level="warn",
                    title="Brak probek",
                    chunk_index=chunk_index,
                )
                continue

            self.repository.upsert_daily_energy(
                DailyEnergyRecord(
                    day=day,
                    energy_kwh=total,
                    source=source,
                    samples_count=samples_count,
                )
            )
            imported_days += 1
            energy_kwh += total
            self._advance_day(
                day,
                imported=True,
                message=f"Zaimportowano {day.isoformat()} ({total:.2f} kWh)",
                level="success",
                title="Zaimportowano dzien",
                chunk_index=chunk_index,
                energy_kwh=total,
            )

        return imported_days, skipped_days, round(energy_kwh, 3), False

    def _advance_day(
        self,
        day: date,
        *,
        imported: bool,
        message: str,
        level: str,
        title: str,
        chunk_index: int,
        energy_kwh: float | None = None,
    ) -> None:
        with self._state_lock:
            self._state.processed_days += 1
            if imported:
                self._state.imported_days += 1
            else:
                self._state.skipped_days += 1
            self._state.current_date = day
            self._state.message = message
            self._refresh_coverage(lock_held=True)
            self._refresh_runtime_metrics(lock_held=True)
        suffix = f" Energia: {energy_kwh:.2f} kWh." if imported and energy_kwh is not None else ""
        self._record_event(
            level=level,
            title=title,
            message=f"{message}.{suffix}" if not message.endswith(".") else f"{message}{suffix}",
            day=day,
            chunk_index=chunk_index,
        )

    def _update_chunk(self, chunk_index: int, total_chunks: int, chunk_start: date, chunk_end: date) -> None:
        chunk = HistoricalChunkProgress(
            chunk_index=chunk_index,
            total_chunks=total_chunks,
            start_date=chunk_start,
            end_date=chunk_end,
            state="running",
            started_at=datetime.utcnow(),
            note=f"Aktywny chunk {chunk_start.isoformat()} -> {chunk_end.isoformat()}",
        )
        with self._state_lock:
            self._state.current_chunk_start = chunk_start
            self._state.current_chunk_end = chunk_end
            self._state.active_chunk_index = chunk_index
            self._state.message = f"Przetwarzanie zakresu {chunk_start.isoformat()} -> {chunk_end.isoformat()}"
            self._upsert_chunk_locked(chunk)
            self._refresh_runtime_metrics(lock_held=True)
        self._record_event(
            level="info",
            title=f"Chunk {chunk_index}/{total_chunks}",
            message=f"Start zakresu {chunk_start.isoformat()} -> {chunk_end.isoformat()}",
            chunk_index=chunk_index,
        )

    def _close_chunk(
        self,
        chunk_index: int,
        *,
        imported_days: int,
        skipped_days: int,
        energy_kwh: float,
        state: str,
        note: str,
    ) -> None:
        with self._state_lock:
            existing = self._find_chunk_locked(chunk_index)
            started_at = existing.started_at if existing and existing.started_at else datetime.utcnow()
            finished_at = datetime.utcnow()
            processed_days = imported_days + skipped_days
            duration_seconds = max((finished_at - started_at).total_seconds(), 0.0)
            chunk = HistoricalChunkProgress(
                chunk_index=chunk_index,
                total_chunks=self._state.total_chunks,
                start_date=existing.start_date if existing else self._state.current_chunk_start or self._state.requested_start_date or date.today(),
                end_date=existing.end_date if existing else self._state.current_chunk_end or self._state.requested_end_date or date.today(),
                processed_days=processed_days,
                imported_days=imported_days,
                skipped_days=skipped_days,
                energy_kwh=round(energy_kwh, 3),
                state=state,
                started_at=started_at,
                finished_at=finished_at,
                duration_seconds=round(duration_seconds, 2),
                note=note,
            )
            self._upsert_chunk_locked(chunk)
            if state != "running":
                self._state.message = note
            self._refresh_runtime_metrics(lock_held=True)

    def _finish(
        self,
        state: str,
        *,
        running: bool,
        message: str,
        last_error: str | None = None,
    ) -> None:
        with self._state_lock:
            self._state.running = running
            self._state.state = state
            self._state.finished_at = datetime.utcnow()
            self._state.last_error = last_error
            self._state.message = message
            self._state.active_chunk_index = 0
            self._refresh_coverage(lock_held=True)
            self._refresh_available_bounds(lock_held=True)
            self._refresh_runtime_metrics(lock_held=True)

    def _resolve_range(self, *, start_date: date | None, end_date: date | None) -> tuple[date, date] | None:
        today = now_local().date()
        include_today = self.settings.history.get("include_today_in_sync", False)
        default_end = today if include_today else today - timedelta(days=1)
        resolved_end = end_date or default_end

        if start_date is None:
            coverage = self.repository.coverage()
            if coverage.last_day:
                resolved_start = coverage.last_day + timedelta(days=1)
            else:
                bootstrap_start = self.settings.history.get("bootstrap_start_date")
                if bootstrap_start:
                    resolved_start = date.fromisoformat(bootstrap_start)
                else:
                    available_start, _ = self._available_bounds()
                    resolved_start = available_start or resolved_end
        else:
            resolved_start = start_date

        if resolved_start > resolved_end:
            return None
        return resolved_start, resolved_end

    def _available_bounds(self) -> tuple[date | None, date | None]:
        now_utc = datetime.utcnow()
        cached = self._available_bounds_cache
        if cached and (now_utc - cached[0]).total_seconds() < 300:
            return cached[1], cached[2]

        available_start: date | None = None
        available_end: date | None = None
        metric = self.catalog.safe_get(self.settings.analytics.get("production_metric_id", "energy_total"))
        fallback = self.catalog.safe_get(self.settings.analytics.get("fallback_power_metric_id", "ac_power"))
        source_metric = metric or fallback
        if source_metric is not None:
            first_point = self.influx.first_value(source_metric)
            last_point = self.influx.last_value(source_metric)
            available_start = first_point.timestamp.astimezone(self.energy.tz).date() if first_point else None
            available_end = last_point.timestamp.astimezone(self.energy.tz).date() if last_point else None
        self._available_bounds_cache = (now_utc, available_start, available_end)
        return available_start, available_end

    def _refresh_coverage(self, *, lock_held: bool = False) -> None:
        coverage = self.repository.coverage()
        available_start, available_end = self._available_bounds()
        if available_start and available_end and available_start <= available_end:
            available_days = (available_end - available_start).days + 1
            missing_days = self.repository.count_missing_days(available_start, available_end)
            coverage.available_days = available_days
            coverage.missing_days = missing_days
            imported_in_range = max(available_days - missing_days, 0)
            coverage.coverage_pct = round((imported_in_range / available_days) * 100, 1) if available_days > 0 else None
        else:
            coverage.available_days = 0
            coverage.missing_days = 0
            coverage.coverage_pct = None

        if lock_held:
            self._state.coverage = coverage
        else:
            with self._state_lock:
                self._state.coverage = coverage

    def _refresh_available_bounds(self, *, lock_held: bool = False) -> None:
        available_start, available_end = self._available_bounds()
        if lock_held:
            self._state.available_start_date = available_start
            self._state.available_end_date = available_end
        else:
            with self._state_lock:
                self._state.available_start_date = available_start
                self._state.available_end_date = available_end

    def _refresh_runtime_metrics(self, *, lock_held: bool = False) -> None:
        def apply() -> None:
            if self._state.started_at is None:
                self._state.elapsed_seconds = None
                self._state.estimated_remaining_seconds = None
                self._state.avg_days_per_minute = None
                return

            end_reference = datetime.utcnow() if self._state.running or self._state.finished_at is None else self._state.finished_at
            elapsed_seconds = max((end_reference - self._state.started_at).total_seconds(), 0.0)
            self._state.elapsed_seconds = round(elapsed_seconds, 1)

            if self._state.processed_days > 0 and elapsed_seconds > 0:
                avg_days_per_minute = (self._state.processed_days / elapsed_seconds) * 60
                remaining_days = max(self._state.total_days - self._state.processed_days, 0)
                estimated_remaining = (remaining_days / self._state.processed_days) * elapsed_seconds
                self._state.avg_days_per_minute = round(avg_days_per_minute, 2)
                self._state.estimated_remaining_seconds = round(estimated_remaining, 1) if self._state.running else 0.0
            else:
                self._state.avg_days_per_minute = None
                self._state.estimated_remaining_seconds = None if self._state.running else 0.0

        if lock_held:
            apply()
        else:
            with self._state_lock:
                apply()

    def _record_event(
        self,
        *,
        level: str,
        title: str,
        message: str,
        day: date | None = None,
        chunk_index: int | None = None,
    ) -> None:
        event = HistoricalActivityEvent(
            timestamp=datetime.utcnow(),
            level=level,
            title=title,
            message=message,
            day=day,
            chunk_index=chunk_index,
        )
        with self._state_lock:
            self._state.recent_events.append(event)
            self._state.recent_events = self._state.recent_events[-self.MAX_RECENT_EVENTS :]

    def _find_chunk_locked(self, chunk_index: int) -> HistoricalChunkProgress | None:
        for chunk in self._state.recent_chunks:
            if chunk.chunk_index == chunk_index:
                return chunk
        return None

    def _upsert_chunk_locked(self, chunk: HistoricalChunkProgress) -> None:
        for index, existing in enumerate(self._state.recent_chunks):
            if existing.chunk_index == chunk.chunk_index:
                self._state.recent_chunks[index] = chunk
                break
        else:
            self._state.recent_chunks.append(chunk)
        self._state.recent_chunks = self._state.recent_chunks[-self.MAX_RECENT_CHUNKS :]

    @staticmethod
    def _date_range(start_day: date, end_day: date) -> Iterable[date]:
        current = start_day
        while current <= end_day:
            yield current
            current = current + timedelta(days=1)


@lru_cache(maxsize=1)
def get_historical_sync_service() -> HistoricalSyncService:
    return HistoricalSyncService()
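
Editor's note: a minimal sketch of driving the backfill from application code. The date range is illustrative; chunking, skip-if-cached, and cancellation are all handled inside the service, and start() returns immediately because the work runs on a daemon thread.

# Sketch: kick off a background backfill, then poll progress.
from datetime import date
from app.services.historical_sync import get_historical_sync_service

sync = get_historical_sync_service()
sync.start(start_date=date(2024, 1, 1), end_date=date(2024, 6, 30), chunk_days=7)
status = sync.status()
print(status.state, status.processed_days, "/", status.total_days)
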
241
backend/app/services/influx_http.py
Normal file
@@ -0,0 +1,241 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import ssl
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from typing import Iterable
|
||||
|
||||
from app.core_settings import AppSettings, get_settings
|
||||
from app.models.definitions import MetricDefinition, SeriesPoint
|
||||
from app.services.metrics import to_float
|
||||
from app.utils.time import to_utc_iso
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
||||
def _quote_identifier(value: str) -> str:
|
||||
return '"' + value.replace('"', '\\"') + '"'
|
||||
|
||||
|
||||
|
||||
def _quote_literal(value: str) -> str:
|
||||
return "'" + value.replace("\\", "\\\\").replace("'", "\\'") + "'"
|
||||
|
||||
|
||||
class InfluxHTTPService:
|
||||
def __init__(self, settings: AppSettings | None = None) -> None:
|
||||
self.settings = settings or get_settings()
|
||||
|
||||
@property
|
||||
def base_url(self) -> str:
|
||||
config = self.settings.influx
|
||||
return f"{config['scheme']}://{config['host']}:{config['port']}"
|
||||
|
||||
def latest_values(self, metrics: Iterable[MetricDefinition]) -> dict[str, dict]:
|
||||
grouped: dict[str, list[MetricDefinition]] = defaultdict(list)
|
||||
for metric in metrics:
|
||||
grouped[metric.measurement].append(metric)
|
||||
|
||||
payload: dict[str, dict] = {}
|
||||
for measurement, measurement_metrics in grouped.items():
|
||||
conditions = " OR ".join(
|
||||
f'("entity_id" = {_quote_literal(metric.entity_id)})'
|
||||
for metric in measurement_metrics
|
||||
)
|
||||
query = (
|
||||
f'SELECT LAST("value") AS value '
|
||||
f'FROM {_quote_identifier(measurement)} '
|
||||
f'WHERE {conditions} '
|
||||
f'GROUP BY "entity_id"'
|
||||
)
|
||||
try:
|
||||
for series in self._execute(query):
|
||||
entity_id = (series.get("tags") or {}).get("entity_id")
|
||||
if not entity_id:
|
||||
continue
|
||||
metric = next((item for item in measurement_metrics if item.entity_id == entity_id), None)
|
||||
if metric is None:
|
||||
continue
|
||||
row = self._row_from_series(series)
|
||||
payload[metric.id] = {
|
||||
"value": row.get("value"),
|
||||
"timestamp": _parse_time(row.get("time")),
|
||||
}
|
||||
except Exception as exc:
|
||||
logger.warning("Influx latest_values error for %s: %s", measurement, exc)
|
||||
return payload
|
||||
|
||||
def latest_value(self, metric: MetricDefinition) -> SeriesPoint | None:
|
||||
return self._single_value(
|
||||
f'SELECT LAST("value") AS value '
|
||||
f'FROM {_quote_identifier(metric.measurement)} '
|
||||
f'WHERE "entity_id" = {_quote_literal(metric.entity_id)}'
|
||||
)
|
||||
|
||||
def first_value(self, metric: MetricDefinition) -> SeriesPoint | None:
|
||||
return self._single_value(
|
||||
f'SELECT FIRST("value") AS value '
|
||||
f'FROM {_quote_identifier(metric.measurement)} '
|
||||
f'WHERE "entity_id" = {_quote_literal(metric.entity_id)}'
|
||||
)
|
||||
|
||||
def last_value(self, metric: MetricDefinition) -> SeriesPoint | None:
|
||||
return self.latest_value(metric)
|
||||
|
||||
def gauge_history(
|
||||
self,
|
||||
metric: MetricDefinition,
|
||||
start: datetime,
|
||||
end: datetime,
|
||||
interval: str,
|
||||
aggregate: str = "mean",
|
||||
) -> list[SeriesPoint]:
|
||||
query = (
|
||||
f'SELECT {aggregate}("value") AS value '
|
||||
f'FROM {_quote_identifier(metric.measurement)} '
|
||||
f'WHERE "entity_id" = {_quote_literal(metric.entity_id)} '
|
||||
f'AND time >= {_quote_literal(to_utc_iso(start))} '
|
||||
f'AND time <= {_quote_literal(to_utc_iso(end))} '
|
||||
f'GROUP BY time({interval}) fill(null)'
|
||||
)
|
||||
points: list[SeriesPoint] = []
|
||||
try:
|
||||
for series in self._execute(query):
|
||||
for row in self._rows_from_series(series):
|
||||
timestamp = _parse_time(row.get("time"))
|
||||
if timestamp is None:
|
||||
continue
|
||||
points.append(SeriesPoint(timestamp=timestamp, value=to_float(row.get("value"))))
|
||||
except Exception as exc:
|
||||
logger.warning("Influx gauge_history error for %s: %s", metric.id, exc)
|
||||
return points
|
||||
|
||||
def grouped_last_series(
|
||||
self,
|
||||
metric: MetricDefinition,
|
||||
start: datetime,
|
||||
end: datetime,
|
||||
interval: str,
|
||||
) -> list[SeriesPoint]:
|
||||
query = (
|
||||
f'SELECT LAST("value") AS value '
|
||||
f'FROM {_quote_identifier(metric.measurement)} '
|
||||
f'WHERE "entity_id" = {_quote_literal(metric.entity_id)} '
|
||||
f'AND time >= {_quote_literal(to_utc_iso(start))} '
|
||||
f'AND time <= {_quote_literal(to_utc_iso(end))} '
|
||||
f'GROUP BY time({interval}) fill(null)'
|
||||
)
|
||||
points: list[SeriesPoint] = []
|
||||
try:
|
||||
for series in self._execute(query):
|
||||
for row in self._rows_from_series(series):
|
||||
timestamp = _parse_time(row.get("time"))
|
||||
if timestamp is None:
|
||||
continue
|
||||
points.append(SeriesPoint(timestamp=timestamp, value=to_float(row.get("value"))))
|
||||
except Exception as exc:
|
||||
logger.warning("Influx grouped_last_series error for %s: %s", metric.id, exc)
|
||||
return points
|
||||
|
||||
def last_before(self, metric: MetricDefinition, moment: datetime) -> SeriesPoint | None:
|
||||
query = (
|
||||
f'SELECT LAST("value") AS value '
|
||||
f'FROM {_quote_identifier(metric.measurement)} '
|
||||
f'WHERE "entity_id" = {_quote_literal(metric.entity_id)} '
|
||||
f'AND time < {_quote_literal(to_utc_iso(moment))}'
|
||||
)
|
||||
try:
|
||||
series = self._execute(query)
|
||||
if not series:
|
||||
return None
|
||||
row = self._row_from_series(series[0])
|
||||
timestamp = _parse_time(row.get("time"))
|
||||
value = to_float(row.get("value"))
|
||||
if timestamp is None or value is None:
|
||||
return None
|
||||
return SeriesPoint(timestamp=timestamp, value=value)
|
||||
except Exception as exc:
|
||||
logger.warning("Influx last_before error for %s: %s", metric.id, exc)
|
||||
return None
|
||||
|
||||
def _single_value(self, query: str) -> SeriesPoint | None:
|
||||
try:
|
||||
series = self._execute(query)
|
||||
if not series:
|
||||
return None
|
||||
row = self._row_from_series(series[0])
|
||||
timestamp = _parse_time(row.get("time"))
|
||||
value = to_float(row.get("value"))
|
||||
if timestamp is None or value is None:
|
||||
return None
|
||||
return SeriesPoint(timestamp=timestamp, value=value)
|
||||
except Exception as exc:
|
||||
logger.warning("Influx single value query error: %s", exc)
|
||||
return None
|
||||
|
||||
    def _execute(self, query: str) -> list[dict]:
        params = {
            "db": self.settings.influx["database"],
            "q": query,
        }
        url = f"{self.base_url}/query?{urllib.parse.urlencode(params)}"
        headers = {"Accept": "application/json"}
        username = self.settings.influx.get("username") or ""
        password = self.settings.influx.get("password") or ""
        if username:
            token = base64.b64encode(f"{username}:{password}".encode("utf-8")).decode("ascii")
            headers["Authorization"] = f"Basic {token}"

        request = urllib.request.Request(url, headers=headers, method="GET")
        verify_ssl = self.settings.influx.get("verify_ssl", False)
        timeout = self.settings.influx.get("timeout_seconds", 15)
        context = None
        if self.settings.influx.get("scheme") == "https" and not verify_ssl:
            context = ssl._create_unverified_context()

        try:
            with urllib.request.urlopen(request, timeout=timeout, context=context) as response:
                payload = json.loads(response.read().decode("utf-8"))
        except urllib.error.HTTPError as exc:
            body = exc.read().decode("utf-8", errors="ignore")
            raise RuntimeError(f"Influx HTTP {exc.code}: {body}") from exc
        except urllib.error.URLError as exc:
            raise RuntimeError(f"Influx connection error: {exc}") from exc

        results = payload.get("results") or []
        if not results:
            return []
        result = results[0]
        if "error" in result:
            raise RuntimeError(result["error"])
        return result.get("series") or []

    @staticmethod
    def _rows_from_series(series: dict) -> list[dict]:
        columns = series.get("columns") or []
        rows = []
        for values in series.get("values") or []:
            rows.append(dict(zip(columns, values)))
        return rows

    @classmethod
    def _row_from_series(cls, series: dict) -> dict:
        rows = cls._rows_from_series(series)
        return rows[0] if rows else {}


def _parse_time(value: str | None) -> datetime | None:
    if not value:
        return None
    try:
        return datetime.fromisoformat(value.replace("Z", "+00:00"))
    except ValueError:
        return None
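A minimal usage sketch for the query helpers above, assuming a configured app where the catalog resolves the "ac_power" metric defined elsewhere in this commit; the timestamp is an arbitrary example:

from datetime import datetime, timezone

from app.core_settings import get_settings
from app.services.catalog import get_catalog
from app.services.influx_http import InfluxHTTPService

service = InfluxHTTPService(get_settings())
metric = get_catalog().get("ac_power")
# Last reading strictly before the given moment, or None on any error.
point = service.last_before(metric, datetime(2024, 6, 1, tzinfo=timezone.utc))
if point is not None:
    print(point.timestamp, point.value)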
123
backend/app/services/kiosk_settings.py
Normal file
@@ -0,0 +1,123 @@
from __future__ import annotations

from typing import Any

from flask import session

from app.core_settings import AppSettings, get_settings
from app.storage.kiosk_settings import SQLiteKioskSettingsRepository


VALID_MODES = {"public", "private"}
DEFAULT_WIDGETS = ["hero", "history", "strings", "status", "production", "comparison", "importStatus"]
VALID_WIDGETS = {"hero", "quickMetrics", "history", "status", "strings", "production", "comparison", "distribution", "importStatus"}
VALID_REALTIME_RANGES = {"today", "yesterday", "6h", "12h", "24h", "48h", "7d"}
VALID_ANALYTICS_RANGES = {"today", "yesterday", "7d", "30d", "90d", "365d", "custom"}


class KioskSettingsService:
    def __init__(self, settings: AppSettings | None = None) -> None:
        self.settings = settings or get_settings()
        self.repository = SQLiteKioskSettingsRepository(self.settings.storage["sqlite_path"])

    def get(self, mode: str) -> dict[str, Any]:
        normalized_mode = self._normalize_mode(mode)
        stored = self.repository.get(normalized_mode)
        if stored is None:
            return self._default_payload(normalized_mode)
        return self._sanitize_payload(normalized_mode, stored, persist_if_changed=False)

    def update(self, mode: str, payload: dict[str, Any], updated_by: str | None = None) -> dict[str, Any]:
        normalized_mode = self._normalize_mode(mode)
        merged = {**self.get(normalized_mode), **(payload or {})}
        cleaned = self._sanitize_payload(normalized_mode, merged, persist_if_changed=False)
        return self.repository.upsert(normalized_mode, cleaned, updated_by=updated_by)

    def update_from_session(self, mode: str, payload: dict[str, Any]) -> dict[str, Any]:
        updated_by = session.get("auth_user")
        return self.update(mode, payload, updated_by=updated_by)

    def _default_payload(self, mode: str) -> dict[str, Any]:
        return {
            "mode": mode,
            "widgets": list(DEFAULT_WIDGETS),
            "realtime_range": self._default_realtime_range(),
            "analytics_range": self._default_analytics_range(),
            "analytics_bucket": self._default_analytics_bucket(),
            "compare_mode": self._default_compare_mode(),
            "updated_at": None,
            "updated_by": None,
        }

    def _sanitize_payload(self, mode: str, payload: dict[str, Any], persist_if_changed: bool = False) -> dict[str, Any]:
        cleaned = {
            "mode": mode,
            "widgets": self._normalize_widgets(payload.get("widgets")),
            "realtime_range": self._normalize_realtime_range(payload.get("realtime_range")),
            "analytics_range": self._normalize_analytics_range(payload.get("analytics_range")),
            "analytics_bucket": self._normalize_bucket(payload.get("analytics_bucket")),
            "compare_mode": self._normalize_compare_mode(payload.get("compare_mode")),
            "updated_at": payload.get("updated_at"),
            "updated_by": payload.get("updated_by"),
        }
        if persist_if_changed:
            return self.repository.upsert(mode, cleaned, updated_by=cleaned.get("updated_by"))
        return cleaned

    def _normalize_mode(self, mode: str) -> str:
        normalized = (mode or "").strip().lower()
        if normalized not in VALID_MODES:
            raise ValueError("Mode must be one of: public, private")
        return normalized

    def _normalize_widgets(self, widgets: Any) -> list[str]:
        if not isinstance(widgets, list):
            return list(DEFAULT_WIDGETS)
        normalized: list[str] = []
        for item in widgets:
            widget = str(item or "").strip()
            if widget in VALID_WIDGETS and widget not in normalized:
                normalized.append(widget)
        return normalized or list(DEFAULT_WIDGETS)

    def _normalize_realtime_range(self, value: Any) -> str:
        normalized = str(value or self._default_realtime_range()).strip()
        return normalized if normalized in VALID_REALTIME_RANGES else self._default_realtime_range()

    def _normalize_analytics_range(self, value: Any) -> str:
        normalized = str(value or self._default_analytics_range()).strip()
        return normalized if normalized in VALID_ANALYTICS_RANGES else self._default_analytics_range()

    def _normalize_bucket(self, value: Any) -> str:
        normalized = str(value or self._default_analytics_bucket()).strip()
        return normalized if normalized in self.settings.analytics["bucket_labels"] else self._default_analytics_bucket()

    def _normalize_compare_mode(self, value: Any) -> str:
        normalized = str(value or self._default_compare_mode()).strip()
        return normalized if normalized in self.settings.analytics["compare_modes"] else self._default_compare_mode()

    def _default_realtime_range(self) -> str:
        raw = str(self.settings.realtime.get("history_default_range", "12h"))
        return raw if raw in VALID_REALTIME_RANGES else "12h"

    def _default_analytics_range(self) -> str:
        raw = str(self.settings.analytics.get("default_range", "30d"))
        return raw if raw in VALID_ANALYTICS_RANGES else "30d"

    def _default_analytics_bucket(self) -> str:
        raw = str(self.settings.analytics.get("default_bucket", "day"))
        return raw if raw in self.settings.analytics["bucket_labels"] else "day"

    def _default_compare_mode(self) -> str:
        raw = str(self.settings.analytics.get("default_compare", "none"))
        return raw if raw in self.settings.analytics["compare_modes"] else "none"


_kiosk_settings_service: KioskSettingsService | None = None


def get_kiosk_settings_service() -> KioskSettingsService:
    global _kiosk_settings_service
    if _kiosk_settings_service is None:
        _kiosk_settings_service = KioskSettingsService()
    return _kiosk_settings_service
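A minimal usage sketch for the service above, assuming the app's SQLite storage path is configured; "admin" is a hypothetical user name, not something shipped in this commit:

from app.services.kiosk_settings import get_kiosk_settings_service

service = get_kiosk_settings_service()
current = service.get("public")  # falls back to defaults when nothing is stored

# Unknown widget ids are dropped during sanitization; an empty result
# falls back to DEFAULT_WIDGETS.
saved = service.update(
    "public",
    {"widgets": ["hero", "status", "not_a_widget"], "analytics_bucket": "day"},
    updated_by="admin",  # hypothetical user name
)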
99
backend/app/services/metrics.py
Normal file
@@ -0,0 +1,99 @@
from __future__ import annotations

from datetime import datetime

from app.models.definitions import MetricDefinition, MetricValue


def to_float(value: float | str | None) -> float | None:
    if value is None:
        return None
    if isinstance(value, (float, int)):
        return float(value)
    try:
        return float(str(value).replace(",", "."))
    except (TypeError, ValueError):
        return None


def round_value(value: float | None, precision: int) -> float | None:
    if value is None:
        return None
    return round(value, precision)


def compare_delta_pct(current: float | None, previous: float | None) -> float | None:
    if current is None or previous in (None, 0):
        return None
    return round(((current - previous) / previous) * 100.0, 2)


def build_status(metric_id: str, numeric: float | None) -> str:
    if numeric is None:
        return "neutral"

    if metric_id == "inverter_temp":
        if numeric < 55:
            return "ok"
        if numeric < 70:
            return "warn"
        return "critical"

    return "ok"


def metric_value(
    metric: MetricDefinition,
    value: float | str | None,
    *,
    timestamp: datetime | None = None,
) -> MetricValue:
    rendered = value
    numeric = None
    if metric.kind != "text":
        numeric = to_float(value)
        rendered = round_value(numeric, metric.precision)

    return MetricValue(
        metric_id=metric.id,
        label=metric.label,
        unit=metric.unit,
        value=rendered,
        timestamp=timestamp,
        precision=metric.precision,
        kind=metric.kind,
        status=build_status(metric.id, numeric),
    )


def custom_metric_value(
    metric_id: str,
    label: str,
    value: float | str | None,
    *,
    unit: str = "",
    precision: int = 2,
    timestamp: datetime | None = None,
    status: str = "neutral",
    kind: str = "gauge",
) -> MetricValue:
    rendered = value
    if kind != "text":
        numeric = to_float(value)
        rendered = round_value(numeric, precision)
    return MetricValue(
        metric_id=metric_id,
        label=label,
        unit=unit,
        value=rendered,
        timestamp=timestamp,
        precision=precision,
        kind=kind,
        status=status,
    )
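A few worked examples for the helpers above; these are plain assertions against the functions as defined in this file, with no extra setup assumed:

from app.services.metrics import compare_delta_pct, round_value, to_float

assert to_float("3,14") == 3.14                # comma decimal separators are normalized
assert round_value(None, 2) is None            # None passes through untouched
assert compare_delta_pct(12.5, 10.0) == 25.0   # (12.5 - 10.0) / 10.0 * 100
assert compare_delta_pct(5.0, 0) is None       # a zero baseline yields no delta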
231
backend/app/services/realtime.py
Normal file
@@ -0,0 +1,231 @@
from __future__ import annotations

from datetime import datetime, timedelta

from app.core_settings import AppSettings, get_settings
from app.models.definitions import HeroCard, SnapshotGroupRow, SnapshotPayload
from app.services.catalog import MetricCatalog, get_catalog
from app.services.energy import EnergyService
from app.services.influx_http import InfluxHTTPService
from app.services.metrics import compare_delta_pct, custom_metric_value, metric_value, to_float
from app.utils.time import choose_power_interval, now_local, resolve_window, start_of_local_day


class RealtimeService:
    def __init__(
        self,
        settings: AppSettings | None = None,
        catalog: MetricCatalog | None = None,
        influx: InfluxHTTPService | None = None,
        energy: EnergyService | None = None,
    ) -> None:
        self.settings = settings or get_settings()
        self.catalog = catalog or get_catalog()
        self.influx = influx or InfluxHTTPService(self.settings)
        self.energy = energy or EnergyService(self.settings, self.catalog, self.influx)

    def snapshot(self) -> SnapshotPayload:
        now = now_local()
        today_start = start_of_local_day(now)
        yesterday_start = today_start - timedelta(days=1)

        metric_ids = {"ac_power", "energy_total", "inverter_temp"}
        for group in self.settings.strings:
            metric_ids.update(group.get("metrics", {}).values())

        metrics = [self.catalog.get(metric_id) for metric_id in metric_ids if self.catalog.safe_get(metric_id)]
        latest = self.influx.latest_values(metrics)

        ac_power = to_float(_value(latest, "ac_power"))
        total_dc_power = round(
            sum(
                to_float(_value(latest, group.get("metrics", {}).get("power", ""))) or 0.0
                for group in self.settings.strings
            ),
            0,
        )
        energy_today = self.energy.total_for_window(today_start, now)
        energy_yesterday = self.energy.total_for_window(yesterday_start, today_start)
        total_energy = to_float(_value(latest, "energy_total"))
        inverter_temp = to_float(_value(latest, "inverter_temp"))

        hero_cards = [
            self._hero_card("ac_power", ac_power, subtitle="Current AC power"),
            self._hero_card("dc_power_total", total_dc_power, label="Total DC power", unit="W", subtitle="Sum of DC strings"),
            self._hero_card("energy_today", energy_today, label="Energy today", unit="kWh", subtitle="Calculated from Influx data"),
            self._hero_card("energy_total", total_energy, label="Total energy", unit="kWh", subtitle="Lifetime counter"),
        ]
        if inverter_temp is not None:
            hero_cards.append(self._hero_card("inverter_temp", inverter_temp, label="Inverter temp.", unit="°C", subtitle="Optional sensor"))

        kpis = {
            "energy_today": custom_metric_value("energy_today", "Energy today", energy_today, unit="kWh", precision=2, status="ok"),
            "energy_yesterday": custom_metric_value("energy_yesterday", "Energy yesterday", energy_yesterday, unit="kWh", precision=2, status="ok"),
            "energy_total": custom_metric_value(
                "energy_total",
                "Total energy",
                total_energy,
                unit="kWh",
                precision=2,
                timestamp=_timestamp(latest, "energy_total"),
                status="ok",
            ),
            "dc_power_total": custom_metric_value("dc_power_total", "Total DC power", total_dc_power, unit="W", precision=0, status="ok"),
        }

        comparison = compare_delta_pct(energy_today, energy_yesterday)
        if comparison is not None:
            kpis["today_vs_yesterday"] = custom_metric_value(
                "today_vs_yesterday",
                "Today vs yesterday",
                comparison,
                unit="%",
                precision=2,
                status="ok" if comparison >= 0 else "warn",
            )

        strings = self._build_string_rows(latest)
        status = []
        if self.catalog.safe_get("inverter_temp"):
            status.append(
                metric_value(
                    self.catalog.get("inverter_temp"),
                    inverter_temp,
                    timestamp=_timestamp(latest, "inverter_temp"),
                )
            )
        status.append(
            custom_metric_value(
                "data_refresh",
                "Last energy reading",
                _timestamp(latest, "energy_total").isoformat() if _timestamp(latest, "energy_total") else None,
                status="ok" if _timestamp(latest, "energy_total") else "neutral",
                kind="text",
            )
        )

        updated_at = _max_timestamp(latest.values())
        return SnapshotPayload(
            updated_at=updated_at,
            hero_cards=hero_cards,
            kpis=kpis,
            strings=strings,
            phases=[],
            status=status,
            faults=[],
        )

    def history(self, range_key: str | None = None, start: str | None = None, end: str | None = None, metric_ids: list[str] | None = None) -> dict:
        window = resolve_window(range_key=range_key or self.settings.realtime["history_default_range"], start=start, end=end)
        interval = choose_power_interval(window.start, window.end)
        series = []

        selected = set(metric_ids or [])

        def include(metric_id: str) -> bool:
            return not selected or metric_id in selected

        ac_metric = self.catalog.safe_get("ac_power")
        if ac_metric is not None and include("ac_power"):
            series.append(
                {
                    "metric_id": ac_metric.id,
                    "label": ac_metric.label,
                    "unit": ac_metric.unit,
                    "points": self.influx.gauge_history(ac_metric, window.start, window.end, interval=interval, aggregate="mean"),
                }
            )

        for group in self.settings.strings:
            for slot, metric_id in group.get("metrics", {}).items():
                if not metric_id or not self.catalog.safe_get(metric_id) or not include(metric_id):
                    continue
                metric = self.catalog.get(metric_id)
                series.append(
                    {
                        "metric_id": metric.id,
                        "label": metric.label if slot != "power" else group["label"],
                        "unit": metric.unit,
                        "points": self.influx.gauge_history(metric, window.start, window.end, interval=interval, aggregate="mean"),
                    }
                )

        temp_metric = self.catalog.safe_get("inverter_temp")
        if temp_metric is not None and include("inverter_temp"):
            temp_points = self.influx.gauge_history(temp_metric, window.start, window.end, interval=interval, aggregate="mean")
            last_value = None
            filled = []
            for point in temp_points:
                value = point.value if point.value is not None else last_value
                if point.value is not None:
                    last_value = point.value
                filled.append({"timestamp": point.timestamp, "value": value})
            series.append(
                {
                    "metric_id": temp_metric.id,
                    "label": temp_metric.label,
                    "unit": temp_metric.unit,
                    "points": filled,
                }
            )

        return {
            "range_key": window.key,
            "start": window.start,
            "end": window.end,
            "series": series,
        }

    def _hero_card(self, metric_id: str, value, *, label: str | None = None, unit: str | None = None, subtitle: str = "") -> HeroCard:
        accent = "slate"
        numeric = to_float(value)
        if metric_id == "inverter_temp":
            if numeric is not None and numeric < 55:
                accent = "emerald"
            elif numeric is not None and numeric < 70:
                accent = "amber"
            elif numeric is not None:
                accent = "rose"
        else:
            accent = "emerald" if numeric not in (None, 0) else "slate"

        resolved_label = label or (self.catalog.get(metric_id).label if self.catalog.safe_get(metric_id) else metric_id)
        resolved_unit = unit or (self.catalog.get(metric_id).unit if self.catalog.safe_get(metric_id) else "")
        return HeroCard(
            metric_id=metric_id,
            label=resolved_label,
            value=value,
            unit=resolved_unit,
            accent=accent,
            subtitle=subtitle,
        )

    def _build_string_rows(self, latest: dict) -> list[SnapshotGroupRow]:
        rows = []
        for group in self.settings.strings:
            values = {}
            for slot, metric_id in group.get("metrics", {}).items():
                metric = self.catalog.safe_get(metric_id)
                if metric is None:
                    continue
                values[slot] = metric_value(metric, _value(latest, metric_id), timestamp=_timestamp(latest, metric_id))
            rows.append(SnapshotGroupRow(id=group["id"], label=group["label"], values=values, meta={}))
        return rows


def _value(latest: dict, metric_id: str):
    payload = latest.get(metric_id) or {}
    return payload.get("value")


def _timestamp(latest: dict, metric_id: str):
    payload = latest.get(metric_id) or {}
    return payload.get("timestamp")


def _max_timestamp(items) -> datetime | None:
    timestamps = [item.get("timestamp") for item in items if item.get("timestamp") is not None]
    return max(timestamps) if timestamps else None
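A minimal usage sketch for the realtime service above, assuming a configured backend with a reachable Influx instance; the fields accessed come straight from snapshot() and history() as defined in this file:

from app.services.realtime import RealtimeService

service = RealtimeService()

snapshot = service.snapshot()
print(snapshot.updated_at, [card.label for card in snapshot.hero_cards])

# Restrict the history series to a single metric id; other ids are
# skipped by the include() filter above.
history = service.history(range_key="12h", metric_ids=["ac_power"])
for entry in history["series"]:
    print(entry["label"], len(entry["points"]))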