first commit
This commit is contained in:
4
backend/app/storage/__init__.py
Normal file
4
backend/app/storage/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
from .sqlite_repository import SQLiteEnergyRepository
|
||||
from .auth_users import AuthUser, SQLiteAuthUserRepository
|
||||
|
||||
__all__ = ["SQLiteEnergyRepository", "AuthUser", "SQLiteAuthUserRepository"]
|
||||
132
backend/app/storage/auth_users.py
Normal file
132
backend/app/storage/auth_users.py
Normal file
@@ -0,0 +1,132 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Iterator
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class AuthUser:
    """Immutable snapshot of one row of the ``auth_users`` table."""

    username: str
    password_hash: str
    role: str
    display_name: str
    is_active: bool = True
    # Audit timestamps; None only for records never read back from the DB.
    created_at: datetime | None = None
    updated_at: datetime | None = None


class SQLiteAuthUserRepository:
    """SQLite-backed repository for authentication users.

    The schema is ensured at construction time.  Every public method opens
    a short-lived connection that commits on clean exit and always closes.
    """

    def __init__(self, db_path: str) -> None:
        self.db_path = Path(db_path)
        # sqlite will not create missing directories, so do it up front.
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self.ensure_schema()

    @contextmanager
    def connect(self) -> Iterator[sqlite3.Connection]:
        """Yield a WAL-mode connection; commit on clean exit, always close."""
        connection = sqlite3.connect(self.db_path)
        connection.row_factory = sqlite3.Row
        try:
            connection.execute("PRAGMA journal_mode=WAL")
            connection.execute("PRAGMA synchronous=NORMAL")
            yield connection
            connection.commit()
        finally:
            connection.close()

    def ensure_schema(self) -> None:
        """Create the ``auth_users`` table and its role index if absent."""
        with self.connect() as connection:
            connection.execute(
                """
                CREATE TABLE IF NOT EXISTS auth_users (
                    username TEXT PRIMARY KEY,
                    password_hash TEXT NOT NULL,
                    role TEXT NOT NULL,
                    display_name TEXT NOT NULL,
                    is_active INTEGER NOT NULL DEFAULT 1,
                    created_at TEXT NOT NULL,
                    updated_at TEXT NOT NULL
                )
                """
            )
            connection.execute(
                "CREATE INDEX IF NOT EXISTS idx_auth_users_role ON auth_users(role)"
            )

    @staticmethod
    def _row_to_user(record: sqlite3.Row) -> AuthUser:
        """Map one fetched row onto an :class:`AuthUser` value object."""
        return AuthUser(
            username=record["username"],
            password_hash=record["password_hash"],
            role=record["role"],
            display_name=record["display_name"],
            is_active=bool(record["is_active"]),
            created_at=datetime.fromisoformat(record["created_at"]),
            updated_at=datetime.fromisoformat(record["updated_at"]),
        )

    def get_by_username(self, username: str) -> AuthUser | None:
        """Return the user called *username*, or ``None`` when unknown."""
        with self.connect() as connection:
            record = connection.execute(
                """
                SELECT username, password_hash, role, display_name, is_active, created_at, updated_at
                FROM auth_users
                WHERE username = ?
                LIMIT 1
                """,
                (username,),
            ).fetchone()
        return None if record is None else self._row_to_user(record)

    def upsert_user(self, *, username: str, password_hash: str, role: str, display_name: str, is_active: bool = True) -> AuthUser:
        """Insert or overwrite a user and return the stored row.

        ``created_at`` is preserved on update: the conflict clause rewrites
        only the mutable columns.
        """
        timestamp = datetime.utcnow().isoformat()
        with self.connect() as connection:
            connection.execute(
                """
                INSERT INTO auth_users (username, password_hash, role, display_name, is_active, created_at, updated_at)
                VALUES (?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(username) DO UPDATE SET
                    password_hash = excluded.password_hash,
                    role = excluded.role,
                    display_name = excluded.display_name,
                    is_active = excluded.is_active,
                    updated_at = excluded.updated_at
                """,
                (username, password_hash, role, display_name, 1 if is_active else 0, timestamp, timestamp),
            )
        return self.get_by_username(username)  # type: ignore[return-value]

    def update_password(self, username: str, password_hash: str) -> AuthUser | None:
        """Set a new password hash; return the user, or ``None`` if missing."""
        timestamp = datetime.utcnow().isoformat()
        with self.connect() as connection:
            outcome = connection.execute(
                "UPDATE auth_users SET password_hash = ?, updated_at = ? WHERE username = ?",
                (password_hash, timestamp, username),
            )
            touched = outcome.rowcount > 0
        return self.get_by_username(username) if touched else None

    def list_users(self) -> list[AuthUser]:
        """Return all users ordered by role (descending) then username."""
        with self.connect() as connection:
            records = connection.execute(
                """
                SELECT username, password_hash, role, display_name, is_active, created_at, updated_at
                FROM auth_users
                ORDER BY role DESC, username ASC
                """
            ).fetchall()
        return [self._row_to_user(record) for record in records]
|
||||
87
backend/app/storage/kiosk_settings.py
Normal file
87
backend/app/storage/kiosk_settings.py
Normal file
@@ -0,0 +1,87 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Any, Iterator
|
||||
|
||||
|
||||
@dataclass
class KioskSettingsRecord:
    """Typed view of one kiosk-settings row.

    NOTE(review): the visible repository code stores and returns plain
    dicts and never constructs this record — confirm it is used elsewhere.
    """

    # Kiosk mode this configuration belongs to (primary key in the table).
    mode: str
    # Widget identifiers to display; semantics defined by the consumer.
    widgets: list[str]
    realtime_range: str
    analytics_range: str
    analytics_bucket: str
    compare_mode: str
    # Audit metadata; None until the record has been persisted.
    updated_at: datetime | None = None
    updated_by: str | None = None
|
||||
|
||||
|
||||
class SQLiteKioskSettingsRepository:
    """Persists per-mode kiosk settings as JSON blobs in SQLite.

    Each mode keys exactly one row; the settings payload is serialised to
    JSON, while audit fields live in their own columns.
    """

    def __init__(self, db_path: str) -> None:
        self.db_path = Path(db_path)
        # sqlite will not create missing directories, so do it up front.
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self.ensure_schema()

    @contextmanager
    def connect(self) -> Iterator[sqlite3.Connection]:
        """Yield a WAL-mode connection; commit on clean exit, always close."""
        connection = sqlite3.connect(self.db_path)
        connection.row_factory = sqlite3.Row
        try:
            connection.execute("PRAGMA journal_mode=WAL")
            connection.execute("PRAGMA synchronous=NORMAL")
            yield connection
            connection.commit()
        finally:
            connection.close()

    def ensure_schema(self) -> None:
        """Create the ``kiosk_settings`` table when it does not exist yet."""
        with self.connect() as connection:
            connection.execute(
                """
                CREATE TABLE IF NOT EXISTS kiosk_settings (
                    mode TEXT PRIMARY KEY,
                    payload_json TEXT NOT NULL,
                    updated_at TEXT NOT NULL,
                    updated_by TEXT
                )
                """
            )

    def get(self, mode: str) -> dict[str, Any] | None:
        """Return the settings for *mode* with audit fields merged in, or None."""
        with self.connect() as connection:
            record = connection.execute(
                "SELECT mode, payload_json, updated_at, updated_by FROM kiosk_settings WHERE mode = ? LIMIT 1",
                (mode,),
            ).fetchone()
        if record is None:
            return None
        settings = json.loads(record["payload_json"])
        # Column values win over anything that might linger in the JSON body.
        settings.update(
            mode=record["mode"],
            updated_at=record["updated_at"],
            updated_by=record["updated_by"],
        )
        return settings

    def upsert(self, mode: str, payload: dict[str, Any], updated_by: str | None = None) -> dict[str, Any]:
        """Insert or replace the settings for *mode* and return the stored view.

        Reserved keys (``mode``/``updated_at``/``updated_by``) are stripped
        from the payload so they never end up inside the JSON blob.
        """
        timestamp = datetime.utcnow().isoformat()
        body = {key: value for key, value in payload.items() if key not in ("mode", "updated_at", "updated_by")}
        with self.connect() as connection:
            connection.execute(
                """
                INSERT INTO kiosk_settings (mode, payload_json, updated_at, updated_by)
                VALUES (?, ?, ?, ?)
                ON CONFLICT(mode) DO UPDATE SET
                    payload_json = excluded.payload_json,
                    updated_at = excluded.updated_at,
                    updated_by = excluded.updated_by
                """,
                (mode, json.dumps(body, ensure_ascii=False), timestamp, updated_by),
            )
        return self.get(mode) or {"mode": mode, **body, "updated_at": timestamp, "updated_by": updated_by}
|
||||
131
backend/app/storage/sqlite_repository.py
Normal file
131
backend/app/storage/sqlite_repository.py
Normal file
@@ -0,0 +1,131 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime, date
|
||||
from pathlib import Path
|
||||
from typing import Iterator
|
||||
|
||||
from app.models import DailyEnergyRecord, HistoricalCoverage
|
||||
|
||||
|
||||
class SQLiteEnergyRepository:
    """SQLite store for per-day energy totals.

    Days are stored as ISO-8601 TEXT, so lexicographic comparison in SQL
    matches chronological order for the range queries below.
    """

    def __init__(self, db_path: str) -> None:
        self.db_path = Path(db_path)
        # sqlite will not create missing directories, so do it up front.
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self.ensure_schema()

    @contextmanager
    def connect(self) -> Iterator[sqlite3.Connection]:
        """Yield a WAL-mode connection; commit on clean exit, always close."""
        connection = sqlite3.connect(self.db_path)
        connection.row_factory = sqlite3.Row
        try:
            connection.execute("PRAGMA journal_mode=WAL")
            connection.execute("PRAGMA synchronous=NORMAL")
            yield connection
            connection.commit()
        finally:
            connection.close()

    def ensure_schema(self) -> None:
        """Create the ``daily_energy`` table and its imported_at index if absent."""
        with self.connect() as connection:
            connection.execute(
                """
                CREATE TABLE IF NOT EXISTS daily_energy (
                    day TEXT PRIMARY KEY,
                    energy_kwh REAL NOT NULL,
                    source TEXT NOT NULL,
                    samples_count INTEGER NOT NULL DEFAULT 0,
                    imported_at TEXT NOT NULL
                )
                """
            )
            connection.execute(
                "CREATE INDEX IF NOT EXISTS idx_daily_energy_imported_at ON daily_energy(imported_at)"
            )

    def has_day(self, day: date) -> bool:
        """Return True when an energy record exists for *day*."""
        with self.connect() as connection:
            hit = connection.execute(
                "SELECT 1 FROM daily_energy WHERE day = ? LIMIT 1",
                (day.isoformat(),),
            ).fetchone()
        return hit is not None

    def upsert_daily_energy(self, record: DailyEnergyRecord) -> None:
        """Insert or overwrite the row for ``record.day``."""
        # Fall back to "now" when the record carries no import timestamp.
        stamp = (record.imported_at or datetime.utcnow()).isoformat()
        with self.connect() as connection:
            connection.execute(
                """
                INSERT INTO daily_energy (day, energy_kwh, source, samples_count, imported_at)
                VALUES (?, ?, ?, ?, ?)
                ON CONFLICT(day) DO UPDATE SET
                    energy_kwh = excluded.energy_kwh,
                    source = excluded.source,
                    samples_count = excluded.samples_count,
                    imported_at = excluded.imported_at
                """,
                (
                    record.day.isoformat(),
                    float(record.energy_kwh),
                    record.source,
                    int(record.samples_count),
                    stamp,
                ),
            )

    def fetch_daily_energy(self, start_day: date, end_day: date) -> dict[date, DailyEnergyRecord]:
        """Return records for the inclusive [start_day, end_day] range, keyed by day."""
        with self.connect() as connection:
            rows = connection.execute(
                """
                SELECT day, energy_kwh, source, samples_count, imported_at
                FROM daily_energy
                WHERE day >= ? AND day <= ?
                ORDER BY day ASC
                """,
                (start_day.isoformat(), end_day.isoformat()),
            ).fetchall()

        return {
            date.fromisoformat(row["day"]): DailyEnergyRecord(
                day=date.fromisoformat(row["day"]),
                energy_kwh=float(row["energy_kwh"]),
                source=row["source"],
                samples_count=int(row["samples_count"]),
                imported_at=datetime.fromisoformat(row["imported_at"]),
            )
            for row in rows
        }

    def coverage(self) -> HistoricalCoverage:
        """Summarise how many days are stored and the span they cover."""
        with self.connect() as connection:
            summary = connection.execute(
                """
                SELECT
                    COUNT(*) AS imported_days,
                    MIN(day) AS first_day,
                    MAX(day) AS last_day,
                    COALESCE(SUM(energy_kwh), 0) AS total_energy_kwh
                FROM daily_energy
                """
            ).fetchone()

        if summary is None:
            return HistoricalCoverage()

        return HistoricalCoverage(
            imported_days=int(summary["imported_days"] or 0),
            first_day=date.fromisoformat(summary["first_day"]) if summary["first_day"] else None,
            last_day=date.fromisoformat(summary["last_day"]) if summary["last_day"] else None,
            total_energy_kwh=round(float(summary["total_energy_kwh"] or 0.0), 2),
        )

    def latest_day(self) -> date | None:
        """Most recent stored day, or None when the table is empty."""
        return self.coverage().last_day

    def count_missing_days(self, start_day: date, end_day: date) -> int:
        """Count days in the inclusive range that have no stored record."""
        stored = self.fetch_daily_energy(start_day, end_day)
        return sum(
            1
            for ordinal in range(start_day.toordinal(), end_day.toordinal() + 1)
            if date.fromordinal(ordinal) not in stored
        )
|
||||
Reference in New Issue
Block a user