poprawki i zmiany UX (UX fixes and changes)

This commit is contained in:
Mateusz Gruszczyński
2026-03-26 09:30:39 +01:00
parent fd0f645251
commit 138059945e
28 changed files with 1000 additions and 225 deletions

View File

@@ -82,7 +82,7 @@ class HistoricalSyncService:
with self._state_lock:
self._state.running = False
self._state.state = "idle"
self._state.message = "Brak brakujacych dni do importu."
self._state.message = "There are no missing days to import."
self._state.finished_at = datetime.utcnow()
self._refresh_coverage(lock_held=True)
self._refresh_available_bounds(lock_held=True)
@@ -92,7 +92,7 @@ class HistoricalSyncService:
resolved_start, resolved_end = resolved
total_days = (resolved_end - resolved_start).days + 1
total_chunks = max(ceil(total_days / chunk_days), 1)
start_message = "Start importu archiwalnego" if not auto else "Start automatycznej synchronizacji archiwum"
start_message = "Historical import started" if not auto else "Automatic historical sync started"
with self._state_lock:
if self._worker and self._worker.is_alive():
@@ -138,18 +138,18 @@ class HistoricalSyncService:
self._record_event(
level="info",
title="Uruchomiono zadanie",
message=f"Zakres {resolved_start.isoformat()} -> {resolved_end.isoformat()}, chunk {chunk_days} dni",
title="Job started",
message=f"Range {resolved_start.isoformat()} -> {resolved_end.isoformat()}, chunk size {chunk_days} days",
)
return self.status()
def cancel(self) -> HistoricalImportStatus:
self._cancel_event.set()
with self._state_lock:
self._state.message = "Anulowanie zadania..."
self._state.message = "Cancelling job..."
self._refresh_runtime_metrics(lock_held=True)
snapshot = copy.deepcopy(self._state)
self._record_event(level="warn", title="Anulowanie", message="Uzytkownik poprosil o zatrzymanie zadania.")
self._record_event(level="warn", title="Cancellation requested", message="The user requested the job to stop.")
return snapshot
def run_blocking(
@@ -185,8 +185,8 @@ class HistoricalSyncService:
)
self._record_event(
level="info",
title="Uruchomiono zadanie",
message=f"Zakres {resolved_start.isoformat()} -> {resolved_end.isoformat()}, chunk {chunk_days} dni",
title="Job started",
message=f"Range {resolved_start.isoformat()} -> {resolved_end.isoformat()}, chunk size {chunk_days} days",
)
self._run_worker(
start_date=resolved_start,
@@ -239,8 +239,8 @@ class HistoricalSyncService:
chunk_start = start_date
while chunk_start <= end_date:
if self._cancel_event.is_set():
self._record_event(level="warn", title="Anulowano", message="Import archiwalny anulowany przez uzytkownika.")
self._finish("cancelled", running=False, message="Import archiwalny anulowany przez uzytkownika.")
self._record_event(level="warn", title="Cancelled", message="Historical import was cancelled by the user.")
self._finish("cancelled", running=False, message="Historical import was cancelled by the user.")
return
chunk_index += 1
@@ -259,10 +259,10 @@ class HistoricalSyncService:
skipped_days=skipped,
energy_kwh=energy_kwh,
state="cancelled",
note="Chunk zatrzymany podczas przetwarzania",
note="Chunk stopped during processing",
)
self._record_event(level="warn", title="Anulowano", message="Import archiwalny anulowany przez uzytkownika.")
self._finish("cancelled", running=False, message="Import archiwalny anulowany przez uzytkownika.")
self._record_event(level="warn", title="Cancelled", message="Historical import was cancelled by the user.")
self._finish("cancelled", running=False, message="Historical import was cancelled by the user.")
return
self._close_chunk(
@@ -271,23 +271,23 @@ class HistoricalSyncService:
skipped_days=skipped,
energy_kwh=energy_kwh,
state="completed",
note=f"Chunk zakonczony: import {imported}, pominiete {skipped}",
note=f"Chunk completed: imported {imported}, skipped {skipped}",
)
self._record_event(
level="success",
title=f"Chunk {chunk_index}/{total_chunks} zakonczony",
message=f"Zakres {chunk_start.isoformat()} -> {chunk_end.isoformat()}, import {imported}, pominiete {skipped}, energia {energy_kwh:.2f} kWh",
title=f"Chunk {chunk_index}/{total_chunks} completed",
message=f"Range {chunk_start.isoformat()} -> {chunk_end.isoformat()}, imported {imported}, skipped {skipped}, energy {energy_kwh:.2f} kWh",
chunk_index=chunk_index,
)
chunk_start = chunk_end + timedelta(days=1)
final_message = "Synchronizacja archiwalna zakonczona" if auto else "Import archiwalny zakonczony"
self._record_event(level="success", title="Zakonczono", message=final_message)
final_message = "Historical synchronization completed" if auto else "Historical import completed"
self._record_event(level="success", title="Completed", message=final_message)
self._finish("completed", running=False, message=final_message)
except Exception as exc:
logger.exception("Historical import failed")
self._record_event(level="error", title="Blad importu", message=str(exc))
self._finish("failed", running=False, message="Import archiwalny zakonczyl sie bledem.", last_error=str(exc))
self._record_event(level="error", title="Import error", message=str(exc))
self._finish("failed", running=False, message="Historical import finished with an error.", last_error=str(exc))
def _process_chunk(self, *, chunk_index: int, start_day: date, end_day: date, force: bool) -> tuple[int, int, float, bool]:
imported_days = 0
@@ -303,9 +303,9 @@ class HistoricalSyncService:
self._advance_day(
day,
imported=False,
message=f"Pominieto {day.isoformat()} - dzien juz istnieje w cache",
message=f"Skipped {day.isoformat()} - day already exists in cache",
level="warn",
title="Pominieto dzien",
title="Day skipped",
chunk_index=chunk_index,
)
continue
@@ -316,9 +316,9 @@ class HistoricalSyncService:
self._advance_day(
day,
imported=False,
message=f"Pominieto {day.isoformat()} - brak probek w InfluxDB",
message=f"Skipped {day.isoformat()} - no samples in InfluxDB",
level="warn",
title="Brak probek",
title="No samples",
chunk_index=chunk_index,
)
continue
@@ -336,9 +336,9 @@ class HistoricalSyncService:
self._advance_day(
day,
imported=True,
message=f"Zaimportowano {day.isoformat()} ({total:.2f} kWh)",
message=f"Imported {day.isoformat()} ({total:.2f} kWh)",
level="success",
title="Zaimportowano dzien",
title="Day imported",
chunk_index=chunk_index,
energy_kwh=total,
)
@@ -366,7 +366,7 @@ class HistoricalSyncService:
self._state.message = message
self._refresh_coverage(lock_held=True)
self._refresh_runtime_metrics(lock_held=True)
suffix = f" Energia: {energy_kwh:.2f} kWh." if imported and energy_kwh is not None else ""
suffix = f" Energy: {energy_kwh:.2f} kWh." if imported and energy_kwh is not None else ""
self._record_event(
level=level,
title=title,
@@ -383,19 +383,19 @@ class HistoricalSyncService:
end_date=chunk_end,
state="running",
started_at=datetime.utcnow(),
note=f"Aktywny chunk {chunk_start.isoformat()} -> {chunk_end.isoformat()}",
note=f"Active chunk {chunk_start.isoformat()} -> {chunk_end.isoformat()}",
)
with self._state_lock:
self._state.current_chunk_start = chunk_start
self._state.current_chunk_end = chunk_end
self._state.active_chunk_index = chunk_index
self._state.message = f"Przetwarzanie zakresu {chunk_start.isoformat()} -> {chunk_end.isoformat()}"
self._state.message = f"Processing range {chunk_start.isoformat()} -> {chunk_end.isoformat()}"
self._upsert_chunk_locked(chunk)
self._refresh_runtime_metrics(lock_held=True)
self._record_event(
level="info",
title=f"Chunk {chunk_index}/{total_chunks}",
message=f"Start zakresu {chunk_start.isoformat()} -> {chunk_end.isoformat()}",
message=f"Starting range {chunk_start.isoformat()} -> {chunk_end.isoformat()}",
chunk_index=chunk_index,
)