diff --git a/.gitignore b/.gitignore index 82a1204..36c6a72 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,7 @@ apps/api/data/*.db # OS .DS_Store Thumbs.db +@eaDir/ # Docker docker-compose.override.yml diff --git a/apps/api/alembic/versions/20260216_1230_add_max_power_data_table.py b/apps/api/alembic/versions/20260216_1230_add_max_power_data_table.py new file mode 100644 index 0000000..920463c --- /dev/null +++ b/apps/api/alembic/versions/20260216_1230_add_max_power_data_table.py @@ -0,0 +1,89 @@ +"""Add max_power_data table for client mode. + +Revision ID: c9d3e7f1a2b4 +Revises: b2c3d4e5f6g7 +Create Date: 2026-02-16 12:30:00 +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "c9d3e7f1a2b4" +down_revision: Union[str, None] = "b2c3d4e5f6g7" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + bind = op.get_bind() + dialect = bind.dialect.name + + if dialect == "postgresql": + op.execute( + """ + CREATE TABLE IF NOT EXISTS max_power_data ( + id VARCHAR(36) PRIMARY KEY, + usage_point_id VARCHAR(14) NOT NULL, + date DATE NOT NULL, + interval_start VARCHAR(5), + value INTEGER NOT NULL, + source VARCHAR(50) DEFAULT 'myelectricaldata', + raw_data JSONB, + created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), + CONSTRAINT uq_max_power_data UNIQUE (usage_point_id, date) + ) + """ + ) + op.execute( + "CREATE INDEX IF NOT EXISTS ix_max_power_usage_point_date ON max_power_data(usage_point_id, date)" + ) + op.execute("CREATE INDEX IF NOT EXISTS ix_max_power_date ON max_power_data(date)") + else: + op.create_table( + "max_power_data", + sa.Column("id", sa.String(length=36), primary_key=True), + sa.Column("usage_point_id", sa.String(length=14), nullable=False), + sa.Column("date", sa.Date(), nullable=False), + 
sa.Column("interval_start", sa.String(length=5), nullable=True), + sa.Column("value", sa.Integer(), nullable=False), + sa.Column("source", sa.String(length=50), nullable=True, server_default="myelectricaldata"), + sa.Column("raw_data", sa.JSON(), nullable=True), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.UniqueConstraint("usage_point_id", "date", name="uq_max_power_data"), + ) + op.create_index( + "ix_max_power_usage_point_date", + "max_power_data", + ["usage_point_id", "date"], + ) + op.create_index("ix_max_power_date", "max_power_data", ["date"]) + + +def downgrade() -> None: + bind = op.get_bind() + dialect = bind.dialect.name + + if dialect == "postgresql": + op.execute("DROP INDEX IF EXISTS ix_max_power_usage_point_date") + op.execute("DROP INDEX IF EXISTS ix_max_power_date") + op.execute("DROP TABLE IF EXISTS max_power_data") + else: + op.drop_index("ix_max_power_usage_point_date", table_name="max_power_data") + op.drop_index("ix_max_power_date", table_name="max_power_data") + op.drop_table("max_power_data") diff --git a/apps/api/alembic/versions/20260223_fix_nulls_not_distinct.py b/apps/api/alembic/versions/20260223_fix_nulls_not_distinct.py new file mode 100644 index 0000000..afb8f74 --- /dev/null +++ b/apps/api/alembic/versions/20260223_fix_nulls_not_distinct.py @@ -0,0 +1,56 @@ +"""Fix unique constraints to use NULLS NOT DISTINCT + +For daily records, interval_start is NULL. PostgreSQL treats NULLs as distinct +in unique constraints by default, which means ON CONFLICT never fires for daily +records and duplicates can be inserted. This migration recreates the constraints +with NULLS NOT DISTINCT (PostgreSQL 15+). 
+ +Revision ID: c3d4e5f6g7h8 +Revises: c9d3e7f1a2b4 +Create Date: 2026-02-23 + +""" +from typing import Sequence, Union + +from alembic import op + +# revision identifiers, used by Alembic. +revision: str = "c3d4e5f6g7h8" +down_revision: Union[str, None] = "c9d3e7f1a2b4" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Recreate consumption_data unique constraint with NULLS NOT DISTINCT + op.execute("ALTER TABLE consumption_data DROP CONSTRAINT IF EXISTS uq_consumption_data") + op.execute(""" + ALTER TABLE consumption_data + ADD CONSTRAINT uq_consumption_data + UNIQUE NULLS NOT DISTINCT (usage_point_id, date, granularity, interval_start) + """) + + # Recreate production_data unique constraint with NULLS NOT DISTINCT + op.execute("ALTER TABLE production_data DROP CONSTRAINT IF EXISTS uq_production_data") + op.execute(""" + ALTER TABLE production_data + ADD CONSTRAINT uq_production_data + UNIQUE NULLS NOT DISTINCT (usage_point_id, date, granularity, interval_start) + """) + + +def downgrade() -> None: + # Revert to standard unique constraints + op.execute("ALTER TABLE consumption_data DROP CONSTRAINT IF EXISTS uq_consumption_data") + op.execute(""" + ALTER TABLE consumption_data + ADD CONSTRAINT uq_consumption_data + UNIQUE (usage_point_id, date, granularity, interval_start) + """) + + op.execute("ALTER TABLE production_data DROP CONSTRAINT IF EXISTS uq_production_data") + op.execute(""" + ALTER TABLE production_data + ADD CONSTRAINT uq_production_data + UNIQUE (usage_point_id, date, granularity, interval_start) + """) diff --git a/apps/api/src/adapters/myelectricaldata.py b/apps/api/src/adapters/myelectricaldata.py index 8250e79..bad85eb 100644 --- a/apps/api/src/adapters/myelectricaldata.py +++ b/apps/api/src/adapters/myelectricaldata.py @@ -159,7 +159,7 @@ async def get_consumption_daily( return await self._make_request( "GET", f"/enedis/consumption/daily/{usage_point_id}", - 
params={"start": start, "end": end}, + params={"start": start, "end": end, "use_cache": "true"}, ) async def get_consumption_detail( @@ -178,7 +178,7 @@ async def get_consumption_detail( return await self._make_request( "GET", f"/enedis/consumption/detail/{usage_point_id}", - params={"start": start, "end": end}, + params={"start": start, "end": end, "use_cache": "true"}, ) async def get_consumption_max_power( @@ -194,7 +194,7 @@ async def get_consumption_max_power( return await self._make_request( "GET", f"/enedis/power/{usage_point_id}", - params={"start": start, "end": end}, + params={"start": start, "end": end, "use_cache": "true"}, ) # ========================================================================= @@ -214,7 +214,7 @@ async def get_production_daily( return await self._make_request( "GET", f"/enedis/production/daily/{usage_point_id}", - params={"start": start, "end": end}, + params={"start": start, "end": end, "use_cache": "true"}, ) async def get_production_detail( @@ -230,7 +230,7 @@ async def get_production_detail( return await self._make_request( "GET", f"/enedis/production/detail/{usage_point_id}", - params={"start": start, "end": end}, + params={"start": start, "end": end, "use_cache": "true"}, ) # ========================================================================= diff --git a/apps/api/src/main.py b/apps/api/src/main.py index 935bcd8..09cb1c0 100644 --- a/apps/api/src/main.py +++ b/apps/api/src/main.py @@ -4,7 +4,7 @@ from fastapi import FastAPI, Request, status from fastapi.middleware.cors import CORSMiddleware -from fastapi.middleware.trustedhost import TrustedHostMiddleware + from fastapi.responses import JSONResponse, Response from fastapi.staticfiles import StaticFiles @@ -145,17 +145,6 @@ def get_servers() -> list[dict[str, str]]: # Mount static files for custom Swagger CSS app.mount("/static", StaticFiles(directory="/app/static"), name="static") -# Trusted Host middleware to handle proxy headers 
-app.add_middleware(TrustedHostMiddleware, allowed_hosts=[ - "myelectricaldata.fr", - "*.myelectricaldata.fr", # Allow all subdomains - "localhost", - "127.0.0.1", - "backend", - "backend-client", # Client mode Docker service name - "host.docker.internal", # Allow client mode to connect to server mode locally -]) - # CORS middleware - explicit origins required for credentials (httpOnly cookies) def get_cors_origins() -> list[str]: """Build CORS origins from settings""" diff --git a/apps/api/src/models/__init__.py b/apps/api/src/models/__init__.py index adace3e..745aeae 100644 --- a/apps/api/src/models/__init__.py +++ b/apps/api/src/models/__init__.py @@ -12,6 +12,7 @@ from .client_mode import ( ConsumptionData, ProductionData, + MaxPowerData, SyncStatus, SyncStatusType, ExportConfig, @@ -44,6 +45,7 @@ # Client mode models "ConsumptionData", "ProductionData", + "MaxPowerData", "SyncStatus", "SyncStatusType", "ExportConfig", diff --git a/apps/api/src/models/client_mode.py b/apps/api/src/models/client_mode.py index e6ed9cb..50d79b2 100644 --- a/apps/api/src/models/client_mode.py +++ b/apps/api/src/models/client_mode.py @@ -52,7 +52,7 @@ class ConsumptionData(Base, TimestampMixin): __tablename__ = "consumption_data" __table_args__ = ( - UniqueConstraint("usage_point_id", "date", "granularity", "interval_start", name="uq_consumption_data"), + UniqueConstraint("usage_point_id", "date", "granularity", "interval_start", name="uq_consumption_data", postgresql_nulls_not_distinct=True), Index("ix_consumption_usage_point_date", "usage_point_id", "date"), Index("ix_consumption_granularity_date", "granularity", "date"), ) @@ -88,7 +88,7 @@ class ProductionData(Base, TimestampMixin): __tablename__ = "production_data" __table_args__ = ( - UniqueConstraint("usage_point_id", "date", "granularity", "interval_start", name="uq_production_data"), + UniqueConstraint("usage_point_id", "date", "granularity", "interval_start", name="uq_production_data", postgresql_nulls_not_distinct=True), 
Index("ix_production_usage_point_date", "usage_point_id", "date"), Index("ix_production_granularity_date", "granularity", "date"), ) @@ -115,6 +115,39 @@ def __repr__(self) -> str: return f"" +class MaxPowerData(Base, TimestampMixin): + """Store daily maximum power data from MyElectricalData API. + + One row per day and per usage point, containing: + - maximum power value (W) + - time of the peak interval (HH:MM) + """ + + __tablename__ = "max_power_data" + __table_args__ = ( + UniqueConstraint("usage_point_id", "date", name="uq_max_power_data"), + Index("ix_max_power_usage_point_date", "usage_point_id", "date"), + Index("ix_max_power_date", "date"), + ) + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + usage_point_id: Mapped[str] = mapped_column(String(14), nullable=False, index=True) + date: Mapped[date] = mapped_column(Date, nullable=False, index=True) + + # Start time of interval containing the max power for this day (HH:MM). + interval_start: Mapped[str | None] = mapped_column(String(5), nullable=True) + + # Maximum power value in W. 
+ value: Mapped[int] = mapped_column(Integer, nullable=False) + + # Source metadata + source: Mapped[str] = mapped_column(String(50), default="myelectricaldata") + raw_data: Mapped[dict[str, Any] | None] = mapped_column(JSON, nullable=True) + + def __repr__(self) -> str: + return f"" + + class SyncStatusType(str, enum.Enum): """Sync operation status""" diff --git a/apps/api/src/routers/ecowatt.py b/apps/api/src/routers/ecowatt.py index cc9850b..bd92765 100644 --- a/apps/api/src/routers/ecowatt.py +++ b/apps/api/src/routers/ecowatt.py @@ -349,7 +349,7 @@ async def refresh_ecowatt_cache( from ..services.sync import SyncService sync_service = SyncService(db) result = await sync_service.sync_ecowatt() - updated_count = result.get("synced", 0) + updated_count = int(result.get("created", 0)) + int(result.get("updated", 0)) else: # Server mode: fetch from RTE API updated_count = await rte_service.update_ecowatt_cache(db) @@ -376,4 +376,4 @@ async def refresh_ecowatt_cache( raise HTTPException( status_code=500, detail=f"Failed to refresh EcoWatt cache: {str(e)}" - ) \ No newline at end of file + ) diff --git a/apps/api/src/routers/enedis_client.py b/apps/api/src/routers/enedis_client.py index 058eeb7..ea30ff8 100644 --- a/apps/api/src/routers/enedis_client.py +++ b/apps/api/src/routers/enedis_client.py @@ -31,6 +31,12 @@ logger = logging.getLogger(__name__) +# Lightweight in-process caches to avoid hammering remote gateway +_POWER_CACHE_TTL = timedelta(hours=6) +_DETAIL_CHUNK_BACKOFF_TTL = timedelta(hours=12) +_power_cache: dict[str, tuple[datetime, dict]] = {} +_detail_chunk_backoff: dict[str, datetime] = {} + router = APIRouter( prefix="/enedis", tags=["Enedis Data (via Gateway)"], @@ -78,9 +84,198 @@ def extract_readings_from_response(response: dict) -> list[dict]: # Try different response structures meter_reading = response.get("meter_reading", {}) if not meter_reading and "data" in response: - meter_reading = response.get("data", {}).get("meter_reading", {}) + data = 
response.get("data") + if isinstance(data, dict): + meter_reading = data.get("meter_reading", {}) + + if not isinstance(meter_reading, dict): + return [] + + interval_reading = meter_reading.get("interval_reading", []) + return interval_reading if isinstance(interval_reading, list) else [] + + +def _extract_day(date_str: str) -> str: + """Extract YYYY-MM-DD from reading date string.""" + if not date_str: + return "" + if "T" in date_str: + return date_str.split("T", 1)[0][:10] + if " " in date_str: + return date_str.split(" ", 1)[0][:10] + return date_str[:10] + + +def _merge_daily_readings(readings: list[dict]) -> list[dict]: + """Deduplicate daily readings by date (keep highest value).""" + by_day: dict[str, dict] = {} + for reading in readings: + day = _extract_day(str(reading.get("date", ""))) + if not day: + continue + try: + value = int(float(reading.get("value", 0))) + except (TypeError, ValueError): + value = 0 + + current = by_day.get(day) + if current is None or value >= int(float(current.get("value", 0))): + by_day[day] = {"date": day, "value": value} + + return [by_day[d] for d in sorted(by_day.keys())] + + +def _extract_day_and_time(date_str: str) -> tuple[str, str]: + """Extract YYYY-MM-DD and HH:MM from a reading date string.""" + if not date_str: + return "", "00:00" + + if "T" in date_str: + day_part, time_part = date_str.split("T", 1) + elif " " in date_str: + day_part, time_part = date_str.split(" ", 1) + else: + return date_str[:10], "00:00" + + # Keep only HH:MM and ignore seconds/timezone suffixes. 
+ hhmm = time_part[:5] if len(time_part) >= 5 else "00:00" + return day_part[:10], hhmm + + +def _format_power_response( + usage_point_id: str, + start: str, + end: str, + readings: list[dict], + from_cache: bool = False, +) -> dict: + """Format max-power payload in Enedis-compatible shape.""" + return { + "meter_reading": { + "usage_point_id": usage_point_id, + "start": start, + "end": end, + "reading_type": { + "unit": "W", + "measurement_kind": "power", + "aggregate": "maximum", + }, + "interval_reading": readings, + }, + "_from_local_cache": from_cache, + } + + +def _merge_power_interval_readings(readings: list[dict]) -> list[dict]: + """Deduplicate power readings by day, keeping the highest daily value.""" + by_day: dict[str, tuple[int, str]] = {} + + for reading in readings: + day, hhmm = _extract_day_and_time(str(reading.get("date", ""))) + if not day: + continue + + try: + value = int(float(reading.get("value", 0))) + except (TypeError, ValueError): + value = 0 + + current = by_day.get(day) + if current is None or value > current[0] or (value == current[0] and hhmm > current[1]): + by_day[day] = (value, hhmm) + + merged: list[dict] = [] + for day in sorted(by_day.keys()): + value, hhmm = by_day[day] + merged.append( + { + "date": f"{day} {hhmm}:00", + "value": value, + } + ) + return merged + + +async def _get_power_data_local_first( + db: AsyncSession, + usage_point_id: str, + start: str, + end: str, + start_date: date, + end_date: date, + use_cache: bool, +) -> dict: + """Get max power with local-first strategy on dedicated max_power_data cache.""" + cache_key = f"{usage_point_id}:{start}:{end}" + now = datetime.now() + + if use_cache: + cached = _power_cache.get(cache_key) + if cached and cached[0] > now: + return cached[1] + + if use_cache: + local_service = LocalDataService(db) + local_readings, missing_ranges = await local_service.get_consumption_max_power( + usage_point_id=usage_point_id, + start_date=start_date, + end_date=end_date, + ) + + if not 
missing_ranges: + payload = _format_power_response( + usage_point_id=usage_point_id, + start=start, + end=end, + readings=_merge_power_interval_readings(local_readings), + from_cache=True, + ) + _power_cache[cache_key] = (now + _POWER_CACHE_TTL, payload) + return payload + + # Fill only missing day-ranges from gateway to limit remote calls. + all_readings = list(local_readings) + adapter = get_med_adapter() + for range_start, range_end in missing_ranges: + try: + response = await adapter.get_consumption_max_power( + usage_point_id, + range_start.isoformat(), + range_end.isoformat(), + ) + remote_data = extract_gateway_data(response) + remote_readings = extract_readings_from_response(remote_data) + all_readings.extend(remote_readings) + await local_service.save_consumption_max_power(usage_point_id, remote_data) + except Exception as exc: + logger.warning( + f"[{usage_point_id}] Failed to fetch max power for missing range " + f"{range_start} -> {range_end}: {exc}" + ) - return meter_reading.get("interval_reading", []) + # Rebuild from DB for canonical values after upsert. + refreshed_local, _ = await local_service.get_consumption_max_power( + usage_point_id=usage_point_id, + start_date=start_date, + end_date=end_date, + ) + source_readings = refreshed_local if refreshed_local else all_readings + merged = _merge_power_interval_readings(source_readings) + combined = _format_power_response( + usage_point_id=usage_point_id, + start=start, + end=end, + readings=merged, + from_cache=False, + ) + _power_cache[cache_key] = (now + _POWER_CACHE_TTL, combined) + return combined + + # Direct gateway fetch when cache/local strategy is disabled. 
+ adapter = get_med_adapter() + response = await adapter.get_consumption_max_power(usage_point_id, start, end) + data = extract_gateway_data(response) + return data # ========================================================================= @@ -242,13 +437,14 @@ async def get_consumption_daily( # If no missing ranges, return local data only if not missing_ranges: + merged_readings = _merge_daily_readings(all_readings) logger.info( f"[{usage_point_id}] Daily consumption fully served from local cache " - f"({len(local_data)} records)" + f"({len(merged_readings)} records)" ) return APIResponse( success=True, - data=format_daily_response(usage_point_id, start, end, all_readings, from_cache=True), + data=format_daily_response(usage_point_id, start, end, merged_readings, from_cache=True), ) # Fetch only missing ranges from gateway @@ -271,8 +467,8 @@ async def get_consumption_daily( f"[{usage_point_id}] Failed to fetch {range_start} to {range_end}: {e}" ) - # Sort by date - all_readings.sort(key=lambda x: x.get("date", "")) + # Sort + dedup by date + all_readings = _merge_daily_readings(all_readings) return APIResponse( success=True, @@ -400,7 +596,7 @@ async def get_max_power( usage_point_id: str = Path(..., description="Point de livraison (14 chiffres)"), start: str = Query(..., description="Date de début (YYYY-MM-DD)"), end: str = Query(..., description="Date de fin (YYYY-MM-DD)"), - use_cache: bool = Query(False, description="Use cached data if available (ignored in client mode)"), + use_cache: bool = Query(True, description="Use local cache and only fetch missing data"), current_user: User = Depends(get_current_user), db: AsyncSession = Depends(get_db), ) -> APIResponse: @@ -416,9 +612,24 @@ async def get_max_power( ) try: - adapter = get_med_adapter() - response = await adapter.get_consumption_max_power(usage_point_id, start, end) - data = extract_gateway_data(response) + start_date = parse_date(start) + end_date = parse_date(end) + except ValueError as e: + return 
APIResponse( + success=False, + error=ErrorDetail(code="INVALID_DATE", message=str(e)), + ) + + try: + data = await _get_power_data_local_first( + db=db, + usage_point_id=usage_point_id, + start=start, + end=end, + start_date=start_date, + end_date=end_date, + use_cache=use_cache, + ) return APIResponse(success=True, data=data) except Exception as e: logger.error(f"[{usage_point_id}] Error fetching max power: {e}") @@ -480,13 +691,14 @@ async def get_production_daily( # If no missing ranges, return local data only if not missing_ranges: + merged_readings = _merge_daily_readings(all_readings) logger.info( f"[{usage_point_id}] Daily production fully served from local cache " - f"({len(local_data)} records)" + f"({len(merged_readings)} records)" ) return APIResponse( success=True, - data=format_daily_response(usage_point_id, start, end, all_readings, from_cache=True), + data=format_daily_response(usage_point_id, start, end, merged_readings, from_cache=True), ) # Fetch only missing ranges from gateway @@ -509,8 +721,8 @@ async def get_production_daily( f"[{usage_point_id}] Failed to fetch production {range_start} to {range_end}: {e}" ) - # Sort by date - all_readings.sort(key=lambda x: x.get("date", "")) + # Sort + dedup by date + all_readings = _merge_daily_readings(all_readings) return APIResponse( success=True, @@ -643,7 +855,7 @@ async def get_power( usage_point_id: str = Path(..., description="Point de livraison (14 chiffres)"), start: str = Query(..., description="Date de début (YYYY-MM-DD)"), end: str = Query(..., description="Date de fin (YYYY-MM-DD)"), - use_cache: bool = Query(False, description="Use cached data if available (ignored in client mode)"), + use_cache: bool = Query(True, description="Use local cache and only fetch missing data"), current_user: User = Depends(get_current_user), db: AsyncSession = Depends(get_db), ) -> APIResponse: @@ -659,9 +871,24 @@ async def get_power( ) try: - adapter = get_med_adapter() - response = await 
adapter.get_consumption_max_power(usage_point_id, start, end) - data = extract_gateway_data(response) + start_date = parse_date(start) + end_date = parse_date(end) + except ValueError as e: + return APIResponse( + success=False, + error=ErrorDetail(code="INVALID_DATE", message=str(e)), + ) + + try: + data = await _get_power_data_local_first( + db=db, + usage_point_id=usage_point_id, + start=start, + end=end, + start_date=start_date, + end_date=end_date, + use_cache=use_cache, + ) return APIResponse(success=True, data=data) except Exception as e: logger.error(f"[{usage_point_id}] Error fetching max power: {e}") @@ -736,6 +963,11 @@ async def get_consumption_detail_batch( current_start = range_start while current_start < range_end: chunk_end = min(current_start + timedelta(days=7), range_end) + backoff_key = f"consumption:{usage_point_id}:{current_start.isoformat()}:{chunk_end.isoformat()}" + backoff_until = _detail_chunk_backoff.get(backoff_key) + if backoff_until and backoff_until > datetime.now(): + current_start = chunk_end + continue try: response = await adapter.get_consumption_detail( usage_point_id, @@ -743,8 +975,11 @@ async def get_consumption_detail_batch( chunk_end.isoformat(), ) gateway_readings = extract_readings_from_response(response) + if not gateway_readings: + _detail_chunk_backoff[backoff_key] = datetime.now() + _DETAIL_CHUNK_BACKOFF_TTL all_readings.extend(gateway_readings) except Exception as chunk_error: + _detail_chunk_backoff[backoff_key] = datetime.now() + _DETAIL_CHUNK_BACKOFF_TTL logger.warning( f"[{usage_point_id}] Chunk {current_start} - {chunk_end} échoué: {chunk_error}" ) @@ -851,6 +1086,11 @@ async def get_production_detail_batch( current_start = range_start while current_start < range_end: chunk_end = min(current_start + timedelta(days=7), range_end) + backoff_key = f"production:{usage_point_id}:{current_start.isoformat()}:{chunk_end.isoformat()}" + backoff_until = _detail_chunk_backoff.get(backoff_key) + if backoff_until and 
backoff_until > datetime.now(): + current_start = chunk_end + continue try: response = await adapter.get_production_detail( usage_point_id, @@ -858,8 +1098,11 @@ async def get_production_detail_batch( chunk_end.isoformat(), ) gateway_readings = extract_readings_from_response(response) + if not gateway_readings: + _detail_chunk_backoff[backoff_key] = datetime.now() + _DETAIL_CHUNK_BACKOFF_TTL all_readings.extend(gateway_readings) except Exception as chunk_error: + _detail_chunk_backoff[backoff_key] = datetime.now() + _DETAIL_CHUNK_BACKOFF_TTL logger.warning( f"[{usage_point_id}] Chunk {current_start} - {chunk_end} échoué: {chunk_error}" ) diff --git a/apps/api/src/routers/export.py b/apps/api/src/routers/export.py index 34ffde1..6c18f71 100644 --- a/apps/api/src/routers/export.py +++ b/apps/api/src/routers/export.py @@ -202,6 +202,8 @@ async def get_export_config( "export_consumption": config.export_consumption, "export_production": config.export_production, "export_detailed": config.export_detailed, + "export_interval_minutes": config.export_interval_minutes, + "next_export_at": config.next_export_at.isoformat() if config.next_export_at else None, "last_export_at": config.last_export_at.isoformat() if config.last_export_at else None, "last_export_status": config.last_export_status, "last_export_error": config.last_export_error, @@ -302,15 +304,18 @@ async def update_export_config( if data.export_detailed is not None: config.export_detailed = data.export_detailed # Gestion de export_interval_minutes : - # - Si non fourni (None) : on ne modifie pas - # - Si fourni avec valeur > 0 : on applique - # - Si fourni avec valeur <= 0 : on met à null (désactive la planification) - if data.export_interval_minutes is not None: - if data.export_interval_minutes > 0: + # - Si champ absent : on ne modifie pas + # - Si champ présent avec valeur > 0 : on applique + # - Si champ présent avec null/0/négatif : on désactive la planification + provided_fields = getattr(data, 
"model_fields_set", None) + if provided_fields is None: + provided_fields = getattr(data, "__fields_set__", set()) + + if "export_interval_minutes" in provided_fields: + if data.export_interval_minutes is not None and data.export_interval_minutes > 0: logger.info(f"[EXPORT] Setting export_interval_minutes: {config.export_interval_minutes} -> {data.export_interval_minutes}") config.export_interval_minutes = data.export_interval_minutes else: - # 0 ou négatif = désactiver la planification logger.warning( f"[EXPORT] Disabling scheduled export (received {data.export_interval_minutes}): " f"{config.export_interval_minutes} -> None" diff --git a/apps/api/src/scheduler.py b/apps/api/src/scheduler.py index 239dcae..cd42d8b 100644 --- a/apps/api/src/scheduler.py +++ b/apps/api/src/scheduler.py @@ -1,9 +1,9 @@ """Scheduler for Client Mode Runs background tasks: -- Sync data from MyElectricalData API every 30 minutes -- Run exports after each sync -- Sync Tempo every 15 min (6h-23h) if tomorrow's color is unknown +- Sync data from MyElectricalData API at 8h05, 12h05, 18h05 (Europe/Paris) +- Run all exports immediately after each sync completes +- Sync Tempo at 7h + 11h (colors published around 6h and 10h40) - Sync EcoWatt at 12h15 (friday) and 17h (daily) if J+3 is incomplete Uses APScheduler for task scheduling. @@ -36,7 +36,7 @@ class SyncScheduler: """Scheduler for automatic data synchronization - Runs sync every 30 minutes to fetch new data from MyElectricalData API. + Syncs data at 8h05, 12h05, 18h05 (Europe/Paris) and runs exports after each sync. 
""" def __init__(self) -> None: @@ -64,14 +64,31 @@ def start(self) -> None: self._scheduler = AsyncIOScheduler() - # Add sync job - runs every 30 minutes + # Sync au démarrage (redémarrage service à n'importe quelle heure) self._scheduler.add_job( self._run_sync, - trigger=IntervalTrigger(minutes=30), - id="sync_all", - name="Sync all PDLs from MyElectricalData API", + id="sync_all_startup", + name="Sync all PDLs (startup)", + replace_existing=True, + next_run_time=datetime.now(UTC), + ) + + # Sync matinale : toutes les 30 min de 6h à 9h + self._scheduler.add_job( + self._run_sync, + trigger=CronTrigger(hour="6-9", minute="*/30"), + id="sync_all_morning", + name="Sync all PDLs (morning 6h-9h every 30min)", + replace_existing=True, + ) + + # Checks ponctuels en journée + self._scheduler.add_job( + self._run_sync, + trigger=CronTrigger(hour="12,18", minute=0), + id="sync_all_daytime", + name="Sync all PDLs (daytime 12h/18h)", replace_existing=True, - next_run_time=datetime.now(UTC), # Run immediately on start ) # Add export scheduler job - runs every minute to check for due exports @@ -83,26 +100,16 @@ def start(self) -> None: replace_existing=True, ) - # Add Tempo sync job - runs every 15 minutes from 6h to 23h - # + run immédiat au démarrage pour remplir l'historique si absent + # Tempo : 2 appels/jour (7h couleur du jour, 11h couleur de demain) self._scheduler.add_job( self._run_tempo_sync, - trigger=CronTrigger(minute="*/15", hour="6-23"), + trigger=CronTrigger(hour="7,11", minute=0), id="sync_tempo", - name="Sync Tempo calendar from gateway", - replace_existing=True, - ) - # Sync initiale Tempo au démarrage (indépendante du cron) - self._scheduler.add_job( - self._run_tempo_sync, - id="sync_tempo_startup", - name="Sync Tempo calendar (startup)", + name="Sync Tempo calendar from gateway (7h + 11h)", replace_existing=True, - next_run_time=datetime.now(UTC), ) - # Add EcoWatt sync jobs - # 1. 
Daily at 17h00 - RTE updates J+3 around 17h + # EcoWatt : ~1-2 appels/jour (RTE publie vers 17h, vendredi 12h15) self._scheduler.add_job( self._run_ecowatt_sync, trigger=CronTrigger(hour=17, minute=0), @@ -110,7 +117,6 @@ def start(self) -> None: name="Sync EcoWatt daily at 17h", replace_existing=True, ) - # 2. Friday at 12h15 - RTE updates earlier on Fridays self._scheduler.add_job( self._run_ecowatt_sync, trigger=CronTrigger(day_of_week="fri", hour=12, minute=15), @@ -118,42 +124,43 @@ def start(self) -> None: name="Sync EcoWatt Friday at 12h15", replace_existing=True, ) - # 3. Check every hour if J+3 data is complete (fallback) + # Fallback conditionnel : 2 appels/jour max si données J+3 incomplètes self._scheduler.add_job( self._run_ecowatt_sync_if_incomplete, - trigger=IntervalTrigger(hours=1), + trigger=CronTrigger(hour="8,20", minute=30), id="sync_ecowatt_fallback", - name="Sync EcoWatt if incomplete", + name="Sync EcoWatt if incomplete (8h30 + 20h30)", replace_existing=True, - next_run_time=datetime.now(UTC), # Run au démarrage ) - # Add Consumption France sync job - runs every 15 minutes - # RTE data is updated every 15 minutes for realised consumption + # Consommation France : 3 appels/jour (matin/midi/soir) self._scheduler.add_job( self._run_consumption_france_sync, - trigger=IntervalTrigger(minutes=15), + trigger=CronTrigger(hour="8,14,20", minute=0), id="sync_consumption_france", - name="Sync Consumption France from gateway", + name="Sync Consumption France from gateway (8h/14h/20h)", replace_existing=True, - next_run_time=datetime.now(UTC), # Run au démarrage ) - # Add Generation Forecast sync job - runs every 30 minutes - # Renewable forecasts are updated less frequently + # Prévisions production : 2 appels/jour self._scheduler.add_job( self._run_generation_forecast_sync, - trigger=IntervalTrigger(minutes=30), + trigger=CronTrigger(hour="9,21", minute=0), id="sync_generation_forecast", - name="Sync Generation Forecast from gateway", + name="Sync 
Generation Forecast from gateway (9h + 21h)", replace_existing=True, - next_run_time=datetime.now(UTC), # Run au démarrage ) self._scheduler.start() self._running = True - logger.info("[SCHEDULER] Started. Data sync every 30min, Tempo every 15min (6h-23h), EcoWatt at 17h/12h15(fri), France data every 15-30min.") + logger.info( + "[SCHEDULER] Started (~20 appels/jour max). " + "PDL: 6h-9h toutes les 30min + 12h + 18h (~10). " + "Tempo: 7h + 11h (2). " + "EcoWatt: 17h + vendredi 12h15 + fallback 8h30/20h30 (~4 max). " + "France: 8h/14h/20h (3). Forecast: 9h/21h (2)." + ) def stop(self) -> None: """Stop the scheduler""" @@ -163,7 +170,7 @@ def stop(self) -> None: logger.info("[SCHEDULER] Stopped") async def _run_sync(self) -> None: - """Run sync job""" + """Run sync job, then trigger all due exports.""" logger.info("[SCHEDULER] Starting scheduled sync...") try: @@ -184,6 +191,37 @@ async def _run_sync(self) -> None: except Exception as e: logger.error(f"[SCHEDULER] Sync failed: {e}") + # Always run exports after sync (even if sync had errors, some data may be fresh) + try: + logger.info("[SCHEDULER] Sync done, triggering exports...") + await self._run_all_exports() + except Exception as e: + logger.error(f"[SCHEDULER] Post-sync exports failed: {e}") + + async def _run_all_exports(self) -> None: + """Run all enabled exports unconditionally (called after sync).""" + from sqlalchemy import select + from .models.client_mode import ExportConfig + from .models.database import async_session_maker + + async with async_session_maker() as db: + stmt = select(ExportConfig).where(ExportConfig.is_enabled.is_(True)) + result = await db.execute(stmt) + configs = result.scalars().all() + + for config in configs: + try: + await self._run_export(db, config) + config.next_export_at = datetime.now(UTC) + timedelta( + minutes=config.export_interval_minutes or 60 + ) + await db.commit() + except Exception as e: + logger.error(f"[SCHEDULER] Export failed for {config.name}: {e}") + 
config.last_export_status = "failed" + config.last_export_error = str(e)[:500] + await db.commit() + async def _run_scheduled_exports(self) -> None: """Check for exports that are due and run them @@ -509,7 +547,9 @@ async def _run_ecowatt_sync_if_incomplete(self) -> None: async with async_session_maker() as db: # Check if we have data for today through J+3 - today = datetime.now(UTC).replace(hour=0, minute=0, second=0, microsecond=0) + # EcoWatt.periode is stored as timezone-naive datetime in PostgreSQL. + # Use naive UTC boundaries to avoid asyncpg naive/aware mismatch. + today = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0) dates_needed = [today + timedelta(days=i) for i in range(4)] # J, J+1, J+2, J+3 # Query existing data @@ -519,7 +559,11 @@ async def _run_ecowatt_sync_if_incomplete(self) -> None: EcoWatt.periode < today + timedelta(days=4) ) ) - existing_dates = {row[0].replace(tzinfo=None).date() for row in existing_result.all()} + existing_dates = set() + for row in existing_result.all(): + val = row[0] + if hasattr(val, "date"): + existing_dates.add(val.date()) needed_dates = {d.date() for d in dates_needed} missing_dates = needed_dates - existing_dates diff --git a/apps/api/src/services/exporters/home_assistant.py b/apps/api/src/services/exporters/home_assistant.py index 69b9b81..9e237ac 100644 --- a/apps/api/src/services/exporters/home_assistant.py +++ b/apps/api/src/services/exporters/home_assistant.py @@ -245,6 +245,7 @@ async def export_consumption( # Discovery config discovery_config = { "unique_id": unique_id, + "object_id": unique_id, "name": f"Consommation {usage_point_id} ({granularity})", "state_topic": state_topic, "unit_of_measurement": "kWh", @@ -310,6 +311,7 @@ async def export_production( discovery_config = { "unique_id": unique_id, + "object_id": unique_id, "name": f"Production {usage_point_id} ({granularity})", "state_topic": state_topic, "unit_of_measurement": "kWh", @@ -387,7 +389,7 @@ async def 
run_full_export(self, db: AsyncSession, usage_point_ids: list[str]) -> # Global exports (not PDL-specific) try: - count = await self._export_tempo(client, db) + count = await self._export_tempo(client, db, usage_point_ids) results["tempo"] = count except Exception as e: logger.error(f"[HA-MQTT] Tempo export failed: {e}") @@ -459,6 +461,7 @@ async def _publish_sensor_old_format( discovery_config: dict[str, Any] = { "name": name, "uniq_id": unique_id, + "obj_id": unique_id, "stat_t": state_topic, "json_attr_t": attributes_topic, "device": device, @@ -516,6 +519,7 @@ async def _publish_sensor( # Build discovery config discovery_config: dict[str, Any] = { "unique_id": unique_id, + "object_id": unique_id, "name": name, "state_topic": state_topic, "value_template": "{{ value_json.state }}", @@ -582,6 +586,7 @@ async def _publish_binary_sensor( """ discovery_config: dict[str, Any] = { "unique_id": unique_id, + "object_id": unique_id, "name": name, "state_topic": state_topic, "value_template": "{{ value_json.state }}", @@ -619,6 +624,481 @@ async def _publish_binary_sensor( # CONSUMPTION/PRODUCTION STATISTICS # ========================================================================= + @staticmethod + def _safe_evolution_percent(current_value: float, reference_value: float) -> float: + """Return percentage evolution, guarded against division by zero.""" + if reference_value == 0: + return 0.0 + return round(((current_value - reference_value) / reference_value) * 100, 2) + + @staticmethod + def _normalize_offpeak_hours(offpeak_raw: Any) -> list[dict[str, str]]: + """Normalize off-peak configuration to [{'start': 'HH:MM', 'end': 'HH:MM'}].""" + normalized: list[dict[str, str]] = [] + + def add_range(start: str, end: str) -> None: + normalized.append({"start": start, "end": end}) + + def parse_range_string(range_str: str) -> None: + match = re.search(r"(\d{1,2})[h:](\d{2})\s*-\s*(\d{1,2})[h:](\d{2})", range_str) + if not match: + return + add_range( + 
f"{match.group(1).zfill(2)}:{match.group(2)}", + f"{match.group(3).zfill(2)}:{match.group(4)}", + ) + + if isinstance(offpeak_raw, list): + for item in offpeak_raw: + if isinstance(item, dict): + start = item.get("start") + end = item.get("end") + if isinstance(start, str) and isinstance(end, str): + add_range(start, end) + elif isinstance(item, str): + parse_range_string(item) + return normalized + + if isinstance(offpeak_raw, dict): + ranges = offpeak_raw.get("ranges") + if isinstance(ranges, list): + for item in ranges: + if isinstance(item, str): + parse_range_string(item) + elif isinstance(item, dict): + start = item.get("start") + end = item.get("end") + if isinstance(start, str) and isinstance(end, str): + add_range(start, end) + for value in offpeak_raw.values(): + if isinstance(value, str): + parse_range_string(value) + elif isinstance(value, list): + for sub_item in value: + if isinstance(sub_item, str): + parse_range_string(sub_item) + return normalized + + return normalized + + def _is_offpeak_interval(self, interval_start: str | None, offpeak_hours: list[dict[str, str]]) -> bool: + """Check if an interval start time is in off-peak period.""" + if not interval_start: + return False + + # Default HC range when no contract schedule is available. + periods = offpeak_hours or [{"start": "22:00", "end": "06:00"}] + + try: + hour, minute = map(int, interval_start.split(":")) + current_minutes = hour * 60 + minute + except ValueError: + return False + + for period in periods: + try: + start_h, start_m = map(int, period["start"].split(":")) + end_h, end_m = map(int, period["end"].split(":")) + except (KeyError, ValueError): + continue + + start_minutes = start_h * 60 + start_m + end_minutes = end_h * 60 + end_m + + # Overnight range (e.g. 
22:00 -> 06:00) + if start_minutes > end_minutes: + if current_minutes >= start_minutes or current_minutes < end_minutes: + return True + elif start_minutes <= current_minutes < end_minutes: + return True + + return False + + @staticmethod + def _detailed_value_to_wh(value: int, raw_data: dict[str, Any] | None) -> float: + """Convert a detailed record value to Wh. + + Some MED payloads expose interval power values (W), others already provide Wh. + We use interval_length when available to normalize. + """ + if value is None: + return 0.0 + + if not raw_data: + # Historical default used by the existing code path. + return float(value) / 2 + + interval_length = str(raw_data.get("interval_length", "PT30M")) + match = re.match(r"PT(\d+)M", interval_length) + if not match: + # Daily/unknown payloads are already in Wh. + return float(value) + + interval_minutes = int(match.group(1)) + if interval_minutes <= 0: + return float(value) + + return float(value) / (60 / interval_minutes) + + async def _resolve_linky_pricing_context( + self, + db: AsyncSession, + pdl: str, + ) -> tuple[str, list[dict[str, str]], dict[str, float], int | None]: + """Resolve pricing option, offpeak hours, prices and subscribed power for one PDL.""" + from ...models.client_mode import ContractData + from ...models.energy_provider import EnergyOffer + from ...models.pdl import PDL + + pricing_option = "BASE" + offpeak_hours: list[dict[str, str]] = [] + subscribed_power: int | None = None + + # Default prices keep output deterministic (never NaN in card). 
+ prices: dict[str, float] = { + "base": 0.0, + "hc": 0.0, + "hp": 0.0, + **TEMPO_PRICES, + } + + pdl_result = await db.execute( + select(PDL).where(PDL.usage_point_id == pdl) + ) + pdl_record = pdl_result.scalar_one_or_none() + selected_offer_id = pdl_record.selected_offer_id if pdl_record else None + + if pdl_record and pdl_record.pricing_option: + pricing_option = str(pdl_record.pricing_option).upper() + if pdl_record and pdl_record.subscribed_power is not None: + subscribed_power = int(pdl_record.subscribed_power) + if pdl_record and pdl_record.offpeak_hours: + offpeak_hours = self._normalize_offpeak_hours(pdl_record.offpeak_hours) + + contract_result = await db.execute( + select(ContractData).where(ContractData.usage_point_id == pdl) + ) + contract = contract_result.scalar_one_or_none() + if contract and contract.pricing_option: + pricing_option = str(contract.pricing_option).upper() + if contract and contract.subscribed_power is not None: + subscribed_power = int(contract.subscribed_power) + if contract and contract.offpeak_hours: + normalized = self._normalize_offpeak_hours(contract.offpeak_hours) + if normalized: + offpeak_hours = normalized + + if selected_offer_id: + offer_result = await db.execute( + select(EnergyOffer).where(EnergyOffer.id == selected_offer_id) + ) + offer = offer_result.scalar_one_or_none() + if offer: + if offer.base_price is not None: + prices["base"] = float(offer.base_price) + if offer.hc_price is not None: + prices["hc"] = float(offer.hc_price) + if offer.hp_price is not None: + prices["hp"] = float(offer.hp_price) + if offer.tempo_blue_hc is not None: + prices["blue_hc"] = float(offer.tempo_blue_hc) + if offer.tempo_blue_hp is not None: + prices["blue_hp"] = float(offer.tempo_blue_hp) + if offer.tempo_white_hc is not None: + prices["white_hc"] = float(offer.tempo_white_hc) + if offer.tempo_white_hp is not None: + prices["white_hp"] = float(offer.tempo_white_hp) + if offer.tempo_red_hc is not None: + prices["red_hc"] = 
float(offer.tempo_red_hc) + if offer.tempo_red_hp is not None: + prices["red_hp"] = float(offer.tempo_red_hp) + + return pricing_option, offpeak_hours, prices, subscribed_power + + async def _get_day_hp_hc_kwh( + self, + db: AsyncSession, + pdl: str, + target_day: date, + offpeak_hours: list[dict[str, str]], + ) -> tuple[float, float, int]: + """Return (HC_kWh, HP_kWh, interval_count) for one day from detailed data.""" + from ...models.client_mode import ConsumptionData, DataGranularity + + result = await db.execute( + select(ConsumptionData.interval_start, ConsumptionData.value, ConsumptionData.raw_data) + .where(ConsumptionData.usage_point_id == pdl) + .where(ConsumptionData.granularity == DataGranularity.DETAILED) + .where(ConsumptionData.date == target_day) + ) + + hc_wh = 0.0 + hp_wh = 0.0 + interval_count = 0 + for interval_start, value, raw_data in result.all(): + interval_count += 1 + value_wh = self._detailed_value_to_wh(int(value or 0), raw_data) + if self._is_offpeak_interval(interval_start, offpeak_hours): + hc_wh += value_wh + else: + hp_wh += value_wh + + return round(hc_wh / 1000, 3), round(hp_wh / 1000, 3), interval_count + + async def _get_day_max_power( + self, + db: AsyncSession, + pdl: str, + target_day: date, + ) -> tuple[float, str]: + """Return max power (kW) and time ISO string for one day from max_power_data.""" + from ...models.client_mode import MaxPowerData + + result = await db.execute( + select(MaxPowerData.interval_start, MaxPowerData.value) + .where(MaxPowerData.usage_point_id == pdl) + .where(MaxPowerData.date == target_day) + .limit(1) + ) + row = result.first() + if not row: + return 0.0, f"{target_day.isoformat()}T00:00:00" + + interval_start, value = row + hhmm = interval_start if interval_start else "00:00" + kw = round(float(value or 0) / 1000.0, 2) + return kw, f"{target_day.isoformat()}T{hhmm}:00" + + @staticmethod + def _compute_day_costs( + pricing_option: str, + prices: dict[str, float], + total_kwh: float, + hc_kwh: 
float, + hp_kwh: float, + tempo_color: str | None, + ) -> tuple[float, float, float]: + """Return (total_cost, hc_cost, hp_cost) in EUR.""" + option = pricing_option.upper() + color = (tempo_color or "BLUE").upper() + + if "TEMPO" in option: + hc_price = prices.get(f"{color.lower()}_hc", 0.0) + hp_price = prices.get(f"{color.lower()}_hp", 0.0) + hc_cost = hc_kwh * hc_price + hp_cost = hp_kwh * hp_price + return round(hc_cost + hp_cost, 2), round(hc_cost, 2), round(hp_cost, 2) + + if option in ("HC/HP", "HCHP", "EJP", "HC_HP"): + hc_cost = hc_kwh * prices.get("hc", 0.0) + hp_cost = hp_kwh * prices.get("hp", 0.0) + return round(hc_cost + hp_cost, 2), round(hc_cost, 2), round(hp_cost, 2) + + base_cost = total_kwh * prices.get("base", 0.0) + return round(base_cost, 2), 0.0, 0.0 + + @staticmethod + def _choose_preferred_day_total_kwh( + daily_total_kwh: float, + hc_kwh: float, + hp_kwh: float, + detailed_interval_count: int = 0, + ) -> float: + """Choose a robust day total in kWh. + + Behavior: + - Always trust DAILY total for the day when available. + - If DAILY is missing/zero, fallback to HC+HP sum from detailed data + so the card doesn't show 0 while HC/HP/cost have real values. + """ + daily = round(float(daily_total_kwh), 2) + if daily > 0: + return daily + detailed_sum = round(hc_kwh + hp_kwh, 2) + if detailed_sum > 0 and detailed_interval_count >= 10: + return detailed_sum + return 0.0 + + async def _build_linky_card_attributes( + self, + stats: Any, + pdl: str, + requested_date: date, + data_wh: int, + history: dict[str, float], + ) -> dict[str, Any]: + """Build legacy attributes expected by content-card-linky.""" + from ...models.tempo_day import TempoDay + + db: AsyncSession = stats.db + # Derive today from the caller's requested_date (= yesterday) to avoid + # date.today() returning a stale date if the export runs around midnight. 
+ today = requested_date + timedelta(days=1) + yesterday = requested_date + + pricing_option, offpeak_hours, prices, subscribed_power = await self._resolve_linky_pricing_context(db, pdl) + + # Last 31 days in newest -> oldest order (matches legacy card expectations). + days_desc = [today - timedelta(days=i) for i in range(1, 32)] + week_days_desc = days_desc[:7] + + # Tempo colors for table (newest -> oldest). + tempo_start = min(week_days_desc) + tempo_end = max(week_days_desc) + tempo_result = await db.execute( + select(TempoDay.id, TempoDay.color) + .where(cast(TempoDay.id, String) >= tempo_start.isoformat()) + .where(cast(TempoDay.id, String) <= tempo_end.isoformat()) + ) + tempo_by_day = { + row.id: (row.color.value if hasattr(row.color, "value") else str(row.color)) + for row in tempo_result.all() + } + + daily_values: list[float] = [] + dailyweek_dates: list[str] = [] + dailyweek_cost: list[float] = [] + dailyweek_cost_hc: list[float] = [] + dailyweek_cost_hp: list[float] = [] + dailyweek_hc: list[float] = [] + dailyweek_hp: list[float] = [] + dailyweek_tempo: list[str] = [] + dailyweek_mp: list[float] = [] + dailyweek_mp_over: list[str] = [] + dailyweek_mp_time: list[str] = [] + + for target_day in days_desc: + total_kwh = round(float(history.get(target_day.isoformat(), 0.0)), 2) + day_total_kwh = total_kwh + + if target_day in week_days_desc: + day_key = target_day.isoformat() + dailyweek_dates.append(day_key) + + hc_kwh, hp_kwh, detailed_interval_count = await self._get_day_hp_hc_kwh( + db, pdl, target_day, offpeak_hours + ) + dailyweek_hc.append(round(hc_kwh, 2)) + dailyweek_hp.append(round(hp_kwh, 2)) + day_total_kwh = self._choose_preferred_day_total_kwh( + total_kwh, + hc_kwh, + hp_kwh, + detailed_interval_count=detailed_interval_count, + ) + + tempo_color = tempo_by_day.get(day_key) + dailyweek_tempo.append(tempo_color if tempo_color else "-1") + + total_cost, hc_cost, hp_cost = self._compute_day_costs( + pricing_option=pricing_option, + 
prices=prices, + total_kwh=day_total_kwh, + hc_kwh=hc_kwh, + hp_kwh=hp_kwh, + tempo_color=tempo_color, + ) + dailyweek_cost.append(total_cost) + dailyweek_cost_hc.append(hc_cost) + dailyweek_cost_hp.append(hp_cost) + + max_power_kw, max_power_time = await self._get_day_max_power(db, pdl, target_day) + dailyweek_mp.append(max_power_kw) + is_over = bool(subscribed_power is not None and max_power_kw > float(subscribed_power)) + dailyweek_mp_over.append("true" if is_over else "false") + dailyweek_mp_time.append(max_power_time) + + daily_values.append(day_total_kwh) + + yesterday_hc = 0.0 + yesterday_hp = 0.0 + if week_days_desc: + # week_days_desc[0] is J-1 + yesterday_hc = dailyweek_hc[0] if len(dailyweek_hc) > 0 else 0.0 + yesterday_hp = dailyweek_hp[0] if len(dailyweek_hp) > 0 else 0.0 + + yesterday_total = daily_values[0] if daily_values else round(data_wh / 1000, 2) + day_2_total = daily_values[1] if len(daily_values) > 1 else 0.0 + + current_year = round(await stats.get_year_total(pdl, today.year, "consumption") / 1000, 2) + current_year_last_year = round(await stats.get_year_total(pdl, today.year - 1, "consumption") / 1000, 2) + + prev_month_date = today.replace(day=1) - timedelta(days=1) + last_month = round( + await stats.get_month_total(pdl, prev_month_date.year, prev_month_date.month, "consumption") / 1000, + 2, + ) + last_month_last_year = round( + await stats.get_month_total(pdl, prev_month_date.year - 1, prev_month_date.month, "consumption") / 1000, + 2, + ) + current_month = round(await stats.get_month_total(pdl, today.year, today.month, "consumption") / 1000, 2) + current_month_last_year = round( + await stats.get_month_total(pdl, today.year - 1, today.month, "consumption") / 1000, + 2, + ) + + current_iso_year, current_iso_week, _ = today.isocalendar() + current_week = round(await stats.get_week_total(pdl, current_iso_year, current_iso_week, "consumption") / 1000, 2) + previous_week_date = today - timedelta(days=7) + previous_iso_year, 
previous_iso_week, _ = previous_week_date.isocalendar() + last_week = round( + await stats.get_week_total(pdl, previous_iso_year, previous_iso_week, "consumption") / 1000, + 2, + ) + + peak_offpeak_percent = 0.0 + if (yesterday_hp + yesterday_hc) > 0: + peak_offpeak_percent = round((yesterday_hp / (yesterday_hp + yesterday_hc)) * 100, 2) + + return { + # Existing metadata + "pdl": pdl, + "date": yesterday.isoformat(), + "value_wh": data_wh, + "data_date": yesterday.isoformat(), + "data_source": "requested_day", + "history": history, + "last_updated": datetime.now().isoformat(), + # Legacy card attributes + "unit_of_measurement": "kWh", + "daily": daily_values, + "dailyweek": dailyweek_dates, + "dailyweek_cost": dailyweek_cost, + "dailyweek_costHC": dailyweek_cost_hc, + "dailyweek_costHP": dailyweek_cost_hp, + "dailyweek_HC": dailyweek_hc, + "dailyweek_HP": dailyweek_hp, + "dailyweek_MP": dailyweek_mp, + "dailyweek_MP_over": dailyweek_mp_over, + "dailyweek_MP_time": dailyweek_mp_time, + "dailyweek_Tempo": dailyweek_tempo, + "yesterday": yesterday_total, + "day_2": day_2_total, + "yesterday_HC": round(yesterday_hc, 2), + "yesterday_HP": round(yesterday_hp, 2), + "daily_cost": dailyweek_cost[0] if dailyweek_cost else 0.0, + "peak_offpeak_percent": peak_offpeak_percent, + "current_year": current_year, + "current_year_last_year": current_year_last_year, + "yearly_evolution": self._safe_evolution_percent(current_year, current_year_last_year), + "last_month": last_month, + "last_month_last_year": last_month_last_year, + "monthly_evolution": self._safe_evolution_percent(last_month, last_month_last_year), + "current_month": current_month, + "current_month_last_year": current_month_last_year, + "current_month_evolution": self._safe_evolution_percent(current_month, current_month_last_year), + "current_week": current_week, + "last_week": last_week, + "current_week_evolution": self._safe_evolution_percent(current_week, last_week), + "yesterday_evolution": 
self._safe_evolution_percent(yesterday_total, day_2_total), + # Keep optional legacy fields explicit to avoid undefined in card templates. + "errorLastCall": "", + "serviceEnedis": "myElectricalData", + "versionUpdateAvailable": False, + "versionGit": SOFTWARE_VERSION, + } + async def _export_consumption_stats( self, client: aiomqtt.Client, @@ -638,32 +1118,47 @@ async def _export_consumption_stats( count = 0 device = self._get_device_linky(pdl) - # Get yesterday's consumption (most recent complete day) - yesterday_wh = await stats.get_day_total(pdl, yesterday, "consumption") - yesterday_kwh = round(yesterday_wh / 1000, 2) + # Strict behavior: only publish J-1 daily value. + # If J-1 is missing, keep 0. + requested_date = yesterday + data_wh = await stats.get_day_total(pdl, requested_date, "consumption") + data_kwh = round(data_wh / 1000, 2) + + _, offpeak_hours, _, _ = await self._resolve_linky_pricing_context(stats.db, pdl) # Get last N days history for attributes history = {} for i in range(1, 32): # Last 31 days day = today - timedelta(days=i) day_wh = await stats.get_day_total(pdl, day, "consumption") - history[day.isoformat()] = round(day_wh / 1000, 2) + daily_kwh = round(day_wh / 1000, 2) + hc_kwh, hp_kwh, detailed_interval_count = await self._get_day_hp_hc_kwh( + stats.db, pdl, day, offpeak_hours + ) + history[day.isoformat()] = self._choose_preferred_day_total_kwh( + daily_total_kwh=daily_kwh, + hc_kwh=hc_kwh, + hp_kwh=hp_kwh, + detailed_interval_count=detailed_interval_count, + ) + + attributes = await self._build_linky_card_attributes( + stats=stats, + pdl=pdl, + requested_date=requested_date, + data_wh=data_wh, + history=history, + ) # Main consumption sensor with history in attributes await self._publish_sensor_old_format( client, - topic=f"myelectricaldata_consumption/{pdl}", + topic=f"{self.prefix}_consumption/{pdl}", name="consumption", - unique_id=f"myelectricaldata_linky_{pdl}_consumption", + unique_id=f"{self.prefix}_linky_{pdl}_consumption", 
device=device, - state=yesterday_kwh, - attributes={ - "pdl": pdl, - "date": yesterday.isoformat(), - "value_wh": yesterday_wh, - "history": history, - "last_updated": datetime.now().isoformat(), - }, + state=attributes.get("yesterday", data_kwh), + attributes=attributes, unit="kWh", device_class="energy", state_class="total", @@ -676,14 +1171,13 @@ async def _export_consumption_stats( total_kwh = 0.0 for i in range(1, days_count + 1): day = today - timedelta(days=i) - day_wh = await stats.get_day_total(pdl, day, "consumption") - total_kwh += day_wh / 1000 + total_kwh += float(history.get(day.isoformat(), 0.0)) await self._publish_sensor_old_format( client, - topic=f"myelectricaldata_consumption_last_{days_count}_day/{pdl}", + topic=f"{self.prefix}_consumption_last_{days_count}_day/{pdl}", name=f"consumption last{days_count}day", - unique_id=f"myelectricaldata_linky_{pdl}_consumption_last{days_count}day", + unique_id=f"{self.prefix}_linky_{pdl}_consumption_last{days_count}day", device=device, state=round(total_kwh, 2), attributes={ @@ -732,9 +1226,11 @@ async def _export_production_stats( logger.debug(f"[HA-MQTT] PDL {pdl} has no production, skipping") return 0 - # Get yesterday's production (most recent complete day) - yesterday_wh = await stats.get_day_total(pdl, yesterday, "production") - yesterday_kwh = round(yesterday_wh / 1000, 2) + # Strict behavior: only publish J-1 daily value. + # If J-1 is missing, keep 0. 
+ requested_date = yesterday + data_wh = await stats.get_day_total(pdl, requested_date, "production") + data_kwh = round(data_wh / 1000, 2) # Get last N days history for attributes history = {} @@ -746,15 +1242,17 @@ async def _export_production_stats( # Main production sensor with history in attributes await self._publish_sensor_old_format( client, - topic=f"myelectricaldata_production/{pdl}", + topic=f"{self.prefix}_production/{pdl}", name="production", - unique_id=f"myelectricaldata_linky_{pdl}_production", + unique_id=f"{self.prefix}_linky_{pdl}_production", device=device, - state=yesterday_kwh, + state=data_kwh, attributes={ "pdl": pdl, - "date": yesterday.isoformat(), - "value_wh": yesterday_wh, + "date": requested_date.isoformat(), + "value_wh": data_wh, + "data_date": requested_date.isoformat(), + "data_source": "requested_day", "history": history, "last_updated": datetime.now().isoformat(), }, @@ -775,9 +1273,9 @@ async def _export_production_stats( await self._publish_sensor_old_format( client, - topic=f"myelectricaldata_production_last_{days_count}_day/{pdl}", + topic=f"{self.prefix}_production_last_{days_count}_day/{pdl}", name=f"production last{days_count}day", - unique_id=f"myelectricaldata_linky_{pdl}_production_last{days_count}day", + unique_id=f"{self.prefix}_linky_{pdl}_production_last{days_count}day", device=device, state=round(total_kwh, 2), attributes={ @@ -800,7 +1298,73 @@ async def _export_production_stats( # TEMPO EXPORT (Old MyElectricalData format) # ========================================================================= - async def _export_tempo(self, client: aiomqtt.Client, db: AsyncSession) -> int: + async def _get_tempo_prices_for_export( + self, + db: AsyncSession, + usage_point_ids: list[str], + ) -> tuple[dict[str, float], dict[str, Any]]: + """Resolve Tempo prices for HA MQTT export. + + Priority: + 1. Selected TEMPO offer on one of the exported PDLs. + 2. Built-in fallback constants (TEMPO_PRICES). 
+ """ + from ...models.energy_provider import EnergyOffer + from ...models.pdl import PDL + + if not usage_point_ids: + return dict(TEMPO_PRICES), {"source": "fallback_defaults", "reason": "no_usage_point_ids"} + + pdl_result = await db.execute( + select(PDL).where(PDL.usage_point_id.in_(usage_point_ids)) + ) + pdls = pdl_result.scalars().all() + + for pdl in pdls: + if not pdl.selected_offer_id: + continue + + offer_result = await db.execute( + select(EnergyOffer).where(EnergyOffer.id == pdl.selected_offer_id) + ) + offer = offer_result.scalar_one_or_none() + + if not offer or offer.offer_type != "TEMPO": + continue + + price_map: dict[str, float] = {} + if offer.tempo_blue_hc is not None: + price_map["blue_hc"] = float(offer.tempo_blue_hc) + if offer.tempo_blue_hp is not None: + price_map["blue_hp"] = float(offer.tempo_blue_hp) + if offer.tempo_white_hc is not None: + price_map["white_hc"] = float(offer.tempo_white_hc) + if offer.tempo_white_hp is not None: + price_map["white_hp"] = float(offer.tempo_white_hp) + if offer.tempo_red_hc is not None: + price_map["red_hc"] = float(offer.tempo_red_hc) + if offer.tempo_red_hp is not None: + price_map["red_hp"] = float(offer.tempo_red_hp) + + if len(price_map) == len(TEMPO_PRICES): + return price_map, { + "source": "selected_offer", + "usage_point_id": pdl.usage_point_id, + "offer_id": offer.id, + "offer_name": offer.name, + "price_updated_at": offer.price_updated_at.isoformat() if offer.price_updated_at else None, + } + + logger.warning( + "[HA-MQTT] Tempo offer '%s' for PDL %s is incomplete (%d/6 prices), using fallback constants", + offer.name, + pdl.usage_point_id, + len(price_map), + ) + + return dict(TEMPO_PRICES), {"source": "fallback_defaults", "reason": "no_complete_tempo_offer"} + + async def _export_tempo(self, client: aiomqtt.Client, db: AsyncSession, usage_point_ids: list[str]) -> int: """Export Tempo information via MQTT Discovery (old MyElectricalData format) Creates entities under two devices: @@ -844,13 
+1408,15 @@ async def _export_tempo(self, client: aiomqtt.Client, db: AsyncSession) -> int: today_tempo = result.scalar_one_or_none() today_color = today_tempo.color.value if today_tempo else "UNKNOWN" + today_state = self._get_tempo_state(today_color) + await self._publish_sensor_old_format( client, - topic="myelectricaldata_rte/tempo_today", + topic=f"{self.prefix}_rte/tempo_today", name="Today", - unique_id="myelectricaldata_tempo_today", + unique_id=f"{self.prefix}_tempo_today", device=device_rte, - state=today_color, + state=today_state, attributes={ "date": today_str, "color_fr": self._get_tempo_color_fr(today_color), @@ -866,13 +1432,15 @@ async def _export_tempo(self, client: aiomqtt.Client, db: AsyncSession) -> int: tomorrow_tempo = result.scalar_one_or_none() tomorrow_color = tomorrow_tempo.color.value if tomorrow_tempo else "UNKNOWN" + tomorrow_state = self._get_tempo_state(tomorrow_color) + await self._publish_sensor_old_format( client, - topic="myelectricaldata_rte/tempo_tomorrow", + topic=f"{self.prefix}_rte/tempo_tomorrow", name="Tomorrow", - unique_id="myelectricaldata_tempo_tomorrow", + unique_id=f"{self.prefix}_tempo_tomorrow", device=device_rte, - state=tomorrow_color, + state=tomorrow_state, attributes={ "date": tomorrow_str, "color_fr": self._get_tempo_color_fr(tomorrow_color), @@ -896,6 +1464,16 @@ async def _export_tempo(self, client: aiomqtt.Client, db: AsyncSession) -> int: season_start_str = season_start.isoformat() season_end_str = season_end.isoformat() + # Known colors for today/tomorrow should be deducted from remaining counters + # (legacy behavior expected by existing dashboards/cards). + # We rely on the two already-resolved states above to avoid enum-cast + # inconsistencies across DB backends. 
+ known_future_counts: dict[str, int] = {c.value: 0 for c in TempoColor} + if today_color in known_future_counts: + known_future_counts[today_color] += 1 + if tomorrow_color in known_future_counts: + known_future_counts[tomorrow_color] += 1 + # Days count per color (consumed + remaining) days_data: dict[str, dict[str, int]] = {} @@ -911,34 +1489,32 @@ async def _export_tempo(self, client: aiomqtt.Client, db: AsyncSession) -> int: ) used = result.scalar() or 0 - # Count remaining days (including today until season end) - result = await db.execute( - select(func.count(TempoDay.id)) - .where(TempoDay.id >= today_str) - .where(TempoDay.id <= season_end_str) - .where(cast(TempoDay.color, String) == color.value) - ) - remaining = result.scalar() or 0 - quota = TEMPO_QUOTAS.get(color.value, 0) + # Tempo "jours restants" is quota-based, not "future known rows in DB". + # DB only contains known colors (historical + limited forecast), which can + # wrongly produce tiny values like 1/0/1 if used directly. 
+ reserved_known = known_future_counts.get(color.value, 0) + remaining = max(int(quota) - int(used) - int(reserved_known), 0) days_data[color_name] = { "used": used, "remaining": remaining, + "reserved_known_days": reserved_known, "quota": quota, } # Publish days_{color} sensor await self._publish_sensor_old_format( client, - topic=f"myelectricaldata_edf/tempo_days_{color_name}", + topic=f"{self.prefix}_edf/tempo_days_{color_name}", name=f"Days {color.value.capitalize()}", - unique_id=f"myelectricaldata_tempo_days_{color_name}", + unique_id=f"{self.prefix}_tempo_days_{color_name}", device=device_edf, - state=used, + state=remaining, attributes={ "used": used, "remaining": remaining, + "reserved_known_days": reserved_known, "quota": quota, "season_start": season_start_str, "season_end": season_end_str, @@ -954,9 +1530,9 @@ async def _export_tempo(self, client: aiomqtt.Client, db: AsyncSession) -> int: await self._publish_sensor_old_format( client, - topic="myelectricaldata_edf/tempo_info", + topic=f"{self.prefix}_edf/tempo_info", name="Tempo Info", - unique_id="myelectricaldata_tempo_info", + unique_id=f"{self.prefix}_tempo_info", device=device_edf, state=today_color, attributes={ @@ -964,7 +1540,12 @@ async def _export_tempo(self, client: aiomqtt.Client, db: AsyncSession) -> int: "tomorrow": tomorrow_color, "season_start": season_start_str, "season_end": season_end_str, - **{f"days_{k}": v for k, v in days_data.items()}, + "days_blue": f"{days_data['blue']['remaining']} / {days_data['blue']['quota']}", + "days_white": f"{days_data['white']['remaining']} / {days_data['white']['quota']}", + "days_red": f"{days_data['red']['remaining']} / {days_data['red']['quota']}", + "days_blue_detail": days_data["blue"], + "days_white_detail": days_data["white"], + "days_red_detail": days_data["red"], }, icon="mdi:information", ) @@ -974,19 +1555,25 @@ async def _export_tempo(self, client: aiomqtt.Client, db: AsyncSession) -> int: # EDF TEMPO: Price sensors # 
===================================================================== - for price_key, price_value in TEMPO_PRICES.items(): + tempo_prices, tempo_price_meta = await self._get_tempo_prices_for_export(db, usage_point_ids) + + for price_key, price_value in tempo_prices.items(): price_name = TEMPO_PRICE_NAMES.get(price_key, price_key) await self._publish_sensor_old_format( client, - topic=f"myelectricaldata_edf/tempo_price_{price_key}", + topic=f"{self.prefix}_edf/tempo_price_{price_key}", name=f"Price {price_name}", - unique_id=f"myelectricaldata_tempo_price_{price_key}", + unique_id=f"{self.prefix}_tempo_price_{price_key}", device=device_edf, state=price_value, attributes={ "price_type": price_key, "name": price_name, + "source": tempo_price_meta.get("source"), + "offer_name": tempo_price_meta.get("offer_name"), + "usage_point_id": tempo_price_meta.get("usage_point_id"), + "price_updated_at": tempo_price_meta.get("price_updated_at"), }, unit="EUR/kWh", icon="mdi:currency-eur", @@ -1006,6 +1593,12 @@ def _get_tempo_color_fr(self, color: str) -> str: } return names.get(color, "Inconnu") + def _get_tempo_state(self, color: str) -> str: + """Normalize Tempo state value for MQTT (legacy compatibility).""" + if (color or "").upper() == "UNKNOWN": + return "Inconnu" + return color + def _get_tempo_icon(self, color: str) -> str: """Get MDI icon for Tempo color""" icons = { @@ -1080,9 +1673,9 @@ async def _export_ecowatt(self, client: aiomqtt.Client, db: AsyncSession) -> int # Use day_value as state (1=Normal, 2=Tendu, 3=Critique) await self._publish_sensor_old_format( client, - topic=f"myelectricaldata_rte/ecowatt_{day_name}", + topic=f"{self.prefix}_rte/ecowatt_{day_name}", name=day_name, - unique_id=f"myelectricaldata_ecowatt_{day_name}", + unique_id=f"{self.prefix}_ecowatt_{day_name}", device=device, state=day_value, attributes=attributes, @@ -1093,9 +1686,9 @@ async def _export_ecowatt(self, client: aiomqtt.Client, db: AsyncSession) -> int # No data available for this 
day await self._publish_sensor_old_format( client, - topic=f"myelectricaldata_rte/ecowatt_{day_name}", + topic=f"{self.prefix}_rte/ecowatt_{day_name}", name=day_name, - unique_id=f"myelectricaldata_ecowatt_{day_name}", + unique_id=f"{self.prefix}_ecowatt_{day_name}", device=device, state="unknown", attributes={ @@ -1160,6 +1753,17 @@ async def read_metrics(self, usage_point_ids: list[str] | None = None) -> dict[s # - homeassistant/sensor/myelectricaldata_consumption/{pdl}/state # etc. topics_to_read = [ + f"{self.discovery_prefix}/sensor/{self.prefix}_rte/#", + f"{self.discovery_prefix}/sensor/{self.prefix}_edf/#", + f"{self.discovery_prefix}/sensor/{self.prefix}_consumption/#", + f"{self.discovery_prefix}/sensor/{self.prefix}_consumption_last_7_day/#", + f"{self.discovery_prefix}/sensor/{self.prefix}_consumption_last_14_day/#", + f"{self.discovery_prefix}/sensor/{self.prefix}_consumption_last_30_day/#", + f"{self.discovery_prefix}/sensor/{self.prefix}_production/#", + f"{self.discovery_prefix}/sensor/{self.prefix}_production_last_7_day/#", + f"{self.discovery_prefix}/sensor/{self.prefix}_production_last_14_day/#", + f"{self.discovery_prefix}/sensor/{self.prefix}_production_last_30_day/#", + # Legacy compatibility f"{self.discovery_prefix}/sensor/myelectricaldata_rte/#", f"{self.discovery_prefix}/sensor/myelectricaldata_edf/#", f"{self.discovery_prefix}/sensor/myelectricaldata_consumption/#", @@ -1359,23 +1963,27 @@ def _categorize_ha_topic(self, topic: str) -> str: - {discovery_prefix}/sensor/myelectricaldata_production/{pdl}/state """ topic_lower = topic.lower() + pref = self.prefix.lower() + + def _match(fragment: str) -> bool: + return f"{pref}{fragment}" in topic_lower or f"myelectricaldata{fragment}" in topic_lower # RTE Tempo sensors - if "myelectricaldata_rte/tempo_today" in topic_lower: + if _match("_rte/tempo_today"): return "Tempo Aujourd'hui" - elif "myelectricaldata_rte/tempo_tomorrow" in topic_lower: + elif _match("_rte/tempo_tomorrow"): return "Tempo 
Demain" # RTE EcoWatt sensors - elif "myelectricaldata_rte/ecowatt_j0" in topic_lower: + elif _match("_rte/ecowatt_j0"): return "EcoWatt Aujourd'hui" - elif "myelectricaldata_rte/ecowatt_j1" in topic_lower: + elif _match("_rte/ecowatt_j1"): return "EcoWatt Demain" - elif "myelectricaldata_rte/ecowatt_j2" in topic_lower: + elif _match("_rte/ecowatt_j2"): return "EcoWatt J+2" # EDF Tempo sensors - elif "myelectricaldata_edf/tempo_days_" in topic_lower: + elif _match("_edf/tempo_days_"): if "blue" in topic_lower: return "Tempo Jours Bleus" elif "white" in topic_lower: @@ -1383,13 +1991,13 @@ def _categorize_ha_topic(self, topic: str) -> str: elif "red" in topic_lower: return "Tempo Jours Rouges" return "Tempo Jours" - elif "myelectricaldata_edf/tempo_price_" in topic_lower: + elif _match("_edf/tempo_price_"): return "Tempo Prix" - elif "myelectricaldata_edf/tempo_info" in topic_lower: + elif _match("_edf/tempo_info"): return "Tempo Info" # Consumption sensors - elif "myelectricaldata_consumption_last_" in topic_lower: + elif _match("_consumption_last_"): if "7" in topic_lower: return "Conso 7 derniers jours" elif "14" in topic_lower: @@ -1397,11 +2005,11 @@ def _categorize_ha_topic(self, topic: str) -> str: elif "30" in topic_lower: return "Conso 30 derniers jours" return "Conso Période" - elif "myelectricaldata_consumption/" in topic_lower: + elif _match("_consumption/"): return "Conso Journalière" # Production sensors - elif "myelectricaldata_production_last_" in topic_lower: + elif _match("_production_last_"): if "7" in topic_lower: return "Prod 7 derniers jours" elif "14" in topic_lower: @@ -1409,7 +2017,7 @@ def _categorize_ha_topic(self, topic: str) -> str: elif "30" in topic_lower: return "Prod 30 derniers jours" return "Prod Période" - elif "myelectricaldata_production/" in topic_lower: + elif _match("_production/"): return "Prod Journalière" # Legacy format fallback diff --git a/apps/api/src/services/exporters/mqtt.py b/apps/api/src/services/exporters/mqtt.py 
index 16e751d..827710a 100644 --- a/apps/api/src/services/exporters/mqtt.py +++ b/apps/api/src/services/exporters/mqtt.py @@ -387,18 +387,20 @@ async def _get_production_stats(self, db: AsyncSession, pdl: str) -> dict[str, A async def _get_tempo_data(self, db: AsyncSession) -> dict[str, Any] | None: """Get Tempo data""" - from ...models.tempo import TempoDay + from ...models.tempo_day import TempoDay today = date.today() tomorrow = today + timedelta(days=1) + today_str = today.isoformat() + tomorrow_str = tomorrow.isoformat() # Get today's color - stmt = select(TempoDay).where(TempoDay.date == today) + stmt = select(TempoDay).where(TempoDay.id == today_str) result = await db.execute(stmt) today_row = result.scalar_one_or_none() # Get tomorrow's color - stmt = select(TempoDay).where(TempoDay.date == tomorrow) + stmt = select(TempoDay).where(TempoDay.id == tomorrow_str) result = await db.execute(stmt) tomorrow_row = result.scalar_one_or_none() @@ -410,28 +412,33 @@ async def _get_tempo_data(self, db: AsyncSession) -> dict[str, Any] | None: month = today.month season_start = date(today.year if month >= 9 else today.year - 1, 9, 1) season_end = date(today.year + 1 if month >= 9 else today.year, 8, 31) + season_start_str = season_start.isoformat() + season_end_str = season_end.isoformat() # Count used days by color stmt = select(TempoDay.color, func.count(TempoDay.id)).where( - TempoDay.date >= season_start, - TempoDay.date <= today, + TempoDay.id >= season_start_str, + TempoDay.id <= today_str, ).group_by(TempoDay.color) result = await db.execute(stmt) - used = {row[0]: row[1] for row in result.all()} + used = { + (row[0].value if hasattr(row[0], "value") else str(row[0])): row[1] + for row in result.all() + } remaining = { - "blue": TEMPO_QUOTAS["BLUE"] - used.get("BLUE", 0), - "white": TEMPO_QUOTAS["WHITE"] - used.get("WHITE", 0), - "red": TEMPO_QUOTAS["RED"] - used.get("RED", 0), + "blue": max(0, TEMPO_QUOTAS["BLUE"] - used.get("BLUE", 0)), + "white": max(0, 
TEMPO_QUOTAS["WHITE"] - used.get("WHITE", 0)), + "red": max(0, TEMPO_QUOTAS["RED"] - used.get("RED", 0)), } return { "today": { - "color": today_row.color if today_row else "UNKNOWN", + "color": (today_row.color.value if (today_row and hasattr(today_row.color, "value")) else "UNKNOWN"), "date": today.isoformat(), }, "tomorrow": { - "color": tomorrow_row.color if tomorrow_row else "UNKNOWN", + "color": (tomorrow_row.color.value if (tomorrow_row and hasattr(tomorrow_row.color, "value")) else "UNKNOWN"), "date": tomorrow.isoformat(), }, "remaining": remaining, @@ -440,24 +447,27 @@ async def _get_tempo_data(self, db: AsyncSession) -> dict[str, Any] | None: async def _get_ecowatt_data(self, db: AsyncSession) -> dict[str, Any] | None: """Get EcoWatt data""" - from ...models.ecowatt import EcoWattSignal + from ...models.ecowatt import EcoWatt today = date.today() now = datetime.now() # Get today's signal - stmt = select(EcoWattSignal).where( - cast(EcoWattSignal.timestamp, String).like(f"{today.isoformat()}%") - ).order_by(EcoWattSignal.timestamp.desc()).limit(1) + stmt = ( + select(EcoWatt) + .where(func.date(EcoWatt.periode) == today) + .order_by(EcoWatt.generation_datetime.desc()) + .limit(1) + ) result = await db.execute(stmt) - signal = result.scalar_one_or_none() + ecowatt = result.scalar_one_or_none() - if not signal: + if not ecowatt: return None # Parse hourly values if available - hourly_values = signal.hourly_values or [] - current_level = signal.level + hourly_values = ecowatt.values or [] + current_level = ecowatt.dvalue current_hour = now.hour # Get current hour level if available @@ -474,7 +484,7 @@ async def _get_ecowatt_data(self, db: AsyncSession) -> dict[str, Any] | None: "level": current_level, "level_label": {1: "Vert", 2: "Orange", 3: "Rouge"}.get(current_level, "Inconnu"), "next_hour_level": next_hour_level, - "message": signal.message, + "message": ecowatt.message, "timestamp": datetime.now().isoformat(), } diff --git 
a/apps/api/src/services/local_data.py b/apps/api/src/services/local_data.py index dd32521..ba3fa05 100644 --- a/apps/api/src/services/local_data.py +++ b/apps/api/src/services/local_data.py @@ -17,11 +17,13 @@ from typing import Any from sqlalchemy import select, and_, func +from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.ext.asyncio import AsyncSession from ..models.client_mode import ( ConsumptionData, ProductionData, + MaxPowerData, ContractData, AddressData, DataGranularity, @@ -37,6 +39,11 @@ class LocalDataService: def __init__(self, db: AsyncSession): self.db = db + @staticmethod + def _is_placeholder_raw(raw_data: Any) -> bool: + """Return True when a raw_data payload is marked as placeholder.""" + return isinstance(raw_data, dict) and bool(raw_data.get("is_placeholder")) + async def get_consumption_daily( self, usage_point_id: str, @@ -103,6 +110,55 @@ async def get_production_detail( granularity=DataGranularity.DETAILED, ) + async def get_consumption_max_power( + self, + usage_point_id: str, + start_date: date, + end_date: date, + ) -> tuple[list[dict[str, Any]], list[tuple[date, date]]]: + """Get daily max power from local database. + + Date range is inclusive on both bounds. 
+ """ + result = await self.db.execute( + select(MaxPowerData).where( + and_( + MaxPowerData.usage_point_id == usage_point_id, + MaxPowerData.date >= start_date, + MaxPowerData.date <= end_date, + ) + ).order_by(MaxPowerData.date, MaxPowerData.interval_start) + ) + records = result.scalars().all() + + formatted = [ + { + "date": f"{rec.date.isoformat()} {(rec.interval_start or '00:00')}:00", + "value": rec.value, + } + for rec in records + ] + + existing_dates = {rec.date for rec in records} + missing_ranges = self._find_missing_ranges_inclusive( + start_date=start_date, + end_date=end_date, + existing_dates=existing_dates, + ) + + if formatted: + logger.info( + f"[{usage_point_id}] Found {len(formatted)} local max_power records " + f"from {start_date} to {end_date}" + ) + if missing_ranges: + logger.info( + f"[{usage_point_id}] Missing {len(missing_ranges)} date ranges for max_power: " + f"{missing_ranges}" + ) + + return formatted, missing_ranges + async def get_contract(self, usage_point_id: str) -> dict[str, Any] | None: """Get contract data from local database.""" result = await self.db.execute( @@ -238,6 +294,73 @@ async def save_address(self, usage_point_id: str, data: dict[str, Any]) -> None: await self.db.commit() + async def save_consumption_max_power( + self, + usage_point_id: str, + data: dict[str, Any], + ) -> int: + """Save max power readings to local database. + + Returns: + Number of day records upserted. + """ + meter_reading: dict[str, Any] = {} + if isinstance(data.get("data"), dict): + meter_reading = data["data"].get("meter_reading", {}) + if not meter_reading: + meter_reading = data.get("meter_reading", {}) + + interval_reading = meter_reading.get("interval_reading", []) + if not isinstance(interval_reading, list) or not interval_reading: + return 0 + + # Keep max value per day (tie-breaker: latest interval_start). 
+ by_day: dict[str, dict[str, Any]] = {} + for reading in interval_reading: + date_str = str(reading.get("date", "")) + day_str, hhmm = self._split_power_datetime(date_str) + if not day_str: + continue + try: + day_date = date.fromisoformat(day_str) + value = int(float(reading.get("value", 0))) + except (TypeError, ValueError): + continue + + current = by_day.get(day_str) + if ( + current is None + or value > int(current["value"]) + or (value == int(current["value"]) and hhmm > str(current["interval_start"] or "00:00")) + ): + by_day[day_str] = { + "usage_point_id": usage_point_id, + "date": day_date, + "interval_start": hhmm, + "value": value, + "source": "myelectricaldata", + "raw_data": reading, + } + + records = [by_day[k] for k in sorted(by_day.keys())] + if not records: + return 0 + + stmt = pg_insert(MaxPowerData).values(records) + stmt = stmt.on_conflict_do_update( + constraint="uq_max_power_data", + set_={ + "interval_start": stmt.excluded.interval_start, + "value": stmt.excluded.value, + "raw_data": stmt.excluded.raw_data, + "updated_at": datetime.now(), + }, + ) + await self.db.execute(stmt) + await self.db.commit() + + return len(records) + async def _get_energy_data( self, model: type[ConsumptionData | ProductionData], @@ -260,7 +383,7 @@ async def _get_energy_data( model.usage_point_id == usage_point_id, model.granularity == granularity, model.date >= start_date, - model.date < end_date, # end_date is exclusive + model.date <= end_date, # end_date is inclusive (already capped to yesterday by adjust_date_range) ) ).order_by(model.date, model.interval_start) ) @@ -268,9 +391,24 @@ async def _get_energy_data( # Format records for API response if granularity == DataGranularity.DAILY: + # Defensive dedup: keep one value per day (highest wins) to avoid + # duplicated DAILY rows skewing downstream charts/exports. 
+ by_date: dict[date, Any] = {} + for rec in records: + current = by_date.get(rec.date) + if current is None or int(rec.value or 0) >= int(current.value or 0): + by_date[rec.date] = rec + + deduped_records = [by_date[d] for d in sorted(by_date.keys())] + if len(deduped_records) < len(records): + logger.warning( + f"[{usage_point_id}] Deduplicated {len(records) - len(deduped_records)} " + f"{model.__tablename__} DAILY rows" + ) + formatted = [ {"date": rec.date.isoformat(), "value": rec.value} - for rec in records + for rec in deduped_records ] else: # DETAILED formatted = [ @@ -319,28 +457,51 @@ async def _find_missing_ranges( Returns list of (start, end) tuples representing missing ranges. """ - # Get distinct dates that have data - result = await self.db.execute( - select(func.distinct(model.date)).where( - and_( - model.usage_point_id == usage_point_id, - model.granularity == granularity, - model.date >= start_date, - model.date < end_date, + placeholder_only_dates: set[date] = set() + if granularity == DataGranularity.DAILY: + result = await self.db.execute( + select(model.date, model.raw_data).where( + and_( + model.usage_point_id == usage_point_id, + model.granularity == granularity, + model.date >= start_date, + model.date <= end_date, + model.interval_start.is_(None), + ) ) ) - ) - existing_dates = {row[0] for row in result.fetchall()} + real_dates: set[date] = set() + placeholder_dates: set[date] = set() + for day_value, raw_data in result.all(): + if self._is_placeholder_raw(raw_data): + placeholder_dates.add(day_value) + else: + real_dates.add(day_value) + existing_dates = real_dates + placeholder_only_dates = placeholder_dates - real_dates + else: + # Get distinct dates that have data + result = await self.db.execute( + select(func.distinct(model.date)).where( + and_( + model.usage_point_id == usage_point_id, + model.granularity == granularity, + model.date >= start_date, + model.date <= end_date, + ) + ) + ) + existing_dates = {row[0] for row in 
result.fetchall()} # Generate all dates in range all_dates = set() current = start_date - while current < end_date: + while current <= end_date: all_dates.add(current) current += timedelta(days=1) # Find missing dates - missing_dates = sorted(all_dates - existing_dates) + missing_dates = sorted((all_dates - existing_dates) | placeholder_only_dates) if not missing_dates: return [] @@ -366,6 +527,54 @@ async def _find_missing_ranges( return ranges + @staticmethod + def _find_missing_ranges_inclusive( + start_date: date, + end_date: date, + existing_dates: set[date], + ) -> list[tuple[date, date]]: + """Find missing inclusive ranges in [start_date, end_date].""" + if end_date < start_date: + return [] + + missing_dates: list[date] = [] + current = start_date + while current <= end_date: + if current not in existing_dates: + missing_dates.append(current) + current += timedelta(days=1) + + if not missing_dates: + return [] + + ranges: list[tuple[date, date]] = [] + range_start = missing_dates[0] + range_end = missing_dates[0] + for d in missing_dates[1:]: + if d == range_end + timedelta(days=1): + range_end = d + else: + ranges.append((range_start, range_end)) + range_start = d + range_end = d + ranges.append((range_start, range_end)) + + return ranges + + @staticmethod + def _split_power_datetime(date_str: str) -> tuple[str, str]: + """Split power datetime string to (YYYY-MM-DD, HH:MM).""" + if not date_str: + return "", "00:00" + if "T" in date_str: + day_part, time_part = date_str.split("T", 1) + elif " " in date_str: + day_part, time_part = date_str.split(" ", 1) + else: + return date_str[:10], "00:00" + hhmm = time_part[:5] if len(time_part) >= 5 else "00:00" + return day_part[:10], hhmm + async def get_sync_status( self, usage_point_id: str, diff --git a/apps/api/src/services/statistics.py b/apps/api/src/services/statistics.py index 0f107c6..e614055 100644 --- a/apps/api/src/services/statistics.py +++ b/apps/api/src/services/statistics.py @@ -138,6 +138,9 @@ async 
def get_day_total( ) -> int: """Get total Wh for a specific day + Tries DAILY granularity first, then falls back to summing DETAILED + (30-min) records if no DAILY data exists for that day. + Args: usage_point_id: PDL number target_date: The date to query @@ -148,13 +151,28 @@ async def get_day_total( """ model = self._get_model(direction) + # Try DAILY first result = await self.db.execute( select(func.coalesce(func.sum(model.value), 0)) .where(model.usage_point_id == usage_point_id) .where(model.granularity == DataGranularity.DAILY) .where(model.date == target_date) ) - return int(result.scalar() or 0) + daily_total = int(result.scalar() or 0) + + if daily_total > 0: + return daily_total + + # Fallback: aggregate DETAILED records (values in W, PT30M → Wh = W / 2) + result = await self.db.execute( + select(func.coalesce(func.sum(model.value), 0)) + .where(model.usage_point_id == usage_point_id) + .where(model.granularity == DataGranularity.DETAILED) + .where(model.date == target_date) + ) + detailed_sum = int(result.scalar() or 0) + # DETAILED values are in W for 30-min intervals → Wh = W / 2 + return detailed_sum // 2 if detailed_sum > 0 else 0 async def get_current_year_by_month( self, usage_point_id: str, direction: str = "consumption" diff --git a/apps/api/src/services/sync.py b/apps/api/src/services/sync.py index 728432e..653d3ba 100644 --- a/apps/api/src/services/sync.py +++ b/apps/api/src/services/sync.py @@ -13,7 +13,7 @@ from datetime import UTC, date, datetime, timedelta from typing import Any -from sqlalchemy import select, and_, func +from sqlalchemy import select, and_, delete, func, text from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.ext.asyncio import AsyncSession @@ -26,6 +26,7 @@ ConsumptionData, ContractData, DataGranularity, + MaxPowerData, ProductionData, SyncStatus, SyncStatusType, @@ -48,6 +49,76 @@ def __init__(self, db: AsyncSession) -> None: self.db = db self.adapter = get_med_adapter() + @staticmethod + def 
_is_placeholder_raw(raw_data: Any) -> bool: + """Return True when a raw_data payload is marked as placeholder.""" + return isinstance(raw_data, dict) and bool(raw_data.get("is_placeholder")) + + async def _deduplicate_daily_rows( + self, + model_class: type[ConsumptionData | ProductionData], + usage_point_id: str, + start_date: date, + end_date: date, + ) -> int: + """Delete duplicate DAILY rows and keep the best row for each date. + + Keep order: + 1. Real data over placeholder rows + 2. Highest value + 3. Most recently updated row + """ + table_name = model_class.__tablename__ + stmt = text(f""" + WITH ranked AS ( + SELECT + ctid, + row_number() OVER ( + PARTITION BY usage_point_id, date, granularity + ORDER BY + CASE + WHEN COALESCE((raw_data->>'is_placeholder')::boolean, false) THEN 1 + ELSE 0 + END ASC, + value DESC, + updated_at DESC NULLS LAST, + created_at DESC NULLS LAST, + ctid DESC + ) AS rn + FROM {table_name} + WHERE usage_point_id = :usage_point_id + AND granularity = :granularity + AND interval_start IS NULL + AND date >= :start_date + AND date < :end_date + ) + DELETE FROM {table_name} t + USING ranked r + WHERE t.ctid = r.ctid + AND r.rn > 1 + """) + result = await self.db.execute( + stmt, + { + "usage_point_id": usage_point_id, + "granularity": DataGranularity.DAILY.value, + "start_date": start_date, + "end_date": end_date, + }, + ) + await self.db.commit() + removed = int(result.rowcount or 0) if (result.rowcount or 0) > 0 else 0 + if removed > 0: + logger.warning( + "[SYNC] %s: removed %d duplicate daily rows for %s in [%s, %s[", + table_name, + removed, + usage_point_id, + start_date, + end_date, + ) + return removed + async def sync_pdl_list(self, user_id: str) -> list[dict[str, Any]]: """Sync PDL list from remote API to local database @@ -193,6 +264,7 @@ async def sync_pdl(self, usage_point_id: str) -> dict[str, Any]: "usage_point_id": usage_point_id, "consumption_daily": "skipped (inactive PDL)", "consumption_detail": "skipped (inactive 
PDL)", + "max_power": "skipped (inactive PDL)", "production_daily": "skipped (inactive PDL)", "production_detail": "skipped (inactive PDL)", "contract": "skipped (inactive PDL)", @@ -203,23 +275,31 @@ async def sync_pdl(self, usage_point_id: str) -> dict[str, Any]: "usage_point_id": usage_point_id, "consumption_daily": None, "consumption_detail": None, + "max_power": None, "production_daily": None, "production_detail": None, "contract": None, "address": None, } - # Sync contract and address first (they're small) + # Sync contract and address first (they're small), but not on every cycle. + # These values change infrequently and refreshing each 30/60 min burns API quota. try: - await self._sync_contract(usage_point_id) - result["contract"] = "success" + if await self._should_refresh_metadata(ContractData, usage_point_id, min_interval_hours=24): + await self._sync_contract(usage_point_id) + result["contract"] = "success" + else: + result["contract"] = "skipped (recent)" except Exception as e: logger.warning(f"[SYNC] Failed to sync contract for {usage_point_id}: {e}") result["contract"] = f"error: {e}" try: - await self._sync_address(usage_point_id) - result["address"] = "success" + if await self._should_refresh_metadata(AddressData, usage_point_id, min_interval_hours=24): + await self._sync_address(usage_point_id) + result["address"] = "success" + else: + result["address"] = "skipped (recent)" except Exception as e: logger.warning(f"[SYNC] Failed to sync address for {usage_point_id}: {e}") result["address"] = f"error: {e}" @@ -239,6 +319,14 @@ async def sync_pdl(self, usage_point_id: str) -> dict[str, Any]: logger.warning(f"[SYNC] Failed to sync consumption detail for {usage_point_id}: {e}") result["consumption_detail"] = f"error: {e}" + # Sync daily max power (value + hour). 
+ try: + max_power_count = await self._sync_consumption_max_power(usage_point_id) + result["max_power"] = f"synced {max_power_count} days" + except Exception as e: + logger.warning(f"[SYNC] Failed to sync max power for {usage_point_id}: {e}") + result["max_power"] = f"error: {e}" + # Sync production data only if PDL has production pdl_result = await self.db.execute( select(PDL).where(PDL.usage_point_id == usage_point_id) @@ -266,6 +354,26 @@ async def sync_pdl(self, usage_point_id: str) -> dict[str, Any]: return result + @staticmethod + def _normalize_utc(ts: datetime | None) -> datetime | None: + if ts is None: + return None + if ts.tzinfo is None: + return ts.replace(tzinfo=UTC) + return ts.astimezone(UTC) + + async def _should_refresh_metadata( + self, model_class: Any, usage_point_id: str, min_interval_hours: int + ) -> bool: + """Return True if metadata should be refreshed from the upstream API.""" + result = await self.db.execute( + select(model_class.last_sync_at).where(model_class.usage_point_id == usage_point_id) + ) + last_sync_at = self._normalize_utc(result.scalar_one_or_none()) + if last_sync_at is None: + return True + return (datetime.now(UTC) - last_sync_at) >= timedelta(hours=min_interval_hours) + async def _sync_contract(self, usage_point_id: str) -> None: """Sync contract data for a PDL""" response = await self.adapter.get_contract(usage_point_id) @@ -372,6 +480,132 @@ async def _sync_consumption_detail(self, usage_point_id: str) -> int: model_class=ConsumptionData, ) + async def _sync_consumption_max_power(self, usage_point_id: str) -> int: + """Sync daily max power data (value + hour). + + This endpoint is distinct from detailed consumption and may expose + different values/timestamps than local reconstruction from intervals. 
+ """ + sync_status = await self._get_or_create_sync_status( + usage_point_id=usage_point_id, + data_type="max_power", + granularity=DataGranularity.DAILY, + ) + + # Enedis requires end_date < today (today's data isn't available yet) + end_date = date.today() - timedelta(days=1) + start_date = end_date - timedelta(days=MAX_DAILY_DAYS) + + now_utc = datetime.now(UTC) + last_sync_at = self._normalize_utc(sync_status.last_sync_at) + should_force_refresh = ( + last_sync_at is None + or (now_utc - last_sync_at) >= timedelta(hours=6) + ) + force_refresh_from = ( + max(start_date, end_date - timedelta(days=2)) + if should_force_refresh + else None + ) + + missing_ranges = await self._find_missing_power_ranges( + usage_point_id=usage_point_id, + start_date=start_date, + end_date=end_date, + force_refresh_from=force_refresh_from, + ) + + if not missing_ranges: + logger.debug( + f"[SYNC] max_power pour {usage_point_id}: aucune donnée manquante" + ) + return 0 + + logger.info( + f"[SYNC] max_power pour {usage_point_id}: " + f"{len(missing_ranges)} plage(s) manquante(s) détectée(s)" + ) + + sync_status.status = SyncStatusType.RUNNING + sync_status.last_sync_at = datetime.now(UTC) + await self.db.commit() + + total_synced = 0 + errors: list[str] = [] + chunk_size = 365 + + try: + for range_start, range_end in missing_ranges: + current_start = range_start + while current_start < range_end: + current_end = min(current_start + timedelta(days=chunk_size), range_end) + try: + response = await self.adapter.get_consumption_max_power( + usage_point_id, + current_start.isoformat(), + current_end.isoformat(), + ) + records = self._parse_max_power_readings(response, usage_point_id) + if records: + await self._upsert_max_power_records(records) + total_synced += len(records) + except Exception as e: + await self.db.rollback() + logger.warning( + f"[SYNC] Erreur fetch max_power pour {usage_point_id} " + f"({current_start} - {current_end}): {e}" + ) + errors.append(str(e)) + + current_start = 
current_end + + if errors: + sync_status.status = SyncStatusType.PARTIAL + sync_status.error_message = "; ".join(errors[:5]) + sync_status.error_count += len(errors) + else: + sync_status.status = SyncStatusType.SUCCESS + sync_status.error_message = None + + sync_status.records_synced_last_run = total_synced + sync_status.total_records += total_synced + + if total_synced > 0: + bounds_result = await self.db.execute( + select( + func.min(MaxPowerData.date), + func.max(MaxPowerData.date), + ).where(MaxPowerData.usage_point_id == usage_point_id) + ) + min_date, max_date = bounds_result.one() + + if min_date and ( + not sync_status.oldest_data_date + or min_date < sync_status.oldest_data_date + ): + sync_status.oldest_data_date = min_date + if max_date: + sync_status.newest_data_date = max_date + + sync_status.next_sync_at = datetime.now(UTC) + timedelta(hours=1) + await self.db.commit() + + logger.info( + f"[SYNC] max_power pour {usage_point_id}: " + f"{total_synced} enregistrements synchronisés" + ) + + except Exception as e: + await self.db.rollback() + sync_status.status = SyncStatusType.FAILED + sync_status.error_message = str(e) + sync_status.error_count += 1 + sync_status.last_error_at = datetime.now(UTC) + await self.db.commit() + raise + + return total_synced + async def _sync_production_daily(self, usage_point_id: str) -> int: """Sync daily production data""" return await self._sync_energy_data( @@ -394,6 +628,135 @@ async def _sync_production_detail(self, usage_point_id: str) -> int: model_class=ProductionData, ) + async def _find_missing_power_ranges( + self, + usage_point_id: str, + start_date: date, + end_date: date, + force_refresh_from: date | None = None, + ) -> list[tuple[date, date]]: + """Detect missing dates for max_power_data and group them in ranges. + + Returns a list of tuples (start, end) where end is exclusive. 
+ """ + result = await self.db.execute( + select(func.distinct(MaxPowerData.date)).where( + and_( + MaxPowerData.usage_point_id == usage_point_id, + MaxPowerData.date >= start_date, + MaxPowerData.date <= end_date, + ) + ) + ) + existing_dates = {row[0] for row in result.fetchall()} + + if force_refresh_from is not None: + existing_dates = {d for d in existing_dates if d < force_refresh_from} + + all_dates = set() + current = start_date + while current <= end_date: + all_dates.add(current) + current += timedelta(days=1) + + missing_dates = sorted(all_dates - existing_dates) + if not missing_dates: + return [] + + ranges: list[tuple[date, date]] = [] + range_start = missing_dates[0] + range_end = missing_dates[0] + for d in missing_dates[1:]: + if d == range_end + timedelta(days=1): + range_end = d + else: + ranges.append((range_start, range_end + timedelta(days=1))) + range_start = d + range_end = d + ranges.append((range_start, range_end + timedelta(days=1))) + + return ranges + + @staticmethod + def _split_power_datetime(date_str: str) -> tuple[str, str]: + """Split power reading date to (YYYY-MM-DD, HH:MM).""" + if not date_str: + return "", "00:00" + if "T" in date_str: + day_part, time_part = date_str.split("T", 1) + elif " " in date_str: + day_part, time_part = date_str.split(" ", 1) + else: + return date_str[:10], "00:00" + + hhmm = time_part[:5] if len(time_part) >= 5 else "00:00" + return day_part[:10], hhmm + + def _parse_max_power_readings( + self, + response: dict[str, Any], + usage_point_id: str, + ) -> list[dict[str, Any]]: + """Parse max power response and keep one peak row per day.""" + meter_reading: dict[str, Any] = {} + if isinstance(response.get("data"), dict): + meter_reading = response["data"].get("meter_reading", {}) + if not meter_reading: + meter_reading = response.get("meter_reading", {}) + + interval_reading = meter_reading.get("interval_reading", []) + + if not isinstance(interval_reading, list): + return [] + + # Keep max value per day 
(tie-breaker: latest hour). + by_day: dict[str, dict[str, Any]] = {} + for reading in interval_reading: + date_str = str(reading.get("date", "")) + day_str, hhmm = self._split_power_datetime(date_str) + if not day_str: + continue + try: + value = int(float(reading.get("value", 0))) + day_date = date.fromisoformat(day_str) + except (TypeError, ValueError): + continue + + current = by_day.get(day_str) + if ( + current is None + or value > int(current["value"]) + or (value == int(current["value"]) and hhmm > str(current["interval_start"] or "00:00")) + ): + by_day[day_str] = { + "usage_point_id": usage_point_id, + "date": day_date, + "interval_start": hhmm, + "value": value, + "source": "myelectricaldata", + "raw_data": reading, + } + + return [by_day[k] for k in sorted(by_day.keys())] + + async def _upsert_max_power_records(self, records: list[dict[str, Any]]) -> None: + """Upsert max power records by natural key (usage_point_id, date).""" + if not records: + return + + stmt = pg_insert(MaxPowerData).values(records) + stmt = stmt.on_conflict_do_update( + constraint="uq_max_power_data", + set_={ + "interval_start": stmt.excluded.interval_start, + "value": stmt.excluded.value, + "raw_data": stmt.excluded.raw_data, + "updated_at": datetime.now(UTC), + }, + ) + await self.db.execute(stmt) + await self.db.commit() + async def _find_missing_ranges( self, model_class: type[ConsumptionData | ProductionData], @@ -401,32 +764,61 @@ async def _find_missing_ranges( start_date: date, end_date: date, granularity: DataGranularity, + force_refresh_from: date | None = None, ) -> list[tuple[date, date]]: """Détecte les dates manquantes dans la base locale et les regroupe en plages. Interroge les dates distinctes existantes, puis identifie les trous. Retourne des tuples (start, end) avec end exclusif. + Si force_refresh_from est fourni, les dates >= force_refresh_from sont + considérées à rafraîchir même si déjà présentes localement. 
""" - result = await self.db.execute( - select(func.distinct(model_class.date)).where( - and_( - model_class.usage_point_id == usage_point_id, - model_class.granularity == granularity, - model_class.date >= start_date, - model_class.date < end_date, + placeholder_only_dates: set[date] = set() + if granularity == DataGranularity.DAILY: + result = await self.db.execute( + select(model_class.date, model_class.raw_data).where( + and_( + model_class.usage_point_id == usage_point_id, + model_class.granularity == granularity, + model_class.date >= start_date, + model_class.date <= end_date, + model_class.interval_start.is_(None), + ) ) ) - ) - existing_dates = {row[0] for row in result.fetchall()} + real_dates: set[date] = set() + placeholder_dates: set[date] = set() + for day_value, raw_data in result.all(): + if self._is_placeholder_raw(raw_data): + placeholder_dates.add(day_value) + else: + real_dates.add(day_value) + existing_dates = real_dates + placeholder_only_dates = placeholder_dates - real_dates + else: + result = await self.db.execute( + select(func.distinct(model_class.date)).where( + and_( + model_class.usage_point_id == usage_point_id, + model_class.granularity == granularity, + model_class.date >= start_date, + model_class.date <= end_date, + ) + ) + ) + existing_dates = {row[0] for row in result.fetchall()} + + if force_refresh_from is not None: + existing_dates = {d for d in existing_dates if d < force_refresh_from} # Générer toutes les dates attendues all_dates = set() current = start_date - while current < end_date: + while current <= end_date: all_dates.add(current) current += timedelta(days=1) - missing_dates = sorted(all_dates - existing_dates) + missing_dates = sorted((all_dates - existing_dates) | placeholder_only_dates) if not missing_dates: return [] @@ -478,13 +870,57 @@ async def _sync_energy_data( usage_point_id, data_type, granularity ) - # Plage totale : du plus ancien possible à J-1 - end_date = date.today() - timedelta(days=1) + # Plage 
totale en borne haute exclusive: + # [start_date, end_date[ avec end_date = today pour inclure J-1. + end_date = date.today() start_date = end_date - timedelta(days=max_days) - # Détecter les trous dans la base locale + # Periodic cleanup: remove duplicate DAILY rows caused by nullable + # UNIQUE keys on interval_start (NULL does not conflict in PostgreSQL). + if granularity == DataGranularity.DAILY: + await self._deduplicate_daily_rows( + model_class=model_class, + usage_point_id=usage_point_id, + start_date=start_date, + end_date=end_date, + ) + + # Détecter les trous dans la base locale. + # We throttle forced refresh windows to avoid repeatedly re-polling + # the same recent ranges and exhausting the upstream quota. + now_utc = datetime.now(UTC) + last_sync_at = self._normalize_utc(sync_status.last_sync_at) + + refresh_window_days = 0 + refresh_interval = None + if granularity == DataGranularity.DAILY: + # Daily values may be revised shortly after publication. + # Use 2 days to cover J-1 and J-2 (Enedis data can arrive late). + refresh_window_days = 2 + refresh_interval = timedelta(hours=6) + elif granularity == DataGranularity.DETAILED: + # Detailed data is expensive and typically stable once received. 
+ refresh_window_days = 0 + refresh_interval = timedelta(hours=24) + + should_force_refresh = ( + refresh_window_days > 0 and ( + last_sync_at is None or + refresh_interval is None or + (now_utc - last_sync_at) >= refresh_interval + ) + ) + force_refresh_from = ( + max(start_date, end_date - timedelta(days=refresh_window_days)) + if should_force_refresh else None + ) missing_ranges = await self._find_missing_ranges( - model_class, usage_point_id, start_date, end_date, granularity + model_class, + usage_point_id, + start_date, + end_date, + granularity, + force_refresh_from=force_refresh_from, ) if not missing_ranges: @@ -526,10 +962,30 @@ async def _sync_energy_data( current_end.isoformat(), ) + # Check for gateway error (success=false or data=None) + if isinstance(response, dict) and ( + response.get("success") is False or response.get("data") is None + ): + gw_error = response.get("error") + logger.warning( + f"[SYNC] Gateway returned no data for {usage_point_id} " + f"({current_start} - {current_end}): " + f"success={response.get('success')}, error={gw_error}" + ) + # Parse and store data records = self._parse_meter_reading( response, usage_point_id, granularity ) + records = await self._inject_daily_j_minus_1_placeholder( + usage_point_id=usage_point_id, + data_type=data_type, + granularity=granularity, + model_class=model_class, + range_start=current_start, + range_end=current_end, + records=records, + ) if records: await self._upsert_energy_records(records, model_class) total_synced += len(records) @@ -557,9 +1013,26 @@ async def _sync_energy_data( sync_status.total_records += total_synced if total_synced > 0: - if not sync_status.oldest_data_date or start_date < sync_status.oldest_data_date: - sync_status.oldest_data_date = start_date - sync_status.newest_data_date = end_date + # Compute actual local bounds after upsert (not theoretical requested bounds). 
+ bounds_result = await self.db.execute( + select( + func.min(model_class.date), + func.max(model_class.date), + ).where( + and_( + model_class.usage_point_id == usage_point_id, + model_class.granularity == granularity, + ) + ) + ) + min_date, max_date = bounds_result.one() + + if min_date and ( + not sync_status.oldest_data_date or min_date < sync_status.oldest_data_date + ): + sync_status.oldest_data_date = min_date + if max_date: + sync_status.newest_data_date = max_date sync_status.next_sync_at = datetime.now(UTC) + timedelta(minutes=30) await self.db.commit() @@ -634,18 +1107,93 @@ def _parse_meter_reading( logger.warning(f"[SYNC] Failed to parse date '{date_str}': {e}") continue + try: + value_wh = int(float(value)) + except (TypeError, ValueError): + logger.warning(f"[SYNC] Failed to parse value '{value}' for date '{date_str}'") + continue + records.append({ "usage_point_id": usage_point_id, "date": record_date, "granularity": granularity, "interval_start": interval_start, - "value": int(value), + "value": value_wh, "source": "myelectricaldata", "raw_data": reading, }) return records + async def _inject_daily_j_minus_1_placeholder( + self, + usage_point_id: str, + data_type: str, + granularity: DataGranularity, + model_class: type[ConsumptionData | ProductionData], + range_start: date, + range_end: date, + records: list[dict[str, Any]], + ) -> list[dict[str, Any]]: + """Inject a J-1 placeholder (value=0) when daily consumption is unknown. + + Behavior: + - Only for `consumption` + `daily` + - Only when J-1 is inside the fetched chunk [range_start, range_end[ + - Only if API response did not include J-1 + - Only if local DB has no existing daily row for J-1 + + This ensures Home Assistant gets a stable 0 when upstream data is late, + while allowing normal upsert to overwrite the placeholder once a real + value is published by the upstream API. 
+ """ + if data_type != "consumption" or granularity != DataGranularity.DAILY: + return records + + target_date = date.today() - timedelta(days=1) + if not (range_start <= target_date < range_end): + return records + + has_target_in_api = any( + record.get("date") == target_date and record.get("interval_start") is None + for record in records + ) + if has_target_in_api: + return records + + existing_result = await self.db.execute( + select(func.max(model_class.value)).where( + and_( + model_class.usage_point_id == usage_point_id, + model_class.granularity == DataGranularity.DAILY, + model_class.date == target_date, + ) + ) + ) + existing_value = existing_result.scalar_one_or_none() + if existing_value is not None: + return records + + logger.info( + f"[SYNC] consumption/daily pour {usage_point_id}: " + f"J-1 ({target_date}) absent de l'API, insertion placeholder 0" + ) + placeholder = { + "usage_point_id": usage_point_id, + "date": target_date, + "granularity": DataGranularity.DAILY, + "interval_start": None, + "value": 0, + "source": "myelectricaldata", + "raw_data": { + "date": target_date.isoformat(), + "value": 0, + "is_placeholder": True, + "reason": "missing_j_minus_1_from_upstream", + }, + } + return records + [placeholder] + async def _upsert_energy_records( self, records: list[dict[str, Any]], @@ -664,35 +1212,77 @@ async def _upsert_energy_records( # Nécessaire car l'API peut renvoyer des doublons (ex: changement d'heure d'hiver, # l'heure 01:30 existe deux fois le jour du passage). PostgreSQL refuse un # ON CONFLICT DO UPDATE si le même batch contient deux lignes en conflit. 
- seen: dict[tuple, int] = {} - for idx, record in enumerate(records): + dedup: dict[tuple, dict[str, Any]] = {} + for record in records: key = ( record["usage_point_id"], record["date"], record["granularity"], record.get("interval_start"), ) - seen[key] = idx # Le dernier doublon gagne - if len(seen) < len(records): + existing = dedup.get(key) + if existing is None: + dedup[key] = record + continue + + # For DAILY duplicates, keep the highest value (a spurious 0 can + # appear in upstream duplicates and must not overwrite a valid day). + if record.get("interval_start") is None and existing.get("interval_start") is None: + if int(record.get("value") or 0) >= int(existing.get("value") or 0): + dedup[key] = record + continue + + # For non-daily duplicates, keep the latest item from the batch. + dedup[key] = record + + if len(dedup) < len(records): logger.debug( - f"[SYNC] Dédupliqué {len(records) - len(seen)} enregistrements en double dans le batch" + f"[SYNC] Dédupliqué {len(records) - len(dedup)} enregistrements en double dans le batch" + ) + records = sorted( + dedup.values(), + key=lambda r: (r["date"], str(r.get("interval_start") or "")), ) - records = [records[i] for i in sorted(seen.values())] - - # Use PostgreSQL upsert (INSERT ... ON CONFLICT UPDATE) - stmt = pg_insert(model_class).values(records) - # On conflict, update value and raw_data - stmt = stmt.on_conflict_do_update( - constraint=f"uq_{model_class.__tablename__}", - set_={ - "value": stmt.excluded.value, - "raw_data": stmt.excluded.raw_data, - "updated_at": datetime.now(UTC), - }, - ) + daily_records: list[dict[str, Any]] = [] + interval_records: list[dict[str, Any]] = [] + for record in records: + if ( + record.get("granularity") == DataGranularity.DAILY + and record.get("interval_start") is None + ): + daily_records.append(record) + else: + interval_records.append(record) + + # DETAILED records are protected by uq_* on (usage_point_id, date, granularity, interval_start). 
+ if interval_records: + stmt = pg_insert(model_class).values(interval_records) + stmt = stmt.on_conflict_do_update( + constraint=f"uq_{model_class.__tablename__}", + set_={ + "value": stmt.excluded.value, + "raw_data": stmt.excluded.raw_data, + "updated_at": datetime.now(UTC), + }, + ) + await self.db.execute(stmt) + + # DAILY rows have interval_start=NULL, so PostgreSQL UNIQUE doesn't conflict on NULL. + # We therefore do an explicit delete-then-insert to enforce one row per + # (usage_point_id, date, granularity=daily). + if daily_records: + for record in daily_records: + delete_stmt = ( + delete(model_class) + .where(model_class.usage_point_id == record["usage_point_id"]) + .where(model_class.date == record["date"]) + .where(model_class.granularity == DataGranularity.DAILY) + .where(model_class.interval_start.is_(None)) + ) + await self.db.execute(delete_stmt) + await self.db.execute(pg_insert(model_class).values(record)) - await self.db.execute(stmt) await self.db.commit() async def _get_or_create_sync_status( @@ -1181,22 +1771,45 @@ async def sync_ecowatt(self) -> dict[str, Any]: } try: - # Update sync tracker - await self._update_sync_tracker("ecowatt_client") + if await self._should_skip_tracked_sync("ecowatt_client", timedelta(hours=6)): + logger.info("[SYNC] EcoWatt sync skipped (recent)") + return result + # Fetch EcoWatt forecast (includes current day + future days) response = await self.adapter.get_ecowatt_forecast() # Handle different response formats - if response.get("success") and response.get("data"): - signals = response["data"] - elif isinstance(response.get("data"), list): - signals = response["data"] - else: - # Try to get from root if it's a list directly - signals = response if isinstance(response, list) else [] + # Known shapes: + # - [{"periode": ...}, ...] + # - {"success": true, "data": [ ... ]} + # - {"success": true, "data": {"signals": [ ... ]}} + # - {"data": {"ecowatt": [ ... 
]}} + signals: list[dict[str, Any]] = [] + if isinstance(response, list): + signals = [s for s in response if isinstance(s, dict)] + elif isinstance(response, dict): + data = response.get("data") + if isinstance(data, list): + signals = [s for s in data if isinstance(s, dict)] + elif isinstance(data, dict): + for key in ("signals", "ecowatt", "forecast", "items"): + maybe = data.get(key) + if isinstance(maybe, list): + signals = [s for s in maybe if isinstance(s, dict)] + break + if not signals: + for key in ("signals", "ecowatt", "forecast", "items"): + maybe = response.get(key) + if isinstance(maybe, list): + signals = [s for s in maybe if isinstance(s, dict)] + break if not signals: - logger.warning("[SYNC] No EcoWatt data received from remote gateway") + logger.warning( + "[SYNC] No EcoWatt data received from remote gateway (response keys=%s, data_type=%s)", + list(response.keys()) if isinstance(response, dict) else type(response).__name__, + type(response.get("data")).__name__ if isinstance(response, dict) and "data" in response else "n/a", + ) result["errors"].append("No EcoWatt data received") return result @@ -1271,6 +1884,7 @@ async def sync_ecowatt(self) -> dict[str, Any]: result["errors"].append(str(e)) await self.db.commit() + await self._update_sync_tracker("ecowatt_client") logger.info( f"[SYNC] EcoWatt sync complete: " f"{result['created']} created, {result['updated']} updated" @@ -1302,8 +1916,9 @@ async def sync_tempo(self) -> dict[str, Any]: } try: - # Update sync tracker - await self._update_sync_tracker("tempo_client") + if await self._should_skip_tracked_sync("tempo_client", timedelta(hours=2)): + logger.info("[SYNC] Tempo sync skipped (recent)") + return result # Fetch Tempo calendar (current season) response = await self.adapter.get_tempo_calendar() @@ -1385,6 +2000,7 @@ async def sync_tempo(self) -> dict[str, Any]: result["errors"].append(str(e)) await self.db.commit() + await self._update_sync_tracker("tempo_client") logger.info( f"[SYNC] 
Tempo sync complete: " f"{result['created']} created, {result['updated']} updated" @@ -1482,6 +2098,13 @@ async def get_sync_tracker(self, cache_type: str) -> datetime | None: ) return result.scalar_one_or_none() + async def _should_skip_tracked_sync(self, cache_type: str, min_interval: timedelta) -> bool: + """Return True if a tracked sync ran too recently and should be skipped.""" + last_sync = self._normalize_utc(await self.get_sync_tracker(cache_type)) + if last_sync is None: + return False + return (datetime.now(UTC) - last_sync) < min_interval + # ========================================================================= # Consumption France Sync (national data) # ========================================================================= @@ -1503,8 +2126,9 @@ async def sync_consumption_france(self) -> dict[str, Any]: } try: - # Update sync tracker - await self._update_sync_tracker("consumption_france_client") + if await self._should_skip_tracked_sync("consumption_france_client", timedelta(hours=6)): + logger.info("[SYNC] Consumption France sync skipped (recent)") + return result # Fetch consumption data from gateway response = await self.adapter.get_consumption_france() @@ -1590,6 +2214,7 @@ async def sync_consumption_france(self) -> dict[str, Any]: result["errors"].append(str(e)) await self.db.commit() + await self._update_sync_tracker("consumption_france_client") logger.info( f"[SYNC] Consumption France sync complete: " f"{result['created']} created, {result['updated']} updated" @@ -1622,8 +2247,9 @@ async def sync_generation_forecast(self) -> dict[str, Any]: } try: - # Update sync tracker - await self._update_sync_tracker("generation_forecast_client") + if await self._should_skip_tracked_sync("generation_forecast_client", timedelta(hours=12)): + logger.info("[SYNC] Generation Forecast sync skipped (recent)") + return result # Fetch generation forecast from gateway response = await self.adapter.get_generation_forecast() @@ -1713,6 +2339,7 @@ async def 
sync_generation_forecast(self) -> dict[str, Any]: result["errors"].append(str(e)) await self.db.commit() + await self._update_sync_tracker("generation_forecast_client") logger.info( f"[SYNC] Generation Forecast sync complete: " f"{result['created']} created, {result['updated']} updated" diff --git a/apps/web/entrypoint.sh b/apps/web/entrypoint.sh index 017d572..4b60908 100644 --- a/apps/web/entrypoint.sh +++ b/apps/web/entrypoint.sh @@ -9,6 +9,13 @@ window.__ENV__ = { VITE_API_BASE_URL: "${VITE_API_BASE_URL:-/api}", VITE_BACKEND_URL: "${VITE_BACKEND_URL:-/api}", VITE_SERVER_MODE: "${VITE_SERVER_MODE:-false}", + VITE_DEFAULT_MQTT_BROKER: "${VITE_DEFAULT_MQTT_BROKER:-}", + VITE_DEFAULT_MQTT_PORT: "${VITE_DEFAULT_MQTT_PORT:-1883}", + VITE_DEFAULT_TOPIC_PREFIX: "${VITE_DEFAULT_TOPIC_PREFIX:-myelectricaldata}", + VITE_DEFAULT_ENTITY_PREFIX: "${VITE_DEFAULT_ENTITY_PREFIX:-myelectricaldata}", + VITE_DEFAULT_DISCOVERY_PREFIX: "${VITE_DEFAULT_DISCOVERY_PREFIX:-homeassistant}", + VITE_DEFAULT_HA_URL: "${VITE_DEFAULT_HA_URL:-}", + VITE_DEFAULT_VM_URL: "${VITE_DEFAULT_VM_URL:-}", }; EOF diff --git a/apps/web/src/components/DetailedCurve.tsx b/apps/web/src/components/DetailedCurve.tsx index 46056aa..ea4e3de 100644 --- a/apps/web/src/components/DetailedCurve.tsx +++ b/apps/web/src/components/DetailedCurve.tsx @@ -1,4 +1,4 @@ -import { useState, useEffect, useRef } from 'react' +import { useState, useEffect, useRef, useMemo } from 'react' import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, Legend, ResponsiveContainer } from 'recharts' import { Download, BarChart3, Loader2, CalendarDays, CalendarRange } from 'lucide-react' import { toast } from '@/stores/notificationStore' @@ -443,6 +443,32 @@ export function DetailedCurve({ return mergedData } + // Pre-compute the set of dates that have cached data (O(n) once instead of O(n × cells)) + const availableDatesSet = useMemo(() => { + const dates = new Set() + if (!selectedPDL) return dates + + const queryCache = 
queryClient.getQueryCache() + const allDetailQueries = queryCache.findAll({ + queryKey: [cacheKeyPrefix, selectedPDL], + exact: false, + }) + + for (const query of allDetailQueries) { + const responseData = query.state.data as any + if (!responseData?.data?.meter_reading?.interval_reading) continue + + for (const reading of responseData.data.meter_reading.interval_reading) { + if (reading.date) { + dates.add(reading.date.split(' ')[0].split('T')[0]) + } + } + } + return dates + // detailByDayData + detailWeekOffset: recalculate when the parent loads new data + // (covers week navigation, PDL switch, and initial fetch) + }, [selectedPDL, cacheKeyPrefix, queryClient, detailByDayData, detailWeekOffset]) + const renderCalendar = () => { const todayUTC = new Date() const yesterdayUTC = new Date(Date.UTC( @@ -476,33 +502,13 @@ export function DetailedCurve({ )) const isInRange = dayDate && dayDate <= yesterdayUTC && dayDate >= twoYearsAgoUTC - // Check if data exists for this day + // O(1) lookup instead of scanning all readings let hasData = false - if (isInRange && dayDate && selectedPDL) { + if (isInRange && dayDate) { const dateStr = dayDate.getFullYear() + '-' + String(dayDate.getMonth() + 1).padStart(2, '0') + '-' + String(dayDate.getDate()).padStart(2, '0') - - // Search through all cached queries for this date - const queryCache = queryClient.getQueryCache() - const allDetailQueries = queryCache.findAll({ - queryKey: [cacheKeyPrefix, selectedPDL], - exact: false, - }) - - for (const query of allDetailQueries) { - const responseData = query.state.data as any - if (!responseData?.data?.meter_reading?.interval_reading) continue - - const readings = responseData.data.meter_reading.interval_reading - hasData = readings.some((reading: any) => { - if (!reading.date) return false - const readingDate = reading.date.split(' ')[0].split('T')[0] - return readingDate === dateStr - }) - - if (hasData) break - } + hasData = availableDatesSet.has(dateStr) } // Calculate 
currently selected date in UTC

diff --git a/apps/web/src/components/PDLDetails.tsx b/apps/web/src/components/PDLDetails.tsx
index 4d105cc..c6c62a4 100644
--- a/apps/web/src/components/PDLDetails.tsx
+++ b/apps/web/src/components/PDLDetails.tsx
@@ -81,13 +81,18 @@ export default function PDLDetails({ usagePointId, onClose }: PDLDetailsProps) {
   const isTesting = testConsumptionDaily.isPending || testConsumptionDetail.isPending ||
     testMaxPower.isPending || testProductionDaily.isPending || testProductionDetail.isPending

-  // Syntax highlighting for JSON
+  // Syntax highlighting for JSON (HTML-safe)
   const highlightJSON = (json: string) => {
-    return json.replace(
-      /("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?)/g,
+    // Escape HTML entities FIRST to prevent XSS from API data
+    const escaped = json
+      .replace(/&/g, '&amp;')
+      .replace(/</g, '&lt;')
+      .replace(/>/g, '&gt;')
+    return escaped.replace(
+      /("|&quot;)((?:\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*)\1(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,
       (match) => {
         let cls = 'text-orange-400' // numbers
-        if (/^"/.test(match)) {
+        if (/^["&]/.test(match)) {
           if (/:$/.test(match)) {
             cls = 'text-blue-400' // keys
           } else {

diff --git a/apps/web/src/hooks/useDataFetch.ts b/apps/web/src/hooks/useDataFetch.ts
index 8b3f178..48f6f64 100644
--- a/apps/web/src/hooks/useDataFetch.ts
+++ b/apps/web/src/hooks/useDataFetch.ts
@@ -59,7 +59,8 @@ export function useDataFetch(selectedPDL: string, pdlDetails: PDL | undefined):
     ))
     const startDate = startDate_obj.toISOString().split('T')[0]
-    const endDate = yesterdayUTC.toISOString().split('T')[0]
+    // Use today (not yesterday) because backend uses exclusive end: date < end_date
+    const endDate = todayUTC.toISOString().split('T')[0]

     logger.log('[DataFetch] Fetching data for PDL:', selectedPDL, {
       has_consumption: pdlDetails.has_consumption,
@@ -85,7 +86,7 @@ export function useDataFetch(selectedPDL: string, pdlDetails: PDL | undefined):
           name: 'Puissance 
maximale', fn: async () => { setProgress(prev => ({ ...prev, message: 'Récupération puissance maximale...' })) - return enedisApi.getMaxPower(selectedPDL, { start: startDate, end: endDate }) + return enedisApi.getMaxPower(selectedPDL, { start: startDate, end: endDate, use_cache: true }) }, queryKey: ['maxPower', selectedPDL, startDate, endDate] }) diff --git a/apps/web/src/pages/ConsumptionEuro/index.tsx b/apps/web/src/pages/ConsumptionEuro/index.tsx index 25cf7e4..fb9ed83 100644 --- a/apps/web/src/pages/ConsumptionEuro/index.tsx +++ b/apps/web/src/pages/ConsumptionEuro/index.tsx @@ -192,9 +192,11 @@ export default function ConsumptionEuro() { const startDate = startDate_obj.getFullYear() + '-' + String(startDate_obj.getMonth() + 1).padStart(2, '0') + '-' + String(startDate_obj.getDate()).padStart(2, '0') - const endDate = yesterday.getFullYear() + '-' + - String(yesterday.getMonth() + 1).padStart(2, '0') + '-' + - String(yesterday.getDate()).padStart(2, '0') + // Use today (not yesterday) because backend uses exclusive end: date < end_date + const today_obj = new Date(now.getFullYear(), now.getMonth(), now.getDate(), 12, 0, 0, 0) + const endDate = today_obj.getFullYear() + '-' + + String(today_obj.getMonth() + 1).padStart(2, '0') + '-' + + String(today_obj.getDate()).padStart(2, '0') setDateRange({ start: startDate, end: endDate }) diff --git a/apps/web/src/pages/ConsumptionKwh/hooks/useConsumptionCalcs.ts b/apps/web/src/pages/ConsumptionKwh/hooks/useConsumptionCalcs.ts index 9940578..79881da 100644 --- a/apps/web/src/pages/ConsumptionKwh/hooks/useConsumptionCalcs.ts +++ b/apps/web/src/pages/ConsumptionKwh/hooks/useConsumptionCalcs.ts @@ -406,6 +406,7 @@ export function useConsumptionCalcs({ : `${comp.current.startDate.getFullYear() - 1}-${comp.current.startDate.getFullYear()}` const previousMonths = aggregateMonthly(comp.previous.startDate, comp.previous.endDate) + const entries = [] if (currentMonths.length >= 2) { @@ -430,12 +431,15 @@ export function 
useConsumptionCalcs({ }) // Dédupliquer les entrées de yearsByPreset (une période peut être "previous" d'un bloc et "current" d'un autre) - const seenLabels = new Set() - const yearsByPresetDeduped = yearsByPreset.filter(entry => { - if (seenLabels.has(entry.label)) return false - seenLabels.add(entry.label) - return true - }) + // On garde l'entrée avec le PLUS de mois pour chaque label (la version "current" est plus complète que la version "previous") + const bestByLabel = new Map() + for (const entry of yearsByPreset) { + const existing = bestByLabel.get(entry.label) + if (!existing || entry.byMonth.length > existing.byMonth.length) { + bestByLabel.set(entry.label, entry) + } + } + const yearsByPresetDeduped = [...bestByLabel.values()] return { byYear, @@ -687,7 +691,7 @@ export function useConsumptionCalcs({ const uniqueReadings = Array.from(uniqueReadingsMap.values()) - const mostRecentDate = new Date(Math.max(...uniqueReadings.map(r => r.date.getTime()))) + const mostRecentDate = uniqueReadings.reduce((max, r) => r.date > max ? r.date : max, uniqueReadings[0].date) // Define 3 rolling 365-day periods (aligned with byYear calculation) const periods = [] @@ -794,7 +798,7 @@ export function useConsumptionCalcs({ const uniqueReadings = Array.from(uniqueReadingsMap.values()) - const mostRecentDate = new Date(Math.max(...uniqueReadings.map(r => r.date.getTime()))) + const mostRecentDate = uniqueReadings.reduce((max, r) => r.date > max ? 
r.date : max, uniqueReadings[0].date) // Define 2 rolling 365-day periods (max 730 days from API) // Period 1: Most recent 365 days (from yesterday back 365 days) diff --git a/apps/web/src/pages/ConsumptionKwh/hooks/useConsumptionFetch.ts b/apps/web/src/pages/ConsumptionKwh/hooks/useConsumptionFetch.ts index a197b3b..9af1ca9 100644 --- a/apps/web/src/pages/ConsumptionKwh/hooks/useConsumptionFetch.ts +++ b/apps/web/src/pages/ConsumptionKwh/hooks/useConsumptionFetch.ts @@ -57,7 +57,7 @@ export function useConsumptionFetch({ } // Invalidate existing queries to force refetch - queryClient.invalidateQueries({ queryKey: ['consumption', selectedPDL] }) + queryClient.invalidateQueries({ queryKey: ['consumptionDaily', selectedPDL] }) queryClient.invalidateQueries({ queryKey: ['maxPower', selectedPDL] }) // Collapse all sections before fetching new data @@ -107,14 +107,16 @@ export function useConsumptionFetch({ logger.log(`Daily consumption: Requesting full 3 years (API will return error if too old)`) // Format dates as YYYY-MM-DD using LOCAL time (user's perspective) + // Backend uses exclusive end_date (date < end), so use TODAY to include yesterday + const today_obj = new Date(now.getFullYear(), now.getMonth(), now.getDate(), 12, 0, 0, 0) const startDate = startDate_obj.getFullYear() + '-' + String(startDate_obj.getMonth() + 1).padStart(2, '0') + '-' + String(startDate_obj.getDate()).padStart(2, '0') - const endDate = yesterday.getFullYear() + '-' + - String(yesterday.getMonth() + 1).padStart(2, '0') + '-' + - String(yesterday.getDate()).padStart(2, '0') + const endDate = today_obj.getFullYear() + '-' + + String(today_obj.getMonth() + 1).padStart(2, '0') + '-' + + String(today_obj.getDate()).padStart(2, '0') - logger.log(`Final date range for API: ${startDate} → ${endDate}`) + logger.log(`Final date range for API: ${startDate} → ${endDate} (end exclusive, includes yesterday)`) // Setting dateRange will trigger React Query to fetch data setDateRange({ start: startDate, 
end: endDate }) @@ -146,11 +148,12 @@ export function useConsumptionFetch({ String(twoYearsAgo.getMonth() + 1).padStart(2, '0') + '-' + String(twoYearsAgo.getDate()).padStart(2, '0') - const endDate = yesterday.getFullYear() + '-' + - String(yesterday.getMonth() + 1).padStart(2, '0') + '-' + - String(yesterday.getDate()).padStart(2, '0') + // Backend uses exclusive end_date (date < end), so use TODAY to include yesterday + const endDate = today.getFullYear() + '-' + + String(today.getMonth() + 1).padStart(2, '0') + '-' + + String(today.getDate()).padStart(2, '0') - logger.log(`Detailed data: Requesting 2 years via batch endpoint (${startDate} to ${endDate}) - 729 days`) + logger.log(`Detailed data: Requesting 2 years via batch endpoint (${startDate} to ${endDate}) - 730 days`) try { // Single batch call to get all detailed data for 2 years @@ -261,7 +264,7 @@ export function useConsumptionFetch({ try { // Invalidate production queries to force refetch - queryClient.invalidateQueries({ queryKey: ['production', productionPdlUsagePointId] }) + queryClient.invalidateQueries({ queryKey: ['productionDaily', productionPdlUsagePointId] }) // Fetch production daily data (3 years) // Use LOCAL time for user's perspective @@ -283,11 +286,13 @@ export function useConsumptionFetch({ const startDate3y = threeYearsAgo.getFullYear() + '-' + String(threeYearsAgo.getMonth() + 1).padStart(2, '0') + '-' + String(threeYearsAgo.getDate()).padStart(2, '0') - const endDate = yesterdayLocal.getFullYear() + '-' + - String(yesterdayLocal.getMonth() + 1).padStart(2, '0') + '-' + - String(yesterdayLocal.getDate()).padStart(2, '0') + // Backend uses exclusive end_date (date < end), so use today to include yesterday + const todayProd = new Date(nowLocal.getFullYear(), nowLocal.getMonth(), nowLocal.getDate(), 12, 0, 0, 0) + const endDate = todayProd.getFullYear() + '-' + + String(todayProd.getMonth() + 1).padStart(2, '0') + '-' + + String(todayProd.getDate()).padStart(2, '0') - 
logger.log(`Fetching production daily data: ${startDate3y} → ${endDate}`) + logger.log(`Fetching production daily data: ${startDate3y} → ${endDate} (end exclusive)`) // Note: We don't await these - they will be fetched and cached in background // The production page will use the cached data when the user navigates to it @@ -308,18 +313,10 @@ export function useConsumptionFetch({ }) // Fetch production detailed data (2 years) via batch endpoint - // Use LOCAL time for user's perspective - const todayLocal = new Date( - nowLocal.getFullYear(), - nowLocal.getMonth(), - nowLocal.getDate(), - 12, 0, 0, 0 - ) - const twoYearsAgo = new Date( - todayLocal.getFullYear() - 2, - todayLocal.getMonth(), - todayLocal.getDate(), + todayProd.getFullYear() - 2, + todayProd.getMonth(), + todayProd.getDate(), 12, 0, 0, 0 ) diff --git a/apps/web/src/pages/ConsumptionKwh/index.tsx b/apps/web/src/pages/ConsumptionKwh/index.tsx index e45fb40..bdace10 100644 --- a/apps/web/src/pages/ConsumptionKwh/index.tsx +++ b/apps/web/src/pages/ConsumptionKwh/index.tsx @@ -120,30 +120,31 @@ export default function ConsumptionKwh() { if (!dateRange) return null const now = new Date() - const yesterday = new Date( + const today = new Date( now.getFullYear(), now.getMonth(), - now.getDate() - 1, + now.getDate(), 12, 0, 0, 0 // Use noon to avoid DST edge cases ) const offsetDays = detailWeekOffset * 7 + // endDate_obj = today - offset (backend uses exclusive end: date < end_date) let endDate_obj = new Date( - yesterday.getFullYear(), - yesterday.getMonth(), - yesterday.getDate() - offsetDays, + today.getFullYear(), + today.getMonth(), + today.getDate() - offsetDays, 12, 0, 0, 0 ) - if (endDate_obj > yesterday) { - endDate_obj = new Date(yesterday) + if (endDate_obj > today) { + endDate_obj = new Date(today) } const startDate_obj = new Date( endDate_obj.getFullYear(), endDate_obj.getMonth(), - endDate_obj.getDate() - 6, + endDate_obj.getDate() - 7, 12, 0, 0, 0 ) @@ -399,11 +400,13 @@ export default 
function ConsumptionKwh() { const startDate = startDate_obj.getFullYear() + '-' + String(startDate_obj.getMonth() + 1).padStart(2, '0') + '-' + String(startDate_obj.getDate()).padStart(2, '0') - const endDate = yesterday.getFullYear() + '-' + - String(yesterday.getMonth() + 1).padStart(2, '0') + '-' + - String(yesterday.getDate()).padStart(2, '0') + // Backend uses exclusive end_date (date < end), so use today to include yesterday + const today_obj = new Date(now.getFullYear(), now.getMonth(), now.getDate(), 12, 0, 0, 0) + const endDate = today_obj.getFullYear() + '-' + + String(today_obj.getMonth() + 1).padStart(2, '0') + '-' + + String(today_obj.getDate()).padStart(2, '0') - logger.log('[Auto-load] Setting date range:', startDate, 'to', endDate) + logger.log('[Auto-load] Setting date range:', startDate, 'to', endDate, '(end exclusive)') setDateRange({ start: startDate, end: endDate }) diff --git a/apps/web/src/pages/HomeAssistant.tsx b/apps/web/src/pages/HomeAssistant.tsx index 0ac110c..735674e 100644 --- a/apps/web/src/pages/HomeAssistant.tsx +++ b/apps/web/src/pages/HomeAssistant.tsx @@ -55,133 +55,135 @@ interface BaseEntity { } // Entités globales (RTE Tempo, EDF Tempo, EcoWatt) -const GLOBAL_ENTITIES: BaseEntity[] = [ - // RTE Tempo - { - entity_id: 'sensor.myelectricaldata_tempo_today', - name: 'Tempo Aujourd\'hui', - device: 'RTE Tempo', - icon: 'mdi:calendar-today', - description: 'Couleur du jour Tempo (BLUE, WHITE, RED)', - }, - { - entity_id: 'sensor.myelectricaldata_tempo_tomorrow', - name: 'Tempo Demain', - device: 'RTE Tempo', - icon: 'mdi:calendar-tomorrow', - description: 'Couleur du lendemain Tempo', - }, - // EDF Tempo - { - entity_id: 'sensor.myelectricaldata_tempo_info', - name: 'Tempo Info', - device: 'EDF Tempo', - icon: 'mdi:information', - description: 'Informations générales contrat Tempo', - }, - { - entity_id: 'sensor.myelectricaldata_tempo_days_blue', - name: 'Jours Bleus', - device: 'EDF Tempo', - icon: 'mdi:calendar', - unit: 
'jours', - description: 'Nombre de jours bleus restants', - }, - { - entity_id: 'sensor.myelectricaldata_tempo_days_white', - name: 'Jours Blancs', - device: 'EDF Tempo', - icon: 'mdi:calendar', - unit: 'jours', - description: 'Nombre de jours blancs restants', - }, - { - entity_id: 'sensor.myelectricaldata_tempo_days_red', - name: 'Jours Rouges', - device: 'EDF Tempo', - icon: 'mdi:calendar', - unit: 'jours', - description: 'Nombre de jours rouges restants', - }, - { - entity_id: 'sensor.myelectricaldata_tempo_price_blue_hc', - name: 'Prix Bleu HC', - device: 'EDF Tempo', - icon: 'mdi:currency-eur', - unit: 'EUR/kWh', - description: 'Tarif heures creuses jour bleu', - }, - { - entity_id: 'sensor.myelectricaldata_tempo_price_blue_hp', - name: 'Prix Bleu HP', - device: 'EDF Tempo', - icon: 'mdi:currency-eur', - unit: 'EUR/kWh', - description: 'Tarif heures pleines jour bleu', - }, - { - entity_id: 'sensor.myelectricaldata_tempo_price_white_hc', - name: 'Prix Blanc HC', - device: 'EDF Tempo', - icon: 'mdi:currency-eur', - unit: 'EUR/kWh', - description: 'Tarif heures creuses jour blanc', - }, - { - entity_id: 'sensor.myelectricaldata_tempo_price_white_hp', - name: 'Prix Blanc HP', - device: 'EDF Tempo', - icon: 'mdi:currency-eur', - unit: 'EUR/kWh', - description: 'Tarif heures pleines jour blanc', - }, - { - entity_id: 'sensor.myelectricaldata_tempo_price_red_hc', - name: 'Prix Rouge HC', - device: 'EDF Tempo', - icon: 'mdi:currency-eur', - unit: 'EUR/kWh', - description: 'Tarif heures creuses jour rouge', - }, - { - entity_id: 'sensor.myelectricaldata_tempo_price_red_hp', - name: 'Prix Rouge HP', - device: 'EDF Tempo', - icon: 'mdi:currency-eur', - unit: 'EUR/kWh', - description: 'Tarif heures pleines jour rouge', - }, - // EcoWatt - { - entity_id: 'sensor.myelectricaldata_ecowatt_j0', - name: 'EcoWatt J0', - device: 'RTE EcoWatt', - icon: 'mdi:leaf', - description: 'Signal EcoWatt aujourd\'hui (1=vert, 2=orange, 3=rouge)', - }, - { - entity_id: 
'sensor.myelectricaldata_ecowatt_j1', - name: 'EcoWatt J+1', - device: 'RTE EcoWatt', - icon: 'mdi:leaf', - description: 'Signal EcoWatt demain', - }, - { - entity_id: 'sensor.myelectricaldata_ecowatt_j2', - name: 'EcoWatt J+2', - device: 'RTE EcoWatt', - icon: 'mdi:leaf', - description: 'Signal EcoWatt après-demain', - }, -] +function getGlobalEntities(prefix: string): BaseEntity[] { + return [ + // RTE Tempo + { + entity_id: `sensor.${prefix}_tempo_today`, + name: 'Tempo Aujourd\'hui', + device: 'RTE Tempo', + icon: 'mdi:calendar-today', + description: 'Couleur du jour Tempo (BLUE, WHITE, RED)', + }, + { + entity_id: `sensor.${prefix}_tempo_tomorrow`, + name: 'Tempo Demain', + device: 'RTE Tempo', + icon: 'mdi:calendar-tomorrow', + description: 'Couleur du lendemain Tempo', + }, + // EDF Tempo + { + entity_id: `sensor.${prefix}_tempo_info`, + name: 'Tempo Info', + device: 'EDF Tempo', + icon: 'mdi:information', + description: 'Informations générales contrat Tempo', + }, + { + entity_id: `sensor.${prefix}_tempo_days_blue`, + name: 'Jours Bleus', + device: 'EDF Tempo', + icon: 'mdi:calendar', + unit: 'jours', + description: 'Nombre de jours bleus restants', + }, + { + entity_id: `sensor.${prefix}_tempo_days_white`, + name: 'Jours Blancs', + device: 'EDF Tempo', + icon: 'mdi:calendar', + unit: 'jours', + description: 'Nombre de jours blancs restants', + }, + { + entity_id: `sensor.${prefix}_tempo_days_red`, + name: 'Jours Rouges', + device: 'EDF Tempo', + icon: 'mdi:calendar', + unit: 'jours', + description: 'Nombre de jours rouges restants', + }, + { + entity_id: `sensor.${prefix}_tempo_price_blue_hc`, + name: 'Prix Bleu HC', + device: 'EDF Tempo', + icon: 'mdi:currency-eur', + unit: 'EUR/kWh', + description: 'Tarif heures creuses jour bleu', + }, + { + entity_id: `sensor.${prefix}_tempo_price_blue_hp`, + name: 'Prix Bleu HP', + device: 'EDF Tempo', + icon: 'mdi:currency-eur', + unit: 'EUR/kWh', + description: 'Tarif heures pleines jour bleu', + }, + { + entity_id: 
`sensor.${prefix}_tempo_price_white_hc`, + name: 'Prix Blanc HC', + device: 'EDF Tempo', + icon: 'mdi:currency-eur', + unit: 'EUR/kWh', + description: 'Tarif heures creuses jour blanc', + }, + { + entity_id: `sensor.${prefix}_tempo_price_white_hp`, + name: 'Prix Blanc HP', + device: 'EDF Tempo', + icon: 'mdi:currency-eur', + unit: 'EUR/kWh', + description: 'Tarif heures pleines jour blanc', + }, + { + entity_id: `sensor.${prefix}_tempo_price_red_hc`, + name: 'Prix Rouge HC', + device: 'EDF Tempo', + icon: 'mdi:currency-eur', + unit: 'EUR/kWh', + description: 'Tarif heures creuses jour rouge', + }, + { + entity_id: `sensor.${prefix}_tempo_price_red_hp`, + name: 'Prix Rouge HP', + device: 'EDF Tempo', + icon: 'mdi:currency-eur', + unit: 'EUR/kWh', + description: 'Tarif heures pleines jour rouge', + }, + // EcoWatt + { + entity_id: `sensor.${prefix}_ecowatt_j0`, + name: 'EcoWatt J0', + device: 'RTE EcoWatt', + icon: 'mdi:leaf', + description: 'Signal EcoWatt aujourd\'hui (1=vert, 2=orange, 3=rouge)', + }, + { + entity_id: `sensor.${prefix}_ecowatt_j1`, + name: 'EcoWatt J+1', + device: 'RTE EcoWatt', + icon: 'mdi:leaf', + description: 'Signal EcoWatt demain', + }, + { + entity_id: `sensor.${prefix}_ecowatt_j2`, + name: 'EcoWatt J+2', + device: 'RTE EcoWatt', + icon: 'mdi:leaf', + description: 'Signal EcoWatt après-demain', + }, + ] +} /** * Génère les entités spécifiques à un PDL */ -function getPdlEntities(pdl: string): BaseEntity[] { +function getPdlEntities(pdl: string, prefix: string): BaseEntity[] { return [ { - entity_id: `sensor.myelectricaldata_linky_${pdl}_consumption`, + entity_id: `sensor.${prefix}_linky_${pdl}_consumption`, name: `Consommation ${pdl}`, device: `Linky ${pdl}`, icon: 'mdi:flash', @@ -190,7 +192,7 @@ function getPdlEntities(pdl: string): BaseEntity[] { description: 'Consommation journalière (avec historique 31j en attributs)', }, { - entity_id: `sensor.myelectricaldata_linky_${pdl}_consumption_last7day`, + entity_id: 
`sensor.${prefix}_linky_${pdl}_consumption_last7day`, name: `Conso 7j ${pdl}`, device: `Linky ${pdl}`, icon: 'mdi:flash', @@ -199,7 +201,7 @@ function getPdlEntities(pdl: string): BaseEntity[] { description: 'Total consommation des 7 derniers jours', }, { - entity_id: `sensor.myelectricaldata_linky_${pdl}_consumption_last14day`, + entity_id: `sensor.${prefix}_linky_${pdl}_consumption_last14day`, name: `Conso 14j ${pdl}`, device: `Linky ${pdl}`, icon: 'mdi:flash', @@ -208,7 +210,7 @@ function getPdlEntities(pdl: string): BaseEntity[] { description: 'Total consommation des 14 derniers jours', }, { - entity_id: `sensor.myelectricaldata_linky_${pdl}_consumption_last30day`, + entity_id: `sensor.${prefix}_linky_${pdl}_consumption_last30day`, name: `Conso 30j ${pdl}`, device: `Linky ${pdl}`, icon: 'mdi:flash', @@ -217,7 +219,7 @@ function getPdlEntities(pdl: string): BaseEntity[] { description: 'Total consommation des 30 derniers jours', }, { - entity_id: `sensor.myelectricaldata_linky_${pdl}_production`, + entity_id: `sensor.${prefix}_linky_${pdl}_production`, name: `Production ${pdl}`, device: `Linky ${pdl}`, icon: 'mdi:solar-power', @@ -226,7 +228,7 @@ function getPdlEntities(pdl: string): BaseEntity[] { description: 'Production journalière (avec historique 31j en attributs)', }, { - entity_id: `sensor.myelectricaldata_linky_${pdl}_production_last7day`, + entity_id: `sensor.${prefix}_linky_${pdl}_production_last7day`, name: `Prod 7j ${pdl}`, device: `Linky ${pdl}`, icon: 'mdi:solar-power', @@ -235,7 +237,7 @@ function getPdlEntities(pdl: string): BaseEntity[] { description: 'Total production des 7 derniers jours', }, { - entity_id: `sensor.myelectricaldata_linky_${pdl}_production_last14day`, + entity_id: `sensor.${prefix}_linky_${pdl}_production_last14day`, name: `Prod 14j ${pdl}`, device: `Linky ${pdl}`, icon: 'mdi:solar-power', @@ -244,7 +246,7 @@ function getPdlEntities(pdl: string): BaseEntity[] { description: 'Total production des 14 derniers jours', }, { - 
entity_id: `sensor.myelectricaldata_linky_${pdl}_production_last30day`, + entity_id: `sensor.${prefix}_linky_${pdl}_production_last30day`, name: `Prod 30j ${pdl}`, device: `Linky ${pdl}`, icon: 'mdi:solar-power', @@ -257,6 +259,9 @@ function getPdlEntities(pdl: string): BaseEntity[] { export default function HomeAssistant() { const queryClient = useQueryClient() + const runtimeEnv = typeof window !== 'undefined' ? window.__ENV__ || {} : {} + const parsedDefaultPort = Number.parseInt(runtimeEnv.VITE_DEFAULT_MQTT_PORT || '1883', 10) + const defaultMqttPort = Number.isFinite(parsedDefaultPort) ? parsedDefaultPort : 1883 // State const [isEditing, setIsEditing] = useState(false) @@ -277,18 +282,18 @@ export default function HomeAssistant() { const [formIntervalMinutes, setFormIntervalMinutes] = useState(null) // MQTT Discovery config - const [mqttBroker, setMqttBroker] = useState('') - const [mqttPort, setMqttPort] = useState(1883) + const [mqttBroker, setMqttBroker] = useState(runtimeEnv.VITE_DEFAULT_MQTT_BROKER || '') + const [mqttPort, setMqttPort] = useState(defaultMqttPort) const [mqttUsername, setMqttUsername] = useState('') const [mqttPassword, setMqttPassword] = useState('') const [mqttTls, setMqttTls] = useState(false) - const [entityPrefix, setEntityPrefix] = useState('myelectricaldata') - const [discoveryPrefix, setDiscoveryPrefix] = useState('homeassistant') + const [entityPrefix, setEntityPrefix] = useState(runtimeEnv.VITE_DEFAULT_ENTITY_PREFIX || 'myelectricaldata') + const [discoveryPrefix, setDiscoveryPrefix] = useState(runtimeEnv.VITE_DEFAULT_DISCOVERY_PREFIX || 'homeassistant') // WebSocket API config - const [haUrl, setHaUrl] = useState('') + const [haUrl, setHaUrl] = useState(runtimeEnv.VITE_DEFAULT_HA_URL || '') const [haToken, setHaToken] = useState('') - const [statisticIdPrefix, setStatisticIdPrefix] = useState('myelectricaldata') + const [statisticIdPrefix, setStatisticIdPrefix] = useState(runtimeEnv.VITE_DEFAULT_ENTITY_PREFIX || 
'myelectricaldata') // Query existing config const { data: configsResponse, isLoading } = useQuery({ @@ -337,8 +342,8 @@ export default function HomeAssistant() { })() const allEntities = [ - ...GLOBAL_ENTITIES, - ...pdlIds.flatMap((pdlId) => getPdlEntities(pdlId)), + ...getGlobalEntities(entityPrefix || 'myelectricaldata'), + ...pdlIds.flatMap((pdlId) => getPdlEntities(pdlId, entityPrefix || 'myelectricaldata')), ] // Group entities by device @@ -496,6 +501,7 @@ export default function HomeAssistant() { mutationFn: (id: string) => exportApi.runExport(id), onSuccess: () => { toast.success('Export lancé') + queryClient.invalidateQueries({ queryKey: ['export-configs'] }) }, onError: (error: Error) => { toast.error(`Erreur: ${error.message}`) diff --git a/apps/web/src/pages/MQTT.tsx b/apps/web/src/pages/MQTT.tsx index 5c91725..db22195 100644 --- a/apps/web/src/pages/MQTT.tsx +++ b/apps/web/src/pages/MQTT.tsx @@ -36,6 +36,9 @@ import MetricsSection from '@/components/MetricsSection' export default function MQTT() { const queryClient = useQueryClient() + const runtimeEnv = typeof window !== 'undefined' ? window.__ENV__ || {} : {} + const parsedDefaultPort = Number.parseInt(runtimeEnv.VITE_DEFAULT_MQTT_PORT || '1883', 10) + const defaultMqttPort = Number.isFinite(parsedDefaultPort) ? 
parsedDefaultPort : 1883 // State const [isEditing, setIsEditing] = useState(false) @@ -51,12 +54,12 @@ export default function MQTT() { const [formIntervalMinutes, setFormIntervalMinutes] = useState(null) // MQTT config - const [mqttBroker, setMqttBroker] = useState('') - const [mqttPort, setMqttPort] = useState(1883) + const [mqttBroker, setMqttBroker] = useState(runtimeEnv.VITE_DEFAULT_MQTT_BROKER || '') + const [mqttPort, setMqttPort] = useState(defaultMqttPort) const [mqttUsername, setMqttUsername] = useState('') const [mqttPassword, setMqttPassword] = useState('') const [mqttTls, setMqttTls] = useState(false) - const [topicPrefix, setTopicPrefix] = useState('myelectricaldata') + const [topicPrefix, setTopicPrefix] = useState(runtimeEnv.VITE_DEFAULT_TOPIC_PREFIX || 'myelectricaldata') const [qos, setQos] = useState(0) const [retain, setRetain] = useState(true) diff --git a/apps/web/src/pages/Production/hooks/useProductionFetch.ts b/apps/web/src/pages/Production/hooks/useProductionFetch.ts index f9055ab..10f4d25 100644 --- a/apps/web/src/pages/Production/hooks/useProductionFetch.ts +++ b/apps/web/src/pages/Production/hooks/useProductionFetch.ts @@ -43,7 +43,7 @@ export function useProductionFetch({ } // Invalidate existing queries to force refetch - queryClient.invalidateQueries({ queryKey: ['production', selectedPDL] }) + queryClient.invalidateQueries({ queryKey: ['productionDaily', selectedPDL] }) // Collapse all sections before fetching new data setIsChartsExpanded(false) @@ -84,9 +84,11 @@ export function useProductionFetch({ const startDate = startDate_obj.getFullYear() + '-' + String(startDate_obj.getMonth() + 1).padStart(2, '0') + '-' + String(startDate_obj.getDate()).padStart(2, '0') - const endDate = yesterday.getFullYear() + '-' + - String(yesterday.getMonth() + 1).padStart(2, '0') + '-' + - String(yesterday.getDate()).padStart(2, '0') + // Use today (not yesterday) because backend uses exclusive end: date < end_date + const today_obj = new 
Date(now.getFullYear(), now.getMonth(), now.getDate(), 12, 0, 0, 0) + const endDate = today_obj.getFullYear() + '-' + + String(today_obj.getMonth() + 1).padStart(2, '0') + '-' + + String(today_obj.getDate()).padStart(2, '0') logger.log(`Final date range for API: ${startDate} → ${endDate}`) @@ -120,9 +122,10 @@ export function useProductionFetch({ String(twoYearsAgo.getMonth() + 1).padStart(2, '0') + '-' + String(twoYearsAgo.getDate()).padStart(2, '0') - const endDate = yesterday.getFullYear() + '-' + - String(yesterday.getMonth() + 1).padStart(2, '0') + '-' + - String(yesterday.getDate()).padStart(2, '0') + // Use today (not yesterday) because backend uses exclusive end: date < end_date + const endDate = today.getFullYear() + '-' + + String(today.getMonth() + 1).padStart(2, '0') + '-' + + String(today.getDate()).padStart(2, '0') logger.log(`Detailed data: Requesting 2 years via batch endpoint (${startDate} to ${endDate}) - 729 days`) diff --git a/apps/web/src/pages/Production/index.tsx b/apps/web/src/pages/Production/index.tsx index d9bf1cd..92dbbb3 100644 --- a/apps/web/src/pages/Production/index.tsx +++ b/apps/web/src/pages/Production/index.tsx @@ -95,26 +95,27 @@ export default function Production() { if (!dateRange) return null const now = new Date() - const yesterday = new Date( + // Use today as exclusive end (backend: date < end_date) so yesterday is included + const today = new Date( now.getFullYear(), now.getMonth(), - now.getDate() - 1, + now.getDate(), 12, 0, 0, 0 // Use noon to avoid DST edge cases ) const startDate_obj = new Date( - yesterday.getFullYear(), - yesterday.getMonth(), - yesterday.getDate() - 6, + today.getFullYear(), + today.getMonth(), + today.getDate() - 7, 12, 0, 0, 0 ) const startDate = startDate_obj.getFullYear() + '-' + String(startDate_obj.getMonth() + 1).padStart(2, '0') + '-' + String(startDate_obj.getDate()).padStart(2, '0') - const endDate = yesterday.getFullYear() + '-' + - String(yesterday.getMonth() + 1).padStart(2, '0') + 
'-' + - String(yesterday.getDate()).padStart(2, '0') + const endDate = today.getFullYear() + '-' + + String(today.getMonth() + 1).padStart(2, '0') + '-' + + String(today.getDate()).padStart(2, '0') return { start: startDate, end: endDate } }, [dateRange]) @@ -420,9 +421,11 @@ export default function Production() { const startDate = startDate_obj.getFullYear() + '-' + String(startDate_obj.getMonth() + 1).padStart(2, '0') + '-' + String(startDate_obj.getDate()).padStart(2, '0') - const endDate = yesterday.getFullYear() + '-' + - String(yesterday.getMonth() + 1).padStart(2, '0') + '-' + - String(yesterday.getDate()).padStart(2, '0') + // Use today (not yesterday) because backend uses exclusive end: date < end_date + const today_obj = new Date(now.getFullYear(), now.getMonth(), now.getDate(), 12, 0, 0, 0) + const endDate = today_obj.getFullYear() + '-' + + String(today_obj.getMonth() + 1).padStart(2, '0') + '-' + + String(today_obj.getDate()).padStart(2, '0') logger.log('[Auto-load] Setting date range:', startDate, 'to', endDate) diff --git a/apps/web/src/pages/VictoriaMetrics.tsx b/apps/web/src/pages/VictoriaMetrics.tsx index c83cb6f..ab0c67d 100644 --- a/apps/web/src/pages/VictoriaMetrics.tsx +++ b/apps/web/src/pages/VictoriaMetrics.tsx @@ -35,6 +35,7 @@ import MetricsSection from '@/components/MetricsSection' export default function VictoriaMetrics() { const queryClient = useQueryClient() + const runtimeEnv = typeof window !== 'undefined' ? 
window.__ENV__ || {} : {} // State const [isEditing, setIsEditing] = useState(false) @@ -50,7 +51,7 @@ export default function VictoriaMetrics() { const [formIntervalMinutes, setFormIntervalMinutes] = useState(null) // VictoriaMetrics config - const [vmUrl, setVmUrl] = useState('') + const [vmUrl, setVmUrl] = useState(runtimeEnv.VITE_DEFAULT_VM_URL || '') const [vmDatabase, setVmDatabase] = useState('myelectricaldata') const [vmUsername, setVmUsername] = useState('') const [vmPassword, setVmPassword] = useState('') diff --git a/apps/web/src/vite-env.d.ts b/apps/web/src/vite-env.d.ts index 10c811a..1cca6be 100644 --- a/apps/web/src/vite-env.d.ts +++ b/apps/web/src/vite-env.d.ts @@ -22,5 +22,12 @@ interface Window { VITE_API_BASE_URL?: string VITE_BACKEND_URL?: string VITE_SERVER_MODE?: string + VITE_DEFAULT_MQTT_BROKER?: string + VITE_DEFAULT_MQTT_PORT?: string + VITE_DEFAULT_TOPIC_PREFIX?: string + VITE_DEFAULT_ENTITY_PREFIX?: string + VITE_DEFAULT_DISCOVERY_PREFIX?: string + VITE_DEFAULT_HA_URL?: string + VITE_DEFAULT_VM_URL?: string } }