Skip to content
Open
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ apps/api/data/*.db
# OS
.DS_Store
Thumbs.db
@eaDir/

# Docker
docker-compose.override.yml
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
"""Add max_power_data table for client mode.

Revision ID: c9d3e7f1a2b4
Revises: b2c3d4e5f6g7
Create Date: 2026-02-16 12:30:00
"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "c9d3e7f1a2b4"
down_revision: Union[str, None] = "b2c3d4e5f6g7"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Create the max_power_data table (one row per usage point per day)."""
    dialect = op.get_bind().dialect.name

    if dialect == "postgresql":
        # Raw DDL on PostgreSQL: IF NOT EXISTS keeps the migration idempotent
        # if it is re-run against a database that already has the table.
        op.execute(
            """
            CREATE TABLE IF NOT EXISTS max_power_data (
                id VARCHAR(36) PRIMARY KEY,
                usage_point_id VARCHAR(14) NOT NULL,
                date DATE NOT NULL,
                interval_start VARCHAR(5),
                value INTEGER NOT NULL,
                source VARCHAR(50) DEFAULT 'myelectricaldata',
                raw_data JSONB,
                created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
                updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
                CONSTRAINT uq_max_power_data UNIQUE (usage_point_id, date)
            )
            """
        )
        op.execute(
            "CREATE INDEX IF NOT EXISTS ix_max_power_usage_point_date "
            "ON max_power_data(usage_point_id, date)"
        )
        op.execute(
            "CREATE INDEX IF NOT EXISTS ix_max_power_date ON max_power_data(date)"
        )
        return

    # Other dialects (SQLite in practice): portable SQLAlchemy DDL constructs.
    table_columns = [
        sa.Column("id", sa.String(length=36), primary_key=True),
        sa.Column("usage_point_id", sa.String(length=14), nullable=False),
        sa.Column("date", sa.Date(), nullable=False),
        # Start time (HH:MM) of the interval that held the daily peak.
        sa.Column("interval_start", sa.String(length=5), nullable=True),
        sa.Column("value", sa.Integer(), nullable=False),
        sa.Column(
            "source",
            sa.String(length=50),
            nullable=True,
            server_default="myelectricaldata",
        ),
        sa.Column("raw_data", sa.JSON(), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
        ),
    ]
    op.create_table(
        "max_power_data",
        *table_columns,
        sa.UniqueConstraint("usage_point_id", "date", name="uq_max_power_data"),
    )
    op.create_index(
        "ix_max_power_usage_point_date",
        "max_power_data",
        ["usage_point_id", "date"],
    )
    op.create_index("ix_max_power_date", "max_power_data", ["date"])


def downgrade() -> None:
    """Drop the max_power_data table and its two secondary indexes."""
    if op.get_bind().dialect.name == "postgresql":
        # IF EXISTS mirrors the idempotent raw-SQL upgrade path.
        for statement in (
            "DROP INDEX IF EXISTS ix_max_power_usage_point_date",
            "DROP INDEX IF EXISTS ix_max_power_date",
            "DROP TABLE IF EXISTS max_power_data",
        ):
            op.execute(statement)
    else:
        op.drop_index("ix_max_power_usage_point_date", table_name="max_power_data")
        op.drop_index("ix_max_power_date", table_name="max_power_data")
        op.drop_table("max_power_data")
56 changes: 56 additions & 0 deletions apps/api/alembic/versions/20260223_fix_nulls_not_distinct.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
"""Fix unique constraints to use NULLS NOT DISTINCT

For daily records, interval_start is NULL. PostgreSQL treats NULLs as distinct
in unique constraints by default, which means ON CONFLICT never fires for daily
records and duplicates can be inserted. This migration recreates the constraints
with NULLS NOT DISTINCT (PostgreSQL 15+).

Revision ID: c3d4e5f6g7h8
Revises: c9d3e7f1a2b4
Create Date: 2026-02-23

"""
from typing import Sequence, Union

from alembic import op

# revision identifiers, used by Alembic.
# NOTE: the docstring "Revises" line above must stay in sync with down_revision
# (it previously said b2c3d4e5f6g7, which skipped the max_power_data revision).
revision: str = "c3d4e5f6g7h8"
down_revision: Union[str, None] = "c9d3e7f1a2b4"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Recreate the consumption/production unique constraints with
    NULLS NOT DISTINCT so ON CONFLICT fires for daily rows (interval_start NULL).
    """
    # NULLS NOT DISTINCT is PostgreSQL 15+ syntax, and SQLite does not support
    # ALTER TABLE ... ADD CONSTRAINT at all, so this migration is a no-op on
    # other dialects — the same dialect guard the sibling migrations use.
    # NOTE(review): on SQLite the NULL-duplicate issue remains; fixing it there
    # would require a full table rebuild.
    if op.get_bind().dialect.name != "postgresql":
        return

    # Recreate consumption_data unique constraint with NULLS NOT DISTINCT.
    op.execute("ALTER TABLE consumption_data DROP CONSTRAINT IF EXISTS uq_consumption_data")
    op.execute("""
        ALTER TABLE consumption_data
        ADD CONSTRAINT uq_consumption_data
        UNIQUE NULLS NOT DISTINCT (usage_point_id, date, granularity, interval_start)
    """)

    # Recreate production_data unique constraint with NULLS NOT DISTINCT.
    op.execute("ALTER TABLE production_data DROP CONSTRAINT IF EXISTS uq_production_data")
    op.execute("""
        ALTER TABLE production_data
        ADD CONSTRAINT uq_production_data
        UNIQUE NULLS NOT DISTINCT (usage_point_id, date, granularity, interval_start)
    """)


def downgrade() -> None:
    """Revert to standard unique constraints (NULLs treated as distinct)."""
    # Mirror the upgrade guard: the constraints were only altered on
    # PostgreSQL, and SQLite cannot run ALTER TABLE ... ADD CONSTRAINT anyway.
    if op.get_bind().dialect.name != "postgresql":
        return

    op.execute("ALTER TABLE consumption_data DROP CONSTRAINT IF EXISTS uq_consumption_data")
    op.execute("""
        ALTER TABLE consumption_data
        ADD CONSTRAINT uq_consumption_data
        UNIQUE (usage_point_id, date, granularity, interval_start)
    """)

    op.execute("ALTER TABLE production_data DROP CONSTRAINT IF EXISTS uq_production_data")
    op.execute("""
        ALTER TABLE production_data
        ADD CONSTRAINT uq_production_data
        UNIQUE (usage_point_id, date, granularity, interval_start)
    """)
10 changes: 5 additions & 5 deletions apps/api/src/adapters/myelectricaldata.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ async def get_consumption_daily(
return await self._make_request(
"GET",
f"/enedis/consumption/daily/{usage_point_id}",
params={"start": start, "end": end},
params={"start": start, "end": end, "use_cache": "true"},
)

async def get_consumption_detail(
Expand All @@ -178,7 +178,7 @@ async def get_consumption_detail(
return await self._make_request(
"GET",
f"/enedis/consumption/detail/{usage_point_id}",
params={"start": start, "end": end},
params={"start": start, "end": end, "use_cache": "true"},
)

async def get_consumption_max_power(
Expand All @@ -194,7 +194,7 @@ async def get_consumption_max_power(
return await self._make_request(
"GET",
f"/enedis/power/{usage_point_id}",
params={"start": start, "end": end},
params={"start": start, "end": end, "use_cache": "true"},
)

# =========================================================================
Expand All @@ -214,7 +214,7 @@ async def get_production_daily(
return await self._make_request(
"GET",
f"/enedis/production/daily/{usage_point_id}",
params={"start": start, "end": end},
params={"start": start, "end": end, "use_cache": "true"},
)

async def get_production_detail(
Expand All @@ -230,7 +230,7 @@ async def get_production_detail(
return await self._make_request(
"GET",
f"/enedis/production/detail/{usage_point_id}",
params={"start": start, "end": end},
params={"start": start, "end": end, "use_cache": "true"},
)

# =========================================================================
Expand Down
13 changes: 1 addition & 12 deletions apps/api/src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from fastapi import FastAPI, Request, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.trustedhost import TrustedHostMiddleware

from fastapi.responses import JSONResponse, Response
from fastapi.staticfiles import StaticFiles

Expand Down Expand Up @@ -145,17 +145,6 @@ def get_servers() -> list[dict[str, str]]:
# Mount static files for custom Swagger CSS
app.mount("/static", StaticFiles(directory="/app/static"), name="static")

# Trusted Host middleware to handle proxy headers
app.add_middleware(TrustedHostMiddleware, allowed_hosts=[
"myelectricaldata.fr",
"*.myelectricaldata.fr", # Allow all subdomains
"localhost",
"127.0.0.1",
"backend",
"backend-client", # Client mode Docker service name
"host.docker.internal", # Allow client mode to connect to server mode locally
])

# CORS middleware - explicit origins required for credentials (httpOnly cookies)
def get_cors_origins() -> list[str]:
"""Build CORS origins from settings"""
Expand Down
2 changes: 2 additions & 0 deletions apps/api/src/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from .client_mode import (
ConsumptionData,
ProductionData,
MaxPowerData,
SyncStatus,
SyncStatusType,
ExportConfig,
Expand Down Expand Up @@ -44,6 +45,7 @@
# Client mode models
"ConsumptionData",
"ProductionData",
"MaxPowerData",
"SyncStatus",
"SyncStatusType",
"ExportConfig",
Expand Down
37 changes: 35 additions & 2 deletions apps/api/src/models/client_mode.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ class ConsumptionData(Base, TimestampMixin):

__tablename__ = "consumption_data"
__table_args__ = (
UniqueConstraint("usage_point_id", "date", "granularity", "interval_start", name="uq_consumption_data"),
UniqueConstraint("usage_point_id", "date", "granularity", "interval_start", name="uq_consumption_data", postgresql_nulls_not_distinct=True),
Index("ix_consumption_usage_point_date", "usage_point_id", "date"),
Index("ix_consumption_granularity_date", "granularity", "date"),
)
Expand Down Expand Up @@ -88,7 +88,7 @@ class ProductionData(Base, TimestampMixin):

__tablename__ = "production_data"
__table_args__ = (
UniqueConstraint("usage_point_id", "date", "granularity", "interval_start", name="uq_production_data"),
UniqueConstraint("usage_point_id", "date", "granularity", "interval_start", name="uq_production_data", postgresql_nulls_not_distinct=True),
Index("ix_production_usage_point_date", "usage_point_id", "date"),
Index("ix_production_granularity_date", "granularity", "date"),
)
Expand All @@ -115,6 +115,39 @@ def __repr__(self) -> str:
return f"<ProductionData({self.usage_point_id}, {self.date}, {self.granularity.value}, {self.value}Wh)>"


class MaxPowerData(Base, TimestampMixin):
    """Store daily maximum power data from MyElectricalData API.

    One row per day and per usage point, containing:
    - maximum power value (W)
    - time of the peak interval (HH:MM)

    Timestamps (created_at / updated_at) come from TimestampMixin.
    """

    __tablename__ = "max_power_data"
    # At most one max-power row per (usage_point_id, date); the two indexes
    # back the common lookups: per-point date ranges and date-wide scans.
    __table_args__ = (
        UniqueConstraint("usage_point_id", "date", name="uq_max_power_data"),
        Index("ix_max_power_usage_point_date", "usage_point_id", "date"),
        Index("ix_max_power_date", "date"),
    )

    # Surrogate primary key: random UUID4 stored as its 36-char string form.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    # Usage-point identifier (14 chars — presumably the Enedis PRM/PDL number;
    # confirm against the adapter).
    usage_point_id: Mapped[str] = mapped_column(String(14), nullable=False, index=True)
    # Calendar day the maximum applies to.
    date: Mapped[date] = mapped_column(Date, nullable=False, index=True)

    # Start time of interval containing the max power for this day (HH:MM).
    interval_start: Mapped[str | None] = mapped_column(String(5), nullable=True)

    # Maximum power value in W.
    value: Mapped[int] = mapped_column(Integer, nullable=False)

    # Source metadata: provenance tag plus the raw API payload for auditing.
    source: Mapped[str] = mapped_column(String(50), default="myelectricaldata")
    raw_data: Mapped[dict[str, Any] | None] = mapped_column(JSON, nullable=True)

    def __repr__(self) -> str:
        return f"<MaxPowerData({self.usage_point_id}, {self.date}, {self.value}W)>"


class SyncStatusType(str, enum.Enum):
"""Sync operation status"""

Expand Down
4 changes: 2 additions & 2 deletions apps/api/src/routers/ecowatt.py
Original file line number Diff line number Diff line change
Expand Up @@ -349,7 +349,7 @@ async def refresh_ecowatt_cache(
from ..services.sync import SyncService
sync_service = SyncService(db)
result = await sync_service.sync_ecowatt()
updated_count = result.get("synced", 0)
updated_count = int(result.get("created", 0)) + int(result.get("updated", 0))
else:
# Server mode: fetch from RTE API
updated_count = await rte_service.update_ecowatt_cache(db)
Expand All @@ -376,4 +376,4 @@ async def refresh_ecowatt_cache(
raise HTTPException(
status_code=500,
detail=f"Failed to refresh EcoWatt cache: {str(e)}"
)
)
Loading