diff --git a/README.md b/README.md index 285e7cc..d3673cc 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ **Free, open-source AI API quota monitoring for developers.** Track usage across [Synthetic](https://synthetic.new), [Z.ai](https://z.ai), [Anthropic](https://anthropic.com), [Codex](https://openai.com/codex), [GitHub Copilot](https://github.com/features/copilot), [MiniMax](https://platform.minimax.io), [Gemini CLI](docs/GEMINI_SETUP.md), and Antigravity in one place. -See history, get alerts, and open a local web dashboard before you hit throttling or run over budget. +See history, get alerts, and open a local web dashboard before you hit throttling or run over budget. Additionally, you can ingest local telemetry from your own API-driven workflows with API Integrations, keeping track of token use and spending across multiple providers. **Links:** [Website](https://onwatch.onllm.dev) | [Buy Me a Coffee](https://buymeacoffee.com/prakersh) @@ -125,6 +125,7 @@ Provider setup guides: - [Copilot Setup Guide](docs/COPILOT_SETUP.md) - [MiniMax Setup Guide](docs/MINIMAX_SETUP.md) - [Antigravity Setup Guide](docs/ANTIGRAVITY_SETUP.md) +- [API Integration Setup Guide](docs/API_INTEGRATIONS_SETUP.md) ### Run @@ -165,6 +166,7 @@ Open **http://localhost:9211** and log in with your `.env` credentials. - **MiniMax Coding Plan** -- Shared quota pool tracking for M2, M2.1, and M2.5 models with 5-hour rolling window reset cycles and **multi-account support** for tracking multiple MiniMax subscriptions via the dashboard UI - **Gemini CLI (Beta)** -- Per-model quota tracking for Gemini 2.5/3.x Pro, Flash, and Flash Lite models with 24-hour reset cycles - **Antigravity** -- Multi-model quota cards (Claude, Gemini, GPT) with grouped quota pools, logging history, and cycle overview +- **API Integrations** -- Local JSONL ingestion for custom API-driven workflows and automations. 
Track per-integration token volume, request counts, recent activity, costs, trends, and accumulated usage across separate API keys and providers. - **All** -- Side-by-side view of all configured providers - **PWA installable** -- Install onWatch from your browser for a native app experience (Beta) @@ -172,6 +174,8 @@ Each quota card shows: usage vs. limit with progress bar, live countdown to rese **Time-series chart** -- Chart.js area chart showing all quotas as % of limit. Time ranges: 1h, 6h, 24h, 7d, 30d. +**API Integrations chart** -- Dedicated telemetry view for custom API-driven scripts. Switch between tokens per request, request counts, accumulated token use, and cost where available. + **Insights** -- Burn rate forecasting, billing-period averages, usage variance, trend detection, and cross-quota ratio analysis (e.g., "1% weekly ~ 24% of 5-hr sprint"). Provider-specific: tokens-per-call efficiency and per-tool breakdowns for Z.ai. **Cycle Overview** -- Cross-quota correlation table showing all quota values at peak usage points within each billing period. Helps identify which quotas spike together. @@ -180,6 +184,8 @@ Each quota card shows: usage vs. limit with progress bar, live countdown to rese **Settings** -- Dedicated settings page (`/settings`) with tabs for general preferences, provider controls, notification thresholds, and SMTP email configuration. +**Custom API Integrations setup** -- Use a small wrapper around your own API calls to append normalised JSONL events into `~/.onwatch/api-integrations/`, then open the API Integrations tab to monitor cumulative and recent usage. Full setup instructions live in [docs/API_INTEGRATIONS_SETUP.md](docs/API_INTEGRATIONS_SETUP.md). 
+ **Menubar (macOS, Beta)** -- The macOS build includes a menubar companion with two preset views: - **Standard** -- Provider cards with circular quota meters and reset metadata @@ -323,6 +329,9 @@ Additional environment variables: | `ONWATCH_ADMIN_PASS` | Initial dashboard password (default: `changeme`) | | `ONWATCH_LOG_LEVEL` | Log level: debug, info, warn, error | | `ONWATCH_HOST` | Bind address (default: `0.0.0.0`) | +| `ONWATCH_API_INTEGRATIONS_ENABLED` | Enable or disable API Integrations ingestion (default: `true`) | +| `ONWATCH_API_INTEGRATIONS_DIR` | Directory onWatch tails for API Integrations JSONL events | +| `ONWATCH_API_INTEGRATIONS_RETENTION` | How long API Integrations rows are kept in SQLite (default: `1440h` = 60 days, `0` disables pruning) | CLI flags override environment variables. @@ -350,6 +359,9 @@ All endpoints require authentication (session cookie or Basic Auth). Append `?pr | `/api/insights` | GET | Usage insights | | `/api/providers` | GET | Available providers | | `/api/settings` | GET/PUT | User settings (notifications, SMTP, providers, menubar) | +| `/api/api-integrations/current` | GET | Current aggregated usage by API integration | +| `/api/api-integrations/history` | GET | Chart-ready API integration history, `?range=` | +| `/api/api-integrations/health` | GET | API integration ingest health and file state | | `/api/settings/smtp/test` | POST | Send test email via configured SMTP | | `/api/password` | PUT | Change password | | `/api/push/vapid` | GET | Get VAPID public key for push subscription | diff --git a/docs/API_INTEGRATIONS_SETUP.md b/docs/API_INTEGRATIONS_SETUP.md new file mode 100644 index 0000000..691d93a --- /dev/null +++ b/docs/API_INTEGRATIONS_SETUP.md @@ -0,0 +1,262 @@ +# Custom API Integrations Setup Guide + +Track Custom API Integrations usage in onWatch with local JSONL ingestion. + +Ingest local JSONL files to monitor Custom API Integrations usage in onWatch. 
This is not for subscription or quota tracking, but for logging token usage in custom scripts and programs that make API calls. Wrap your integrations with telemetry to record per-call token usage, cost, and latency data, and track everything in onWatch. + +## Prerequisites + +- onWatch with the Custom API Integrations backend enabled +- A script or automation that already calls a supported provider API +- Ability to write a JSONL file locally + +Supported v1 providers: + +- `anthropic` +- `openai` +- `mistral` +- `openrouter` +- `gemini` + +*This list is just getting started... feel free to add more providers as you need them!* + +## How It Works + +1. Your script calls the provider API. +2. Your script reads the usage fields from the API response. +3. Your script appends one normalised JSON object per line to a file in `~/.onwatch/api-integrations/`. +4. onWatch tails `*.jsonl` files in that directory and stores the events in SQLite. + +The source files are just the ingest input. The canonical persisted data lives in `~/.onwatch/data/onwatch.db`. + +## Default Paths + +- API Integrations directory: `~/.onwatch/api-integrations` +- Database: `~/.onwatch/data/onwatch.db` +- Log file: `~/.onwatch/data/.onwatch.log` + +In containers, the default API Integrations directory is `/data/api-integrations`. + +## Configuration + +Custom API Integrations ingestion is enabled by default. + +Optional environment variables: + +```env +ONWATCH_API_INTEGRATIONS_ENABLED=true +ONWATCH_API_INTEGRATIONS_DIR=~/.onwatch/api-integrations +ONWATCH_API_INTEGRATIONS_RETENTION=1440h +``` + +If you change `ONWATCH_API_INTEGRATIONS_DIR`, point your scripts and onWatch at the same directory. 
+ +Retention notes: + +- `ONWATCH_API_INTEGRATIONS_RETENTION` controls how long ingested API Integrations events are kept in SQLite +- default retention is `1440h` which is 60 days +- set `ONWATCH_API_INTEGRATIONS_RETENTION=0` to disable database pruning +- pruning applies only to the SQLite table, not to the source `.jsonl` files + +## Event Format + +Write one JSON object per line. + +Required fields: + +```json +{ + "ts": "2026-04-03T12:00:00Z", + "integration": "notes-organiser", + "provider": "anthropic", + "model": "claude-3-7-sonnet", + "prompt_tokens": 1200, + "completion_tokens": 300 +} +``` + +Optional fields: + +- `total_tokens` +- `cost_usd` +- `latency_ms` +- `account` +- `request_id` +- `metadata` + +Full example: + +```json +{ + "ts": "2026-04-03T12:00:00Z", + "integration": "notes-organiser", + "provider": "anthropic", + "account": "personal", + "model": "claude-3-7-sonnet", + "request_id": "req_123", + "prompt_tokens": 1200, + "completion_tokens": 300, + "total_tokens": 1500, + "cost_usd": 0.0123, + "latency_ms": 1840, + "metadata": { + "task": "weekly-meeting-notes" + } +} +``` + +Notes: + +- `ts` must be RFC3339 in UTC, for example `2026-04-03T12:00:00Z` +- `provider` must be one of the supported v1 provider names +- `metadata` must be a JSON object if present +- If `account` is omitted, onWatch stores it as `default` +- If `total_tokens` is omitted, onWatch computes `prompt_tokens + completion_tokens` + +## Python Examples + +Python-first examples are included here: + +- `examples/api_integrations/python/onwatch_api_integrations.py` +- `examples/api_integrations/python/anthropic_example.py` +- `examples/api_integrations/python/openai_example.py` +- `examples/api_integrations/python/mistral_example.py` +- `examples/api_integrations/python/openrouter_example.py` +- `examples/api_integrations/python/gemini_example.py` + +The helper in `examples/api_integrations/python/onwatch_api_integrations.py` appends normalised JSONL events to the API 
Integrations directory. + +These are initial examples to show the general use-case, but the logic can be expanded to any API-driven custom integration you want. + +Included example utilities currently include: + +- `examples/api_integrations/python/generate_practice_dataset.py` + +You can also build your own wrapper around the helper and write any number of integration-specific JSONL files, as long as they end in `.jsonl`. + +## Dashboard and API + +Once events are being ingested, open the `API Integrations` tab in the dashboard. + +The dashboard shows: + +- per-integration cards with request counts, token totals, providers, and optional cost +- all-time and recent usage insight panels +- a shared usage chart with metric modes for tokens per call, API calls, accumulated tokens, and cost +- ingest health, tailed files, and recent alerts + +API Integrations can also be queried through the read-only backend API: + +- `GET /api/api-integrations/current` +- `GET /api/api-integrations/history?range=6h` +- `GET /api/api-integrations/health` + +Dashboard visibility is controlled through the normal settings API via `api_integrations_visibility`, but ingestion itself is controlled by `ONWATCH_API_INTEGRATIONS_ENABLED`. + +## Start onWatch + +Foreground mode is easiest for first-time verification: + +```bash +onwatch --debug +``` + +You should see a log line showing that the API integrations ingester started. + +## Verify File Output + +Check that your script is writing JSONL events: + +```bash +ls -la ~/.onwatch/api-integrations +tail -n 20 ~/.onwatch/api-integrations/*.jsonl +``` + +Each API call should append one valid JSON line. 
+ +## Verify Database Ingestion + +Check recently ingested API integrations usage events: + +```bash +sqlite3 ~/.onwatch/data/onwatch.db "select integration_name, provider, account_name, model, prompt_tokens, completion_tokens, total_tokens, captured_at from api_integration_usage_events order by id desc limit 20;" +``` + +Check ingest cursor state: + +```bash +sqlite3 ~/.onwatch/data/onwatch.db "select source_path, offset_bytes, file_size, partial_line from api_integration_ingest_state;" +``` + +Expected result: + +- `api_integration_usage_events` contains one row per ingested event +- `api_integration_ingest_state` contains one row per tailed file +- `offset_bytes` increases as the file grows + +## Troubleshooting + +### No rows appear in `api_integration_usage_events` + +Check: + +- onWatch is running +- `ONWATCH_API_INTEGRATIONS_ENABLED` is not set to `false` +- your script writes into the same directory that onWatch is tailing +- the file name ends with `.jsonl` +- each line is valid JSON + +Run: + +```bash +tail -f ~/.onwatch/data/.onwatch.log +``` + +### Invalid lines are skipped + +onWatch skips malformed or schema-invalid lines and creates a system alert instead of stopping ingestion. + +Check recent alerts: + +```bash +sqlite3 ~/.onwatch/data/onwatch.db "select provider, alert_type, title, message, created_at from system_alerts where provider = 'api_integrations' order by id desc limit 20;" +``` + +### Duplicate rows + +onWatch deduplicates ingested events using a derived fingerprint based on the source path and stable event fields. + +This protects against: + +- daemon restart +- file reread after truncation +- repeated scans of the same already-ingested lines + +If you intentionally want two events, they must differ in at least one meaningful field such as timestamp or request id. + +### Rotating source files + +If you want to start a fresh source log for new events, move or rename the active `.jsonl` file and let your wrapper create a new one. 
+ +Notes: + +- onWatch will treat the new file as a new ingest source +- previously ingested history remains in SQLite until you clear or replace the stored database +- rotating the source file changes future ingestion, but it does not erase existing chart history by itself + +## Backend Storage + +Custom API Integrations data is stored in separate SQLite tables from the existing subscription/quota tracking tables: + +- `api_integration_usage_events` +- `api_integration_ingest_state` + +This means Custom API Integrations telemetry is identifiable and queryable independently from provider quota snapshots and reset cycles. + +Database retention behavior: + +- onWatch automatically prunes old rows from `api_integration_usage_events` +- the pruning cutoff is controlled by `ONWATCH_API_INTEGRATIONS_RETENTION` +- the default is 60 days +- source `.jsonl` files are not pruned or compacted by onWatch +- if you want smaller source logs, rotate or remove the JSONL files manually diff --git a/docs/screenshots/INDEX.md b/docs/screenshots/INDEX.md index 01207d7..6068af7 100644 --- a/docs/screenshots/INDEX.md +++ b/docs/screenshots/INDEX.md @@ -1,6 +1,6 @@ # Screenshots -Dashboard screenshots captured from a live onWatch v2.11.0 instance with real Anthropic, Synthetic, Z.ai, Codex, GitHub Copilot (Beta), and Antigravity data. Each screenshot shows the top half of the dashboard (quota cards, usage insights, charts, and cycle history). +Dashboard screenshots captured from live onWatch instances showing provider dashboards, API Integrations telemetry, and the macOS menubar companion. Each screenshot focuses on the top half of the dashboard or the relevant primary UI surface. ## Anthropic Provider @@ -44,11 +44,18 @@ Dashboard screenshots captured from a live onWatch v2.11.0 instance with real An | `antigravity-light.png` | Antigravity dashboard in light mode. Shows grouped quota cards for Claude+GPT and Gemini models with logging history and cycle overview. 
| | `antigravity-dark.png` | Antigravity dashboard in dark mode. Features model-specific colors (coral for Claude, green for Gemini). | +## API Integrations + +| File | Description | +|------|-------------| +| `api-integration-light.png` | API Integrations dashboard in light mode. Shows per-integration cards, all-time and recent usage insights, the shared usage chart, and ingest health for locally tailed JSONL telemetry. | +| `api-integration-dark.png` | API Integrations dashboard in dark mode. Same layout with the API Integrations chart and health panels adapted for dark theme. | + ## All Providers View | File | Description | |------|-------------| -| `all-light.png` | All Providers view in light mode. Anthropic, Synthetic, Z.ai, Codex, GitHub Copilot (Beta), and Antigravity quotas side-by-side with combined insights. | +| `all-light.png` | All Providers view in light mode. Anthropic, Synthetic, Z.ai, Codex, GitHub Copilot (Beta), Antigravity, and the API Integrations summary card side-by-side with combined insights. | | `all-dark.png` | All Providers view in dark mode. | ## Menubar Companion (macOS, Beta) diff --git a/docs/screenshots/api-integration-dark.png b/docs/screenshots/api-integration-dark.png new file mode 100644 index 0000000..100ba9a Binary files /dev/null and b/docs/screenshots/api-integration-dark.png differ diff --git a/docs/screenshots/api-integration-light.png b/docs/screenshots/api-integration-light.png new file mode 100644 index 0000000..b3983e8 Binary files /dev/null and b/docs/screenshots/api-integration-light.png differ diff --git a/examples/api_integrations/python/anthropic_example.py b/examples/api_integrations/python/anthropic_example.py new file mode 100644 index 0000000..fb7a848 --- /dev/null +++ b/examples/api_integrations/python/anthropic_example.py @@ -0,0 +1,55 @@ +"""Anthropic example for onWatch Custom API Integrations ingestion. + +Use this file as a pattern for your own script. 
+ +What the user keeps: +- your normal provider API call +- your real prompt, model, and application logic + +What the user adds: +- `from onwatch_api_integrations import append_usage_event` +- the `append_usage_event(...)` block after the response is returned + +The two sections below are always: +- "Real API call" = your existing script logic +- "onWatch block to copy" = the part you append to your script +""" + +import os + +from anthropic import Anthropic + +from onwatch_api_integrations import append_usage_event # the only import you need to add + + +def main() -> None: + api_key = os.environ["ANTHROPIC_API_KEY"] + + # --- Real API call ----------------------------------------------------- + # Keep this part as your own real Anthropic request logic. + client = Anthropic(api_key=api_key) + response = client.messages.create( + model="claude-haiku-4-5-20251001", + max_tokens=128, + messages=[{"role": "user", "content": "Summarize these notes in one sentence."}], + ) + + # --- onWatch block to copy -------------------------------------------- + # Add this block after your real API call returns. + # Map the provider response usage fields into the normalised onWatch event. + output_path = append_usage_event( + integration="notes-organiser", + provider="anthropic", + model=response.model, + prompt_tokens=response.usage.input_tokens, + completion_tokens=response.usage.output_tokens, + total_tokens=response.usage.input_tokens + response.usage.output_tokens, + request_id=getattr(response, "id", None), + metadata={"example": True}, + ) + + print(f"Wrote onWatch event to: {output_path}") + + +if __name__ == "__main__": + main() diff --git a/examples/api_integrations/python/gemini_example.py b/examples/api_integrations/python/gemini_example.py new file mode 100644 index 0000000..b6b739f --- /dev/null +++ b/examples/api_integrations/python/gemini_example.py @@ -0,0 +1,54 @@ +"""Gemini example for onWatch Custom API Integrations ingestion. 
+ +Use this file as a pattern for your own script. + +What the user keeps: +- your normal provider API call +- your real prompt, model, and application logic + +What the user adds: +- `from onwatch_api_integrations import append_usage_event` +- the `append_usage_event(...)` block after the response is returned + +The two sections below are always: +- "Real API call" = your existing script logic +- "onWatch block to copy" = the part you append to your script +""" + +import os + +from google import genai + +from onwatch_api_integrations import append_usage_event # the only import you need to add + + +def main() -> None: + api_key = os.environ["GEMINI_API_KEY"] + + # --- Real API call ----------------------------------------------------- + # Keep this part as your own real Gemini request logic. + client = genai.Client(api_key=api_key) + response = client.models.generate_content( + model="gemini-2.5-flash", + contents="Summarize these notes in one sentence.", + ) + + # --- onWatch block to copy -------------------------------------------- + # Add this block after your real API call returns. + # Map the provider response usage fields into the normalised onWatch event. + usage = response.usage_metadata + output_path = append_usage_event( + integration="notes-organiser", + provider="gemini", + model="gemini-2.5-flash", + prompt_tokens=usage.prompt_token_count, + completion_tokens=usage.candidates_token_count, + total_tokens=usage.total_token_count, + metadata={"example": True}, + ) + + print(f"Wrote onWatch event to: {output_path}") + + +if __name__ == "__main__": + main() diff --git a/examples/api_integrations/python/generate_practice_dataset.py b/examples/api_integrations/python/generate_practice_dataset.py new file mode 100644 index 0000000..6ad91a7 --- /dev/null +++ b/examples/api_integrations/python/generate_practice_dataset.py @@ -0,0 +1,269 @@ +#!/usr/bin/env python3 +""" +Generate 30 days of fake API Integrations JSONL telemetry for testing. 
+ +""" + +from __future__ import annotations + +import argparse +import json +import math +import random +import uuid +from collections import defaultdict +from dataclasses import dataclass +from datetime import UTC, datetime, timedelta +from pathlib import Path + + +DEFAULT_OUTPUT = Path.home() / ".onwatch" / "api-integrations" / "pr-demo-seed.jsonl" + + +@dataclass(frozen=True) +class IntegrationSpec: + name: str + providers: tuple[str, ...] + models: tuple[str, ...] + base_calls_per_day: int + prompt_range: tuple[int, int] + completion_range: tuple[int, int] + active_hours: tuple[int, int] + weekday_boost: float = 1.0 + weekend_boost: float = 0.7 + metadata: dict | None = None + + +INTEGRATIONS: tuple[IntegrationSpec, ...] = ( + IntegrationSpec( + name="model-train-fine-tune", + providers=("openai", "openrouter"), + models=("gpt-4.1", "gpt-4.1-mini", "openai/gpt-4.1"), + base_calls_per_day=24, + prompt_range=(1800, 7200), + completion_range=(350, 1400), + active_hours=(7, 22), + weekday_boost=1.35, + weekend_boost=0.45, + metadata={"pipeline": "fine-tune", "team": "ml"}, + ), + IntegrationSpec( + name="email-bot", + providers=("anthropic", "openai"), + models=("claude-3-7-sonnet", "gpt-4.1-mini"), + base_calls_per_day=36, + prompt_range=(280, 1400), + completion_range=(120, 600), + active_hours=(6, 20), + weekday_boost=1.15, + weekend_boost=0.85, + metadata={"category": "ops"}, + ), + IntegrationSpec( + name="notes-compactor", + providers=("mistral", "anthropic"), + models=("mistral-small-latest", "claude-3-5-haiku"), + base_calls_per_day=18, + prompt_range=(900, 3600), + completion_range=(180, 900), + active_hours=(8, 23), + weekday_boost=0.95, + weekend_boost=1.25, + metadata={"category": "knowledge"}, + ), + IntegrationSpec( + name="support-triage", + providers=("openrouter", "gemini"), + models=("anthropic/claude-3.5-haiku", "gemini-2.5-flash"), + base_calls_per_day=28, + prompt_range=(420, 1900), + completion_range=(140, 760), + active_hours=(5, 21), + 
weekday_boost=1.2, + weekend_boost=0.75, + metadata={"category": "support"}, + ), + IntegrationSpec( + name="invoice-reconciler", + providers=("gemini", "openai"), + models=("gemini-2.5-pro", "gpt-4.1-mini"), + base_calls_per_day=10, + prompt_range=(1100, 4800), + completion_range=(160, 720), + active_hours=(4, 18), + weekday_boost=1.25, + weekend_boost=0.3, + metadata={"category": "finance"}, + ), + IntegrationSpec( + name="changelog-writer", + providers=("anthropic", "mistral", "openrouter"), + models=("claude-3-7-sonnet", "mistral-small-latest", "openai/gpt-4.1-mini"), + base_calls_per_day=14, + prompt_range=(650, 2200), + completion_range=(260, 1100), + active_hours=(9, 23), + weekday_boost=1.05, + weekend_boost=0.95, + metadata={"category": "release"}, + ), +) + + +PROVIDER_COST_PER_1K = { + "anthropic": (0.0030, 0.0150), + "openai": (0.0020, 0.0080), + "mistral": (0.0010, 0.0030), + "openrouter": (0.0022, 0.0095), + "gemini": (0.0013, 0.0050), +} + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Generate fake API Integrations JSONL data for onWatch screenshots.") + parser.add_argument("--output", type=Path, default=DEFAULT_OUTPUT, help=f"Output JSONL path (default: {DEFAULT_OUTPUT})") + parser.add_argument("--days", type=int, default=30, help="How many days of history to generate") + parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducible output") + parser.add_argument("--overwrite", action="store_true", help="Overwrite the output file instead of appending") + parser.add_argument("--account", default="demo", help="Account name to assign to generated events") + return parser.parse_args() + + +def clamp(value: float, lower: float, upper: float) -> float: + return max(lower, min(upper, value)) + + +def hourly_intensity(ts: datetime, spec: IntegrationSpec) -> float: + start_hour, end_hour = spec.active_hours + hour = ts.hour + ts.minute / 60.0 + if hour < start_hour or hour > end_hour: + 
return 0.08 + + span = max(1.0, float(end_hour - start_hour)) + phase = (hour - start_hour) / span + workday_curve = 0.55 + 0.75 * math.sin(math.pi * phase) + weekday_factor = spec.weekday_boost if ts.weekday() < 5 else spec.weekend_boost + weekly_wave = 0.92 + 0.18 * math.sin((ts.timetuple().tm_yday / 7.0) * math.pi) + return clamp(workday_curve * weekday_factor * weekly_wave, 0.05, 2.2) + + +def campaign_multiplier(day_index: int, total_days: int, spec: IntegrationSpec) -> float: + progress = day_index / max(1, total_days - 1) + if spec.name == "model-train-fine-tune": + return 0.85 + 0.95 * progress + if spec.name == "support-triage": + return 0.9 + 0.3 * math.sin(progress * math.pi * 3.0) + if spec.name == "notes-compactor": + return 0.8 + 0.35 * math.cos(progress * math.pi * 2.0) + if spec.name == "invoice-reconciler": + month_end_push = 1.0 + (0.8 if progress > 0.7 else 0.0) + return month_end_push + return 1.0 + + +def estimate_cost(provider: str, prompt_tokens: int, completion_tokens: int) -> float | None: + rates = PROVIDER_COST_PER_1K.get(provider) + if not rates: + return None + prompt_rate, completion_rate = rates + cost = (prompt_tokens / 1000.0) * prompt_rate + (completion_tokens / 1000.0) * completion_rate + return round(cost, 6) + + +def jittered_timestamp(base: datetime, rng: random.Random) -> datetime: + return base + timedelta( + minutes=rng.randint(0, 58), + seconds=rng.randint(0, 59), + ) + + +def choose_calls_for_hour(spec: IntegrationSpec, ts: datetime, day_index: int, total_days: int, rng: random.Random) -> int: + baseline_per_hour = spec.base_calls_per_day / 24.0 + intensity = hourly_intensity(ts, spec) + campaign = campaign_multiplier(day_index, total_days, spec) + noise = rng.uniform(0.75, 1.35) + expected = baseline_per_hour * intensity * campaign * noise + floor = int(expected) + remainder = expected - floor + return floor + (1 if rng.random() < remainder else 0) + + +def generate_events(days: int, seed: int, account: str) -> 
list[dict]: + rng = random.Random(seed) + now = datetime.now(UTC).replace(minute=0, second=0, microsecond=0) + start = now - timedelta(days=days) + events: list[dict] = [] + provider_mix: dict[str, int] = defaultdict(int) + + for day_index in range(days): + for hour in range(24): + slot = start + timedelta(days=day_index, hours=hour) + for spec in INTEGRATIONS: + calls = choose_calls_for_hour(spec, slot, day_index, days, rng) + if calls <= 0: + continue + for _ in range(calls): + provider = rng.choice(spec.providers) + model_candidates = [m for m in spec.models if provider in m or "/" not in m] + model = rng.choice(model_candidates or spec.models) + prompt_tokens = rng.randint(*spec.prompt_range) + completion_tokens = rng.randint(*spec.completion_range) + + # Periodic spikes for prettier graphs. + if spec.name in {"model-train-fine-tune", "notes-compactor"} and rng.random() < 0.08: + prompt_tokens = int(prompt_tokens * rng.uniform(1.4, 2.1)) + completion_tokens = int(completion_tokens * rng.uniform(1.2, 1.8)) + elif spec.name == "email-bot" and rng.random() < 0.12: + completion_tokens = int(completion_tokens * rng.uniform(1.3, 1.9)) + + total_tokens = prompt_tokens + completion_tokens + latency_ms = rng.randint(500, 4200) + timestamp = jittered_timestamp(slot, rng) + provider_mix[provider] += 1 + + metadata = dict(spec.metadata or {}) + metadata["batch"] = "pr-demo" + metadata["env"] = "synthetic" + + event = { + "ts": timestamp.isoformat().replace("+00:00", "Z"), + "integration": spec.name, + "provider": provider, + "account": account, + "model": model, + "request_id": f"demo-{uuid.uuid4().hex[:16]}", + "prompt_tokens": prompt_tokens, + "completion_tokens": completion_tokens, + "total_tokens": total_tokens, + "latency_ms": latency_ms, + "metadata": metadata, + } + cost = estimate_cost(provider, prompt_tokens, completion_tokens) + if cost is not None: + event["cost_usd"] = cost + events.append(event) + + events.sort(key=lambda item: item["ts"]) + return events + + 
+def main() -> None: + args = parse_args() + events = generate_events(days=args.days, seed=args.seed, account=args.account) + + args.output.parent.mkdir(parents=True, exist_ok=True) + mode = "w" if args.overwrite else "a" + with args.output.open(mode, encoding="utf-8") as handle: + for event in events: + handle.write(json.dumps(event, separators=(",", ":"))) + handle.write("\n") + + print(f"Wrote {len(events)} fake API integration events to {args.output}") + print("Next steps:") + print(f" 1. Keep onWatch running so it tails {args.output}") + print(" 2. Refresh the API Integrations tab after ingestion completes") + print(" 3. Use a new output filename if you want another seeded dataset without duplicate suppression") + + +if __name__ == "__main__": + main() diff --git a/examples/api_integrations/python/mistral_example.py b/examples/api_integrations/python/mistral_example.py new file mode 100644 index 0000000..935d76f --- /dev/null +++ b/examples/api_integrations/python/mistral_example.py @@ -0,0 +1,55 @@ +"""Mistral example for onWatch Custom API Integrations ingestion. + +Use this file as a pattern for your own script. + +What the user keeps: +- your normal provider API call +- your real prompt, model, and application logic + +What the user adds: +- `from onwatch_api_integrations import append_usage_event` +- the `append_usage_event(...)` block after the response is returned + +The two sections below are always: +- "Real API call" = your existing script logic +- "onWatch block to copy" = the part you append to your script +""" + +import os + +from mistralai import Mistral + +from onwatch_api_integrations import append_usage_event # the only import you need to add + + +def main() -> None: + api_key = os.environ["MISTRAL_API_KEY"] + + # --- Real API call ----------------------------------------------------- + # Keep this part as your own real Mistral request logic. 
+ client = Mistral(api_key=api_key) + response = client.chat.complete( + model="mistral-small-latest", + messages=[{"role": "user", "content": "Summarize these notes in one sentence."}], + ) + + # --- onWatch block to copy -------------------------------------------- + # Add this block after your real API call returns. + # Map the provider response usage fields into the normalised onWatch event. + usage = response.usage + output_path = append_usage_event( + integration="notes-organiser", + provider="mistral", + model=response.model, + prompt_tokens=usage.prompt_tokens, + completion_tokens=usage.completion_tokens, + total_tokens=usage.total_tokens, + request_id=response.id, + metadata={"example": True}, + ) + + print(f"Wrote onWatch event to: {output_path}") + + +if __name__ == "__main__": + main() diff --git a/examples/api_integrations/python/onwatch_api_integrations.py b/examples/api_integrations/python/onwatch_api_integrations.py new file mode 100644 index 0000000..89d612a --- /dev/null +++ b/examples/api_integrations/python/onwatch_api_integrations.py @@ -0,0 +1,82 @@ +"""Shared helper for writing normalized Custom API Integrations usage events to onWatch. + +How this works: +1. Your script makes a normal provider API call. +2. Your script extracts usage fields from the provider response. +3. Your script calls `append_usage_event(...)`. +4. This helper appends one JSON line to `~/.onwatch/api-integrations/<integration>.jsonl` + unless `ONWATCH_API_INTEGRATIONS_DIR` overrides the directory. +5. The onWatch daemon tails that file and stores the event in SQLite. + +The event schema written here is already normalised. onWatch validates and +stores it, but it does not need to understand the raw provider response. 
+""" + +import json +import os +from datetime import datetime, timezone +from pathlib import Path + + +def _api_integrations_dir() -> Path: + """Return the directory that onWatch tails for API integration usage events.""" + raw = os.getenv("ONWATCH_API_INTEGRATIONS_DIR") + if raw: + return Path(raw).expanduser() + return Path.home() / ".onwatch" / "api-integrations" + + +def append_usage_event( + *, + integration: str, + provider: str, + model: str, + prompt_tokens: int, + completion_tokens: int, + total_tokens: int | None = None, + account: str | None = None, + request_id: str | None = None, + cost_usd: float | None = None, + latency_ms: int | None = None, + metadata: dict | None = None, + file_name: str | None = None, +) -> Path: + """Append one normalized JSONL event and return the file path used. + + Required fields: + - integration + - provider + - model + - prompt_tokens + - completion_tokens + + Optional fields are only written when present. + """ + event = { + "ts": datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z"), + "integration": integration, + "provider": provider, + "model": model, + "prompt_tokens": int(prompt_tokens), + "completion_tokens": int(completion_tokens), + } + if total_tokens is not None: + event["total_tokens"] = int(total_tokens) + if account: + event["account"] = account + if request_id: + event["request_id"] = request_id + if cost_usd is not None: + event["cost_usd"] = float(cost_usd) + if latency_ms is not None: + event["latency_ms"] = int(latency_ms) + if metadata: + event["metadata"] = metadata + + api_integrations_dir = _api_integrations_dir() + api_integrations_dir.mkdir(parents=True, exist_ok=True) + target = api_integrations_dir / (file_name or f"{integration}.jsonl") + with target.open("a", encoding="utf-8") as handle: + handle.write(json.dumps(event, separators=(",", ":"))) + handle.write("\n") + return target diff --git a/examples/api_integrations/python/openai_example.py 
b/examples/api_integrations/python/openai_example.py new file mode 100644 index 0000000..46341bb --- /dev/null +++ b/examples/api_integrations/python/openai_example.py @@ -0,0 +1,54 @@ +"""OpenAI example for onWatch Custom API Integrations ingestion. + +Use this file as a pattern for your own script. + +What the user keeps: +- your normal provider API call +- your real prompt, model, and application logic + +What the user adds: +- `from onwatch_api_integrations import append_usage_event` +- the `append_usage_event(...)` block after the response is returned + +The two sections below are always: +- "Real API call" = your existing script logic +- "onWatch block to copy" = the part you append to your script +""" + +import os + +from openai import OpenAI + +from onwatch_api_integrations import append_usage_event # the only import you need to add + + +def main() -> None: + api_key = os.environ["OPENAI_API_KEY"] + + # --- Real API call ----------------------------------------------------- + # Keep this part as your own real OpenAI request logic. + client = OpenAI(api_key=api_key) + response = client.responses.create( + model="gpt-4.1-mini", + input="Summarize these notes in one sentence.", + ) + + # --- onWatch block to copy -------------------------------------------- + # Add this block after your real API call returns. + # Map the provider response usage fields into the normalised onWatch event. 
+ output_path = append_usage_event( + integration="notes-organiser", + provider="openai", + model=response.model, + prompt_tokens=response.usage.input_tokens, + completion_tokens=response.usage.output_tokens, + total_tokens=response.usage.total_tokens, + request_id=response.id, + metadata={"example": True}, + ) + + print(f"Wrote onWatch event to: {output_path}") + + +if __name__ == "__main__": + main() diff --git a/examples/api_integrations/python/openrouter_example.py b/examples/api_integrations/python/openrouter_example.py new file mode 100644 index 0000000..3ddc498 --- /dev/null +++ b/examples/api_integrations/python/openrouter_example.py @@ -0,0 +1,58 @@ +"""OpenRouter example for onWatch Custom API Integrations ingestion. + +Use this file as a pattern for your own script. + +What the user keeps: +- your normal provider API call +- your real prompt, model, and application logic + +What the user adds: +- `from onwatch_api_integrations import append_usage_event` +- the `append_usage_event(...)` block after the response is returned + +The two sections below are always: +- "Real API call" = your existing script logic +- "onWatch block to copy" = the part you append to your script +""" + +import os + +from openai import OpenAI + +from onwatch_api_integrations import append_usage_event # the only import you need to add + + +def main() -> None: + api_key = os.environ["OPENROUTER_API_KEY"] + + # --- Real API call ----------------------------------------------------- + # Keep this part as your own real OpenRouter request logic. + client = OpenAI( + base_url="https://openrouter.ai/api/v1", + api_key=api_key, + ) + response = client.chat.completions.create( + model="openai/gpt-4.1-mini", + messages=[{"role": "user", "content": "Summarize these notes in one sentence."}], + ) + + # --- onWatch block to copy -------------------------------------------- + # Add this block after your real API call returns. 
+ # Map the provider response usage fields into the normalised onWatch event. + usage = response.usage + output_path = append_usage_event( + integration="notes-organiser", + provider="openrouter", + model=response.model, + prompt_tokens=usage.prompt_tokens, + completion_tokens=usage.completion_tokens, + total_tokens=usage.total_tokens, + request_id=response.id, + metadata={"example": True}, + ) + + print(f"Wrote onWatch event to: {output_path}") + + +if __name__ == "__main__": + main() diff --git a/internal/agent/api_integrations_ingest_agent.go b/internal/agent/api_integrations_ingest_agent.go new file mode 100644 index 0000000..ebfef1a --- /dev/null +++ b/internal/agent/api_integrations_ingest_agent.go @@ -0,0 +1,279 @@ +package agent + +import ( + "context" + "errors" + "fmt" + "io" + "log/slog" + "os" + "path/filepath" + "sort" + "strings" + "time" + + apiintegrations "github.com/onllm-dev/onwatch/v2/internal/api_integrations" + "github.com/onllm-dev/onwatch/v2/internal/store" +) + +const ( + apiIntegrationIngestIntervalDefault = 5 * time.Second + apiIntegrationIngestMaxReadBytes = 256 * 1024 + apiIntegrationIngestMaxInvalidAlertsPerFilePerScan = 10 + apiIntegrationIngestMaxFilesPerScan = 100 + apiIntegrationPruneIntervalDefault = time.Hour +) + +// APIIntegrationsIngestAgent tails normalized JSONL API integration usage files and stores the events. +type APIIntegrationsIngestAgent struct { + store *store.Store + dir string + interval time.Duration + retention time.Duration + pruneInterval time.Duration + lastPrune time.Time + scanPathCursor int + logger *slog.Logger +} + +// NewAPIIntegrationsIngestAgent creates a new API integrations file ingester. 
+func NewAPIIntegrationsIngestAgent(store *store.Store, dir string, retention time.Duration, logger *slog.Logger) *APIIntegrationsIngestAgent { + if logger == nil { + logger = slog.Default() + } + return &APIIntegrationsIngestAgent{ + store: store, + dir: dir, + interval: apiIntegrationIngestIntervalDefault, + retention: retention, + pruneInterval: apiIntegrationPruneIntervalDefault, + logger: logger, + } +} + +// SetInterval overrides the scan interval. Used in tests. +func (a *APIIntegrationsIngestAgent) SetInterval(interval time.Duration) { + if interval > 0 { + a.interval = interval + } +} + +// Run starts the periodic ingestion loop until context cancellation. +func (a *APIIntegrationsIngestAgent) Run(ctx context.Context) error { + a.logger.Info("API integrations ingester started", "dir", a.dir, "interval", a.interval) + defer a.logger.Info("API integrations ingester stopped") + + if err := os.MkdirAll(a.dir, 0o700); err != nil { + return fmt.Errorf("create API integrations dir: %w", err) + } + + a.scan() + + ticker := time.NewTicker(a.interval) + defer ticker.Stop() + + for { + select { + case <-ticker.C: + a.scan() + case <-ctx.Done(): + return nil + } + } +} + +func (a *APIIntegrationsIngestAgent) scan() { + pattern := filepath.Join(a.dir, "*.jsonl") + paths, err := filepath.Glob(pattern) + if err != nil { + a.logger.Error("API integrations ingester glob failed", "dir", a.dir, "error", err) + return + } + sort.Strings(paths) + if len(paths) > apiIntegrationIngestMaxFilesPerScan { + start := a.scanPathCursor % len(paths) + selected := make([]string, 0, apiIntegrationIngestMaxFilesPerScan) + for i := 0; i < apiIntegrationIngestMaxFilesPerScan; i++ { + selected = append(selected, paths[(start+i)%len(paths)]) + } + a.logger.Warn( + "API integrations ingester skipped files beyond scan cap", + "dir", a.dir, + "total_files", len(paths), + "processed_files", apiIntegrationIngestMaxFilesPerScan, + "skipped_files", len(paths)-apiIntegrationIngestMaxFilesPerScan, + ) 
+ a.scanPathCursor = (start + len(selected)) % len(paths) + paths = selected + } else { + a.scanPathCursor = 0 + } + + for _, path := range paths { + if err := a.scanFile(path); err != nil { + a.logger.Error("API integrations ingester scan failed", "path", path, "error", err) + } + } + + if err := a.pruneExpiredUsageEvents(); err != nil { + a.logger.Error("API integrations ingester prune failed", "error", err) + } +} + +func (a *APIIntegrationsIngestAgent) scanFile(path string) error { + info, err := os.Stat(path) + if err != nil { + if os.IsNotExist(err) { + return nil + } + return fmt.Errorf("stat file: %w", err) + } + if info.IsDir() { + return nil + } + + state, err := a.store.GetAPIIntegrationIngestState(path) + if err != nil { + return err + } + if state == nil { + state = &apiintegrations.IngestState{SourcePath: path} + } + if state.PartialLineOversized { + a.logger.Warn( + "API integrations ingester discarded oversized persisted partial line", + "path", path, + "partial_line_bytes", state.PartialLineBytes, + "max_partial_line_bytes", apiintegrations.MaxIngestPartialLineBytes, + ) + state.PartialLine = "" + } + + if info.Size() < state.Offset { + state.Offset = 0 + state.PartialLine = "" + } + + file, err := os.Open(path) + if err != nil { + return fmt.Errorf("open file: %w", err) + } + defer file.Close() + + if _, err := file.Seek(state.Offset, io.SeekStart); err != nil { + return fmt.Errorf("seek file: %w", err) + } + + data, err := io.ReadAll(io.LimitReader(file, apiIntegrationIngestMaxReadBytes)) + if err != nil { + return fmt.Errorf("read file: %w", err) + } + + if len(data) == 0 { + state.FileSize = info.Size() + state.FileModTime = info.ModTime().UTC() + return a.store.UpsertAPIIntegrationIngestState(state) + } + + state.Offset += int64(len(data)) + state.FileSize = info.Size() + state.FileModTime = info.ModTime().UTC() + + combined := state.PartialLine + string(data) + lines, remainder := splitCompleteLines(combined) + state.PartialLine = remainder + 
if len(state.PartialLine) > apiintegrations.MaxIngestPartialLineBytes { + a.logger.Warn( + "API integrations ingester discarded oversized partial line", + "path", path, + "partial_line_bytes", len(state.PartialLine), + "max_partial_line_bytes", apiintegrations.MaxIngestPartialLineBytes, + ) + state.PartialLine = "" + } + + invalidAlertsCreated := 0 + invalidAlertsSuppressed := 0 + + for _, line := range lines { + trimmed := strings.TrimSpace(line) + if trimmed == "" { + continue + } + event, err := apiintegrations.ParseUsageEventLine([]byte(trimmed), path) + if err != nil { + if invalidAlertsCreated < apiIntegrationIngestMaxInvalidAlertsPerFilePerScan { + a.recordInvalidLine(path, trimmed, err) + invalidAlertsCreated++ + } else { + invalidAlertsSuppressed++ + } + continue + } + if _, err := a.store.InsertAPIIntegrationUsageEvent(event); err != nil { + if errors.Is(err, store.ErrDuplicateAPIIntegrationUsageEvent) { + continue + } + return err + } + } + if invalidAlertsSuppressed > 0 { + a.logger.Warn( + "API integrations ingester suppressed invalid line alerts", + "path", path, + "alert_limit", apiIntegrationIngestMaxInvalidAlertsPerFilePerScan, + "alerts_created", invalidAlertsCreated, + "alerts_suppressed", invalidAlertsSuppressed, + ) + } + + return a.store.UpsertAPIIntegrationIngestState(state) +} + +func splitCompleteLines(data string) ([]string, string) { + if data == "" { + return nil, "" + } + if strings.HasSuffix(data, "\n") { + lines := strings.Split(strings.TrimSuffix(data, "\n"), "\n") + return lines, "" + } + lines := strings.Split(data, "\n") + if len(lines) == 1 { + return nil, data + } + return lines[:len(lines)-1], lines[len(lines)-1] +} + +func (a *APIIntegrationsIngestAgent) recordInvalidLine(path, line string, err error) { + msg := fmt.Sprintf("%s: %v", filepath.Base(path), err) + if len(line) > 180 { + line = line[:180] + "..." 
+ } + metadata := fmt.Sprintf(`{"source_path":%q,"line":%q}`, path, line) + if _, createErr := a.store.CreateSystemAlert("api_integrations", "ingest_error", "API integrations ingest skipped invalid event", msg, "warning", metadata); createErr != nil { + a.logger.Warn("Failed to create API integrations ingest alert", "path", path, "error", createErr) + } +} + +func (a *APIIntegrationsIngestAgent) pruneExpiredUsageEvents() error { + if a.store == nil || a.retention <= 0 { + return nil + } + + now := time.Now().UTC() + if !a.lastPrune.IsZero() && now.Sub(a.lastPrune) < a.pruneInterval { + return nil + } + + cutoff := now.Add(-a.retention) + deleted, err := a.store.DeleteAPIIntegrationUsageEventsOlderThan(cutoff) + if err != nil { + return err + } + a.lastPrune = now + if deleted > 0 { + a.logger.Info("API integrations usage retention pruned events", "deleted", deleted, "cutoff", cutoff.Format(time.RFC3339)) + } + return nil +} diff --git a/internal/agent/api_integrations_ingest_agent_test.go b/internal/agent/api_integrations_ingest_agent_test.go new file mode 100644 index 0000000..4c31c2f --- /dev/null +++ b/internal/agent/api_integrations_ingest_agent_test.go @@ -0,0 +1,518 @@ +package agent + +import ( + "bytes" + "context" + "fmt" + "log/slog" + "os" + "path/filepath" + "strings" + "testing" + "time" + + apiintegrations "github.com/onllm-dev/onwatch/v2/internal/api_integrations" + "github.com/onllm-dev/onwatch/v2/internal/store" +) + +func newBufferedJSONLogger() (*slog.Logger, *bytes.Buffer) { + var buf bytes.Buffer + return slog.New(slog.NewJSONHandler(&buf, nil)), &buf +} + +func TestAPIIntegrationsIngestAgent_ScanFile_PartialLineAndCompletion(t *testing.T) { + st, err := store.New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer st.Close() + + dir := t.TempDir() + path := filepath.Join(dir, "notes.jsonl") + if err := os.WriteFile(path, 
[]byte(`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":10,"completion_tokens":2}`), 0o600); err != nil { + t.Fatalf("WriteFile: %v", err) + } + + ag := NewAPIIntegrationsIngestAgent(st, dir, 0, slog.Default()) + if err := ag.scanFile(path); err != nil { + t.Fatalf("scanFile(1): %v", err) + } + + events, err := st.QueryAPIIntegrationUsageRange(time.Date(2026, 4, 3, 11, 0, 0, 0, time.UTC), time.Date(2026, 4, 3, 13, 0, 0, 0, time.UTC)) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange: %v", err) + } + if len(events) != 0 { + t.Fatalf("expected 0 events before newline, got %d", len(events)) + } + + f, err := os.OpenFile(path, os.O_APPEND|os.O_WRONLY, 0) + if err != nil { + t.Fatalf("OpenFile: %v", err) + } + if _, err := f.WriteString("\n"); err != nil { + t.Fatalf("WriteString: %v", err) + } + _ = f.Close() + + if err := ag.scanFile(path); err != nil { + t.Fatalf("scanFile(2): %v", err) + } + events, err = st.QueryAPIIntegrationUsageRange(time.Date(2026, 4, 3, 11, 0, 0, 0, time.UTC), time.Date(2026, 4, 3, 13, 0, 0, 0, time.UTC)) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange(2): %v", err) + } + if len(events) != 1 || events[0].TotalTokens != 12 { + t.Fatalf("events=%+v", events) + } +} + +func TestAPIIntegrationsIngestAgent_ScanFile_PartialLineCap(t *testing.T) { + st, err := store.New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer st.Close() + + dir := t.TempDir() + path := filepath.Join(dir, "oversized.jsonl") + content := strings.Repeat("a", apiIntegrationIngestMaxReadBytes*3) + if err := os.WriteFile(path, []byte(content), 0o600); err != nil { + t.Fatalf("WriteFile: %v", err) + } + + logger, logBuf := newBufferedJSONLogger() + ag := NewAPIIntegrationsIngestAgent(st, dir, 0, logger) + + for i := 0; i < 3; i++ { + if err := ag.scanFile(path); err != nil { + t.Fatalf("scanFile(%d): %v", i+1, err) + } + state, err := st.GetAPIIntegrationIngestState(path) 
+ if err != nil { + t.Fatalf("GetAPIIntegrationIngestState(%d): %v", i+1, err) + } + if state == nil { + t.Fatalf("expected ingest state after scan %d", i+1) + } + if len(state.PartialLine) > apiintegrations.MaxIngestPartialLineBytes { + t.Fatalf("partial line length=%d exceeds cap %d after scan %d", len(state.PartialLine), apiintegrations.MaxIngestPartialLineBytes, i+1) + } + } + + state, err := st.GetAPIIntegrationIngestState(path) + if err != nil { + t.Fatalf("GetAPIIntegrationIngestState(final): %v", err) + } + if state == nil { + t.Fatal("expected final ingest state") + } + if state.Offset != int64(len(content)) { + t.Fatalf("offset=%d want %d", state.Offset, len(content)) + } + if state.PartialLine != "" { + t.Fatalf("expected oversized partial line to be dropped, got len=%d", len(state.PartialLine)) + } + + logs := logBuf.String() + if !strings.Contains(logs, "API integrations ingester discarded oversized partial line") { + t.Fatalf("expected oversized partial line warning, logs=%s", logs) + } +} + +func TestAPIIntegrationsIngestAgent_ScanFile_DropsOversizedPersistedPartialLine(t *testing.T) { + st, err := store.New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer st.Close() + + dir := t.TempDir() + path := filepath.Join(dir, "persisted.jsonl") + line := `{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":3,"completion_tokens":2}` + "\n" + if err := os.WriteFile(path, []byte(line), 0o600); err != nil { + t.Fatalf("WriteFile: %v", err) + } + + oversizedPartial := strings.Repeat("x", apiintegrations.MaxIngestPartialLineBytes+1) + if err := st.UpsertAPIIntegrationIngestState(&apiintegrations.IngestState{ + SourcePath: path, + Offset: 0, + FileSize: int64(len(line)), + FileModTime: time.Date(2026, 4, 3, 12, 0, 0, 0, time.UTC), + PartialLine: oversizedPartial, + }); err != nil { + t.Fatalf("UpsertAPIIntegrationIngestState: %v", err) + } + + logger, logBuf := newBufferedJSONLogger() + 
ag := NewAPIIntegrationsIngestAgent(st, dir, 0, logger) + if err := ag.scanFile(path); err != nil { + t.Fatalf("scanFile: %v", err) + } + + state, err := st.GetAPIIntegrationIngestState(path) + if err != nil { + t.Fatalf("GetAPIIntegrationIngestState: %v", err) + } + if state == nil { + t.Fatal("expected ingest state") + } + if state.PartialLine != "" { + t.Fatalf("expected persisted oversized partial line to be cleared, got len=%d", len(state.PartialLine)) + } + + events, err := st.QueryAPIIntegrationUsageRange(time.Date(2026, 4, 3, 11, 0, 0, 0, time.UTC), time.Date(2026, 4, 3, 13, 0, 0, 0, time.UTC)) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange: %v", err) + } + if len(events) != 1 { + t.Fatalf("events=%+v", events) + } + + logs := logBuf.String() + if !strings.Contains(logs, "API integrations ingester discarded oversized persisted partial line") { + t.Fatalf("expected persisted partial line warning, logs=%s", logs) + } +} + +func TestAPIIntegrationsIngestAgent_ScanFile_InvalidLineCreatesAlert(t *testing.T) { + st, err := store.New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer st.Close() + + dir := t.TempDir() + path := filepath.Join(dir, "bad.jsonl") + content := "{not-json}\n" + + `{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"mistral","model":"mistral-small-latest","prompt_tokens":1,"completion_tokens":1}` + "\n" + if err := os.WriteFile(path, []byte(content), 0o600); err != nil { + t.Fatalf("WriteFile: %v", err) + } + + ag := NewAPIIntegrationsIngestAgent(st, dir, 0, slog.Default()) + if err := ag.scanFile(path); err != nil { + t.Fatalf("scanFile: %v", err) + } + + alerts, err := st.GetActiveSystemAlerts() + if err != nil { + t.Fatalf("GetActiveSystemAlerts: %v", err) + } + if len(alerts) == 0 || alerts[0].Provider != "api_integrations" { + t.Fatalf("alerts=%+v", alerts) + } + events, err := st.QueryAPIIntegrationUsageRange(time.Date(2026, 4, 3, 11, 0, 0, 0, time.UTC), time.Date(2026, 4, 3, 13, 0, 0, 0, 
time.UTC)) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange: %v", err) + } + if len(events) != 1 { + t.Fatalf("events=%+v", events) + } +} + +func TestAPIIntegrationsIngestAgent_ScanFile_InvalidLineAlertCap(t *testing.T) { + st, err := store.New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer st.Close() + + dir := t.TempDir() + path := filepath.Join(dir, "bad.jsonl") + var content strings.Builder + for i := 0; i < apiIntegrationIngestMaxInvalidAlertsPerFilePerScan+2; i++ { + content.WriteString("{not-json}\n") + } + content.WriteString(`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"mistral","model":"mistral-small-latest","prompt_tokens":1,"completion_tokens":1}` + "\n") + if err := os.WriteFile(path, []byte(content.String()), 0o600); err != nil { + t.Fatalf("WriteFile: %v", err) + } + + logger, logBuf := newBufferedJSONLogger() + ag := NewAPIIntegrationsIngestAgent(st, dir, 0, logger) + if err := ag.scanFile(path); err != nil { + t.Fatalf("scanFile: %v", err) + } + + alerts, err := st.GetActiveSystemAlertsByProvider("api_integrations", 20) + if err != nil { + t.Fatalf("GetActiveSystemAlertsByProvider: %v", err) + } + if len(alerts) != apiIntegrationIngestMaxInvalidAlertsPerFilePerScan { + t.Fatalf("len(alerts)=%d want %d", len(alerts), apiIntegrationIngestMaxInvalidAlertsPerFilePerScan) + } + + events, err := st.QueryAPIIntegrationUsageRange(time.Date(2026, 4, 3, 11, 0, 0, 0, time.UTC), time.Date(2026, 4, 3, 13, 0, 0, 0, time.UTC)) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange: %v", err) + } + if len(events) != 1 { + t.Fatalf("events=%+v", events) + } + + logs := logBuf.String() + if !strings.Contains(logs, "API integrations ingester suppressed invalid line alerts") { + t.Fatalf("expected invalid alert suppression warning, logs=%s", logs) + } + if !strings.Contains(logs, `"alerts_suppressed":2`) { + t.Fatalf("expected suppressed invalid alert count in logs, logs=%s", logs) + } +} + +func 
TestAPIIntegrationsIngestAgent_Scan_InvalidLineAlertCap_IsPerFile(t *testing.T) { + st, err := store.New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer st.Close() + + dir := t.TempDir() + validLine := `{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":3,"completion_tokens":2}` + "\n" + for i := 0; i < 2; i++ { + path := filepath.Join(dir, fmt.Sprintf("bad-%d.jsonl", i)) + var content strings.Builder + for j := 0; j < apiIntegrationIngestMaxInvalidAlertsPerFilePerScan+2; j++ { + content.WriteString("{not-json}\n") + } + content.WriteString(validLine) + if err := os.WriteFile(path, []byte(content.String()), 0o600); err != nil { + t.Fatalf("WriteFile(%d): %v", i, err) + } + } + + logger, logBuf := newBufferedJSONLogger() + ag := NewAPIIntegrationsIngestAgent(st, dir, 0, logger) + ag.scan() + + alerts, err := st.GetActiveSystemAlertsByProvider("api_integrations", 50) + if err != nil { + t.Fatalf("GetActiveSystemAlertsByProvider: %v", err) + } + wantAlerts := apiIntegrationIngestMaxInvalidAlertsPerFilePerScan * 2 + if len(alerts) != wantAlerts { + t.Fatalf("len(alerts)=%d want %d", len(alerts), wantAlerts) + } + + events, err := st.QueryAPIIntegrationUsageRange(time.Date(2026, 4, 3, 11, 0, 0, 0, time.UTC), time.Date(2026, 4, 3, 13, 0, 0, 0, time.UTC)) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange: %v", err) + } + if len(events) != 2 { + t.Fatalf("events=%+v", events) + } + + logs := logBuf.String() + if strings.Count(logs, "API integrations ingester suppressed invalid line alerts") != 2 { + t.Fatalf("expected suppression warning per file, logs=%s", logs) + } +} + +func TestAPIIntegrationsIngestAgent_ScanFile_DedupAndTruncation(t *testing.T) { + st, err := store.New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer st.Close() + + dir := t.TempDir() + path := filepath.Join(dir, "tool.jsonl") + line := 
`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":3,"completion_tokens":2}` + "\n" + if err := os.WriteFile(path, []byte(line), 0o600); err != nil { + t.Fatalf("WriteFile: %v", err) + } + + ag := NewAPIIntegrationsIngestAgent(st, dir, 0, slog.Default()) + if err := ag.scanFile(path); err != nil { + t.Fatalf("scanFile(1): %v", err) + } + if err := ag.scanFile(path); err != nil { + t.Fatalf("scanFile(2): %v", err) + } + + events, err := st.QueryAPIIntegrationUsageRange(time.Date(2026, 4, 3, 11, 0, 0, 0, time.UTC), time.Date(2026, 4, 3, 13, 0, 0, 0, time.UTC)) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange: %v", err) + } + if len(events) != 1 { + t.Fatalf("expected 1 event after dedup, got %d", len(events)) + } + + if err := os.WriteFile(path, []byte(line), 0o600); err != nil { + t.Fatalf("WriteFile(truncate): %v", err) + } + if err := ag.scanFile(path); err != nil { + t.Fatalf("scanFile(3): %v", err) + } + events, err = st.QueryAPIIntegrationUsageRange(time.Date(2026, 4, 3, 11, 0, 0, 0, time.UTC), time.Date(2026, 4, 3, 13, 0, 0, 0, time.UTC)) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange(2): %v", err) + } + if len(events) != 1 { + t.Fatalf("expected 1 event after truncation reread, got %d", len(events)) + } +} + +func TestAPIIntegrationsIngestAgent_Run_ProcessesMultipleFiles(t *testing.T) { + st, err := store.New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer st.Close() + + dir := t.TempDir() + files := map[string]string{ + "anthropic.jsonl": `{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":5,"completion_tokens":2}` + "\n", + "mistral.jsonl": `{"ts":"2026-04-03T12:01:00Z","integration":"summariser","provider":"mistral","model":"mistral-small-latest","prompt_tokens":4,"completion_tokens":1}` + "\n", + } + for name, content := range files { + if err := os.WriteFile(filepath.Join(dir, name), 
[]byte(content), 0o600); err != nil { + t.Fatalf("WriteFile(%s): %v", name, err) + } + } + + ag := NewAPIIntegrationsIngestAgent(st, dir, 0, slog.Default()) + ag.SetInterval(10 * time.Millisecond) + ctx, cancel := context.WithCancel(context.Background()) + done := make(chan struct{}) + go func() { + defer close(done) + _ = ag.Run(ctx) + }() + time.Sleep(50 * time.Millisecond) + cancel() + <-done + + summary, err := st.QueryAPIIntegrationUsageSummary() + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageSummary: %v", err) + } + if len(summary) != 2 { + t.Fatalf("summary=%+v", summary) + } +} + +func TestAPIIntegrationsIngestAgent_Scan_FileCap_RotatesAcrossCycles(t *testing.T) { + st, err := store.New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer st.Close() + + dir := t.TempDir() + base := time.Date(2026, 4, 3, 12, 0, 0, 0, time.UTC) + totalFiles := apiIntegrationIngestMaxFilesPerScan + 5 + for i := 0; i < totalFiles; i++ { + name := fmt.Sprintf("%03d.jsonl", i) + line := fmt.Sprintf( + `{"ts":"%s","integration":"integration-%03d","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":3,"completion_tokens":2}`+"\n", + base.Add(time.Duration(i)*time.Minute).Format(time.RFC3339), + i, + ) + if err := os.WriteFile(filepath.Join(dir, name), []byte(line), 0o600); err != nil { + t.Fatalf("WriteFile(%s): %v", name, err) + } + } + + logger, logBuf := newBufferedJSONLogger() + ag := NewAPIIntegrationsIngestAgent(st, dir, 0, logger) + ag.scan() + + events, err := st.QueryAPIIntegrationUsageRange(base.Add(-time.Hour), base.Add(24*time.Hour), totalFiles) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange: %v", err) + } + if len(events) != apiIntegrationIngestMaxFilesPerScan { + t.Fatalf("len(events)=%d want %d", len(events), apiIntegrationIngestMaxFilesPerScan) + } + + seenPaths := make(map[string]bool, len(events)) + for _, event := range events { + seenPaths[event.SourcePath] = true + } + for i := 0; i < 
apiIntegrationIngestMaxFilesPerScan; i++ { + path := filepath.Join(dir, fmt.Sprintf("%03d.jsonl", i)) + if !seenPaths[path] { + t.Fatalf("expected scanned file %s to be ingested", path) + } + } + for i := apiIntegrationIngestMaxFilesPerScan; i < totalFiles; i++ { + path := filepath.Join(dir, fmt.Sprintf("%03d.jsonl", i)) + if seenPaths[path] { + t.Fatalf("did not expect skipped file %s to be ingested on first scan", path) + } + } + + ag.scan() + + events, err = st.QueryAPIIntegrationUsageRange(base.Add(-time.Hour), base.Add(24*time.Hour), totalFiles) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange(second): %v", err) + } + if len(events) != totalFiles { + t.Fatalf("len(events)=%d want %d after rotation", len(events), totalFiles) + } + seenPaths = make(map[string]bool, len(events)) + for _, event := range events { + seenPaths[event.SourcePath] = true + } + for i := 0; i < totalFiles; i++ { + path := filepath.Join(dir, fmt.Sprintf("%03d.jsonl", i)) + if !seenPaths[path] { + t.Fatalf("expected rotated scan to ingest file %s", path) + } + } + + logs := logBuf.String() + if !strings.Contains(logs, "API integrations ingester skipped files beyond scan cap") { + t.Fatalf("expected file cap warning, logs=%s", logs) + } + if !strings.Contains(logs, fmt.Sprintf(`"skipped_files":%d`, totalFiles-apiIntegrationIngestMaxFilesPerScan)) { + t.Fatalf("expected skipped file count in logs, logs=%s", logs) + } +} + +func TestAPIIntegrationsIngestAgent_Scan_PrunesExpiredDatabaseRows(t *testing.T) { + st, err := store.New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer st.Close() + + oldEvent := `{"ts":"2025-12-01T12:00:00Z","integration":"notes","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":2,"completion_tokens":1}` + parsedOld, err := apiintegrations.ParseUsageEventLine([]byte(oldEvent), "/tmp/api-integrations/notes.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine(old): %v", err) + } + if _, err := 
st.InsertAPIIntegrationUsageEvent(parsedOld); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent(old): %v", err) + } + + dir := t.TempDir() + path := filepath.Join(dir, "notes.jsonl") + newLine := `{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":3,"completion_tokens":2}` + "\n" + if err := os.WriteFile(path, []byte(newLine), 0o600); err != nil { + t.Fatalf("WriteFile: %v", err) + } + + ag := NewAPIIntegrationsIngestAgent(st, dir, 24*time.Hour, slog.Default()) + ag.pruneInterval = time.Millisecond + if err := ag.pruneExpiredUsageEvents(); err != nil { + t.Fatalf("pruneExpiredUsageEvents: %v", err) + } + if err := ag.scanFile(path); err != nil { + t.Fatalf("scanFile: %v", err) + } + + events, err := st.QueryAPIIntegrationUsageRange(time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC), time.Date(2026, 5, 1, 0, 0, 0, 0, time.UTC)) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange: %v", err) + } + if len(events) != 1 { + t.Fatalf("events=%+v", events) + } + if events[0].Timestamp.Format(time.RFC3339) != "2026-04-03T12:00:00Z" { + t.Fatalf("expected retained new event, got %+v", events[0]) + } +} diff --git a/internal/api_integrations/types.go b/internal/api_integrations/types.go new file mode 100644 index 0000000..78c875a --- /dev/null +++ b/internal/api_integrations/types.go @@ -0,0 +1,192 @@ +package apiintegrations + +import ( + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "strings" + "time" +) + +const ( + maxIntegrationFieldLen = 256 + maxMetadataJSONLen = 4096 + MaxIngestPartialLineBytes = 512 * 1024 +) + +var allowedProviders = map[string]struct{}{ + "anthropic": {}, + "openai": {}, + "mistral": {}, + "openrouter": {}, + "gemini": {}, +} + +// UsageEvent is the normalized API integration telemetry event stored by onWatch. 
+type UsageEvent struct { + Timestamp time.Time + Integration string + Provider string + Account string + Model string + RequestID string + PromptTokens int + CompletionTokens int + TotalTokens int + CostUSD *float64 + LatencyMS *int + MetadataJSON string + SourcePath string + Fingerprint string +} + +// IngestState stores the persistent cursor for a tailed JSONL file. +type IngestState struct { + SourcePath string + Offset int64 + FileSize int64 + FileModTime time.Time + PartialLine string + PartialLineBytes int + PartialLineOversized bool + UpdatedAt time.Time +} + +type usageEventWire struct { + TS string `json:"ts"` + Integration string `json:"integration"` + Provider string `json:"provider"` + Account string `json:"account"` + Model string `json:"model"` + RequestID string `json:"request_id"` + PromptTokens int `json:"prompt_tokens"` + CompletionTokens int `json:"completion_tokens"` + TotalTokens *int `json:"total_tokens"` + CostUSD *float64 `json:"cost_usd"` + LatencyMS *int `json:"latency_ms"` + Metadata json.RawMessage `json:"metadata"` +} + +// ParseUsageEventLine validates and normalizes a single JSONL event line. 
+func ParseUsageEventLine(line []byte, sourcePath string) (*UsageEvent, error) { + trimmed := strings.TrimSpace(string(line)) + if trimmed == "" { + return nil, fmt.Errorf("empty event line") + } + + var wire usageEventWire + if err := json.Unmarshal([]byte(trimmed), &wire); err != nil { + return nil, fmt.Errorf("parse API integration usage event: %w", err) + } + + ts, err := time.Parse(time.RFC3339, strings.TrimSpace(wire.TS)) + if err != nil { + return nil, fmt.Errorf("invalid ts: %w", err) + } + + integrationName := strings.TrimSpace(wire.Integration) + if integrationName == "" { + return nil, fmt.Errorf("integration is required") + } + if len(integrationName) > maxIntegrationFieldLen { + return nil, fmt.Errorf("integration exceeds %d characters", maxIntegrationFieldLen) + } + + provider := strings.ToLower(strings.TrimSpace(wire.Provider)) + if _, ok := allowedProviders[provider]; !ok { + return nil, fmt.Errorf("unsupported provider %q", provider) + } + + model := strings.TrimSpace(wire.Model) + if model == "" { + return nil, fmt.Errorf("model is required") + } + if len(model) > maxIntegrationFieldLen { + return nil, fmt.Errorf("model exceeds %d characters", maxIntegrationFieldLen) + } + + if wire.PromptTokens < 0 { + return nil, fmt.Errorf("prompt_tokens must be >= 0") + } + if wire.CompletionTokens < 0 { + return nil, fmt.Errorf("completion_tokens must be >= 0") + } + + totalTokens := wire.PromptTokens + wire.CompletionTokens + if wire.TotalTokens != nil { + if *wire.TotalTokens < 0 { + return nil, fmt.Errorf("total_tokens must be >= 0") + } + totalTokens = *wire.TotalTokens + } + + if wire.CostUSD != nil && *wire.CostUSD < 0 { + return nil, fmt.Errorf("cost_usd must be >= 0") + } + if wire.LatencyMS != nil && *wire.LatencyMS < 0 { + return nil, fmt.Errorf("latency_ms must be >= 0") + } + + account := strings.TrimSpace(wire.Account) + if account == "" { + account = "default" + } + if len(account) > maxIntegrationFieldLen { + return nil, fmt.Errorf("account 
exceeds %d characters", maxIntegrationFieldLen) + } + + metadataJSON := "" + if len(wire.Metadata) > 0 && string(wire.Metadata) != "null" { + var metadata map[string]interface{} + if err := json.Unmarshal(wire.Metadata, &metadata); err != nil { + return nil, fmt.Errorf("metadata must be a JSON object: %w", err) + } + compact, err := json.Marshal(metadata) + if err != nil { + return nil, fmt.Errorf("compact metadata: %w", err) + } + metadataJSON = string(compact) + } + if len(metadataJSON) > maxMetadataJSONLen { + return nil, fmt.Errorf("metadata_json exceeds %d bytes after compaction", maxMetadataJSONLen) + } + + event := &UsageEvent{ + Timestamp: ts.UTC(), + Integration: integrationName, + Provider: provider, + Account: account, + Model: model, + RequestID: strings.TrimSpace(wire.RequestID), + PromptTokens: wire.PromptTokens, + CompletionTokens: wire.CompletionTokens, + TotalTokens: totalTokens, + CostUSD: wire.CostUSD, + LatencyMS: wire.LatencyMS, + MetadataJSON: metadataJSON, + SourcePath: sourcePath, + } + event.Fingerprint = eventFingerprint(event) + return event, nil +} + +func eventFingerprint(event *UsageEvent) string { + h := sha256.New() + writeHashPart(h, event.SourcePath) + writeHashPart(h, event.Timestamp.Format(time.RFC3339Nano)) + writeHashPart(h, event.Integration) + writeHashPart(h, event.Provider) + writeHashPart(h, event.Account) + writeHashPart(h, event.Model) + writeHashPart(h, fmt.Sprintf("%d", event.PromptTokens)) + writeHashPart(h, fmt.Sprintf("%d", event.CompletionTokens)) + writeHashPart(h, fmt.Sprintf("%d", event.TotalTokens)) + writeHashPart(h, event.RequestID) + return hex.EncodeToString(h.Sum(nil)) +} + +func writeHashPart(h interface{ Write([]byte) (int, error) }, part string) { + _, _ = h.Write([]byte(part)) + _, _ = h.Write([]byte{0}) +} diff --git a/internal/api_integrations/types_test.go b/internal/api_integrations/types_test.go new file mode 100644 index 0000000..b1114b5 --- /dev/null +++ b/internal/api_integrations/types_test.go 
@@ -0,0 +1,106 @@ +package apiintegrations + +import ( + "fmt" + "strings" + "testing" +) + +func TestParseUsageEventLine_Success(t *testing.T) { + line := []byte(`{"ts":"2026-04-03T12:00:00Z","integration":"notes-organiser","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":12,"completion_tokens":5,"metadata":{"task":"weekly"}}`) + + event, err := ParseUsageEventLine(line, "/tmp/api-integrations/notes.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine: %v", err) + } + if event.Integration != "notes-organiser" { + t.Fatalf("integration=%q", event.Integration) + } + if event.Provider != "anthropic" { + t.Fatalf("provider=%q", event.Provider) + } + if event.Account != "default" { + t.Fatalf("account=%q", event.Account) + } + if event.TotalTokens != 17 { + t.Fatalf("total_tokens=%d", event.TotalTokens) + } + if event.MetadataJSON != `{"task":"weekly"}` { + t.Fatalf("metadata=%q", event.MetadataJSON) + } + if event.Fingerprint == "" { + t.Fatal("expected fingerprint") + } +} + +func TestParseUsageEventLine_RejectsInvalidProvider(t *testing.T) { + line := []byte(`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"copilot","model":"x","prompt_tokens":1,"completion_tokens":1}`) + if _, err := ParseUsageEventLine(line, "/tmp/test.jsonl"); err == nil { + t.Fatal("expected error") + } +} + +func TestParseUsageEventLine_RejectsInvalidMetadata(t *testing.T) { + line := []byte(`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":1,"completion_tokens":1,"metadata":["bad"]}`) + if _, err := ParseUsageEventLine(line, "/tmp/test.jsonl"); err == nil { + t.Fatal("expected error") + } +} + +func TestParseUsageEventLine_RejectsOverlongFields(t *testing.T) { + long := func(n int) string { + b := make([]byte, n) + for i := range b { + b[i] = 'a' + } + return string(b) + } + + // integration too long + line := []byte(`{"ts":"2026-04-03T12:00:00Z","integration":"` + long(maxIntegrationFieldLen+1) + 
`","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":1,"completion_tokens":1}`) + if _, err := ParseUsageEventLine(line, "/tmp/test.jsonl"); err == nil { + t.Fatal("expected error for overlong integration") + } + + // model too long + line = []byte(`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"anthropic","model":"` + long(maxIntegrationFieldLen+1) + `","prompt_tokens":1,"completion_tokens":1}`) + if _, err := ParseUsageEventLine(line, "/tmp/test.jsonl"); err == nil { + t.Fatal("expected error for overlong model") + } + + // account too long + line = []byte(`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","account":"` + long(maxIntegrationFieldLen+1) + `","prompt_tokens":1,"completion_tokens":1}`) + if _, err := ParseUsageEventLine(line, "/tmp/test.jsonl"); err == nil { + t.Fatal("expected error for overlong account") + } +} + +func TestParseUsageEventLine_RejectsOverlongMetadata(t *testing.T) { + // Build a metadata object whose compacted JSON exceeds maxMetadataJSONLen + // by repeating a key-value pair enough times. 
+ pairs := make([]string, 0, 200) + for i := 0; i < 200; i++ { + pairs = append(pairs, fmt.Sprintf(`"key%d":"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"`, i)) + } + metadata := "{" + strings.Join(pairs, ",") + "}" + line := []byte(`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":1,"completion_tokens":1,"metadata":` + metadata + `}`) + if _, err := ParseUsageEventLine(line, "/tmp/test.jsonl"); err == nil { + t.Fatal("expected error for overlong metadata") + } +} + +func TestParseUsageEventLine_FingerprintDependsOnSourcePath(t *testing.T) { + line := []byte(`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"mistral","model":"mistral-small-latest","prompt_tokens":1,"completion_tokens":1}`) + + a, err := ParseUsageEventLine(line, "/tmp/a.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine(a): %v", err) + } + b, err := ParseUsageEventLine(line, "/tmp/b.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine(b): %v", err) + } + if a.Fingerprint == b.Fingerprint { + t.Fatal("expected different fingerprints for different source files") + } +} diff --git a/internal/config/config.go b/internal/config/config.go index da5da44..a5b9198 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -49,8 +49,8 @@ type Config struct { AntigravityEnabled bool // true if auto-detection should be attempted // MiniMax provider configuration - MiniMaxAPIKey string // MINIMAX_API_KEY - MiniMaxRegion string // MINIMAX_REGION ( "global" | "cn", default: "global" ) + MiniMaxAPIKey string // MINIMAX_API_KEY + MiniMaxRegion string // MINIMAX_REGION ( "global" | "cn", default: "global" ) // OpenRouter provider configuration OpenRouterAPIKey string // OPENROUTER_API_KEY @@ -61,6 +61,11 @@ type Config struct { GeminiRefreshToken string // GEMINI_REFRESH_TOKEN (for Docker/headless) GeminiAccessToken string // GEMINI_ACCESS_TOKEN (for Docker/headless) + // Custom API Integrations telemetry 
ingestion + APIIntegrationsEnabled bool // ONWATCH_API_INTEGRATIONS_ENABLED (default: true) + APIIntegrationsDir string // ONWATCH_API_INTEGRATIONS_DIR (default: ~/.onwatch/api-integrations or /data/api-integrations) + APIIntegrationsRetention time.Duration // ONWATCH_API_INTEGRATIONS_RETENTION (example: 720h, 0 disables pruning) + // Shared configuration PollInterval time.Duration // ONWATCH_POLL_INTERVAL (seconds → Duration) Port int // ONWATCH_PORT @@ -297,6 +302,21 @@ func loadFromEnvAndFlags(flags *flagValues) (*Config, error) { } // File-based auto-detection is done later in main.go + // Custom API Integrations telemetry ingestion + cfg.APIIntegrationsDir = strings.TrimSpace(os.Getenv("ONWATCH_API_INTEGRATIONS_DIR")) + cfg.APIIntegrationsEnabled = true + cfg.APIIntegrationsRetention = 60 * 24 * time.Hour + if env := strings.ToLower(strings.TrimSpace(os.Getenv("ONWATCH_API_INTEGRATIONS_ENABLED"))); env != "" { + cfg.APIIntegrationsEnabled = env == "true" || env == "1" || env == "yes" || env == "on" + } + if env := strings.TrimSpace(os.Getenv("ONWATCH_API_INTEGRATIONS_RETENTION")); env != "" { + if env == "0" { + cfg.APIIntegrationsRetention = 0 + } else if v, err := time.ParseDuration(env); err == nil { + cfg.APIIntegrationsRetention = v + } + } + // Poll Interval (seconds) - ONWATCH_* first, SYNTRACK_* fallback if flags.interval > 0 { cfg.PollInterval = time.Duration(flags.interval) * time.Second @@ -412,6 +432,18 @@ func (c *Config) applyDefaults() { if c.SessionIdleTimeout == 0 { c.SessionIdleTimeout = 600 * time.Second } + if c.APIIntegrationsDir == "" { + if c.IsDockerEnvironment() { + c.APIIntegrationsDir = "/data/api-integrations" + } else { + home, err := os.UserHomeDir() + if err != nil || home == "" { + c.APIIntegrationsDir = "./api-integrations" + } else { + c.APIIntegrationsDir = filepath.Join(home, ".onwatch", "api-integrations") + } + } + } } // Validate checks the configuration for errors. 
@@ -435,6 +467,9 @@ func (c *Config) Validate() error { if c.Port < 1024 || c.Port > 65535 { return fmt.Errorf("port must be between 1024 and 65535") } + if c.APIIntegrationsRetention < 0 { + return fmt.Errorf("API integrations retention must be non-negative") + } return nil } @@ -566,6 +601,9 @@ func (c *Config) String() string { // Redact MiniMax token minimaxDisplay := redactAPIKey(c.MiniMaxAPIKey, "") fmt.Fprintf(&sb, " MiniMaxAPIKey: %s,\n", minimaxDisplay) + fmt.Fprintf(&sb, " APIIntegrationsEnabled: %v,\n", c.APIIntegrationsEnabled) + fmt.Fprintf(&sb, " APIIntegrationsDir: %s,\n", c.APIIntegrationsDir) + fmt.Fprintf(&sb, " APIIntegrationsRetention: %v,\n", c.APIIntegrationsRetention) fmt.Fprintf(&sb, " PollInterval: %v,\n", c.PollInterval) fmt.Fprintf(&sb, " SessionIdleTimeout: %v,\n", c.SessionIdleTimeout) diff --git a/internal/config/config_test.go b/internal/config/config_test.go index 58ed709..49e2af4 100644 --- a/internal/config/config_test.go +++ b/internal/config/config_test.go @@ -173,6 +173,37 @@ func TestConfig_DefaultValues(t *testing.T) { if cfg.LogLevel != "info" { t.Errorf("LogLevel = %q, want %q", cfg.LogLevel, "info") } + if cfg.APIIntegrationsRetention != 60*24*time.Hour { + t.Errorf("APIIntegrationsRetention = %v, want %v", cfg.APIIntegrationsRetention, 60*24*time.Hour) + } +} + +func TestConfig_APIIntegrationsRetention_LoadsFromEnv(t *testing.T) { + os.Clearenv() + os.Setenv("ONWATCH_API_INTEGRATIONS_RETENTION", "168h") + defer os.Clearenv() + + cfg, err := Load() + if err != nil { + t.Fatalf("Load() failed: %v", err) + } + if cfg.APIIntegrationsRetention != 168*time.Hour { + t.Errorf("APIIntegrationsRetention = %v, want %v", cfg.APIIntegrationsRetention, 168*time.Hour) + } +} + +func TestConfig_APIIntegrationsRetention_Disabled(t *testing.T) { + os.Clearenv() + os.Setenv("ONWATCH_API_INTEGRATIONS_RETENTION", "0") + defer os.Clearenv() + + cfg, err := Load() + if err != nil { + t.Fatalf("Load() failed: %v", err) + } + if 
cfg.APIIntegrationsRetention != 0 { + t.Errorf("APIIntegrationsRetention = %v, want 0", cfg.APIIntegrationsRetention) + } } func TestConfig_OnlySyntheticProvider(t *testing.T) { @@ -1348,9 +1379,9 @@ func TestLoadEnvFile_IgnoresNonOnwatchLocalEnv(t *testing.T) { func TestConfig_CodexShowAvailable(t *testing.T) { tests := []struct { - name string - envVal string - want string + name string + envVal string + want string }{ {"empty defaults to usage", "", "usage"}, {"usage passes through", "usage", "usage"}, diff --git a/internal/store/api_integrations_store.go b/internal/store/api_integrations_store.go new file mode 100644 index 0000000..73fd8a9 --- /dev/null +++ b/internal/store/api_integrations_store.go @@ -0,0 +1,413 @@ +package store + +import ( + "database/sql" + "errors" + "fmt" + "time" + + apiintegrations "github.com/onllm-dev/onwatch/v2/internal/api_integrations" + sqlite "modernc.org/sqlite" + sqlite3 "modernc.org/sqlite/lib" +) + +const ( + apiIntegrationUsageSummaryLimit = 500 + apiIntegrationUsageBucketsLimit = 5000 +) + +// APIIntegrationUsageSummaryRow contains grouped usage totals for backend reporting. +type APIIntegrationUsageSummaryRow struct { + IntegrationName string + Provider string + AccountName string + Model string + RequestCount int + PromptTokens int + CompletionTokens int + TotalTokens int + TotalCostUSD float64 + LastCapturedAt time.Time +} + +// APIIntegrationUsageBucketRow contains aggregated usage for one integration and time bucket. +type APIIntegrationUsageBucketRow struct { + IntegrationName string + BucketStart time.Time + RequestCount int + PromptTokens int + CompletionTokens int + TotalTokens int + TotalCostUSD float64 +} + +// APIIntegrationIngestHealthRow contains persisted ingest state with last seen event time. 
+type APIIntegrationIngestHealthRow struct { + SourcePath string + OffsetBytes int64 + FileSize int64 + FileModTime *time.Time + PartialLine string + UpdatedAt time.Time + LastCapturedAt *time.Time +} + +// InsertAPIIntegrationUsageEvent stores a normalized API integrations telemetry event. +func (s *Store) InsertAPIIntegrationUsageEvent(event *apiintegrations.UsageEvent) (int64, error) { + if event == nil { + return 0, fmt.Errorf("API integration usage event is nil") + } + res, err := s.db.Exec(` + INSERT INTO api_integration_usage_events ( + captured_at, integration_name, provider, account_name, model, request_id, + prompt_tokens, completion_tokens, total_tokens, cost_usd, latency_ms, + metadata_json, source_path, fingerprint, created_at + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + `, + event.Timestamp.Format(time.RFC3339Nano), + event.Integration, + event.Provider, + event.Account, + event.Model, + event.RequestID, + event.PromptTokens, + event.CompletionTokens, + event.TotalTokens, + event.CostUSD, + event.LatencyMS, + event.MetadataJSON, + event.SourcePath, + event.Fingerprint, + time.Now().UTC().Format(time.RFC3339Nano), + ) + if err != nil { + if isSQLiteUniqueConstraintError(err) { + return 0, ErrDuplicateAPIIntegrationUsageEvent + } + return 0, fmt.Errorf("failed to insert API integration usage event: %w", err) + } + id, err := res.LastInsertId() + if err != nil { + return 0, fmt.Errorf("failed to get API integration usage event id: %w", err) + } + return id, nil +} + +// QueryAPIIntegrationUsageRange returns API integration usage events ordered by capture time ascending. 
+func (s *Store) QueryAPIIntegrationUsageRange(start, end time.Time, limit ...int) ([]apiintegrations.UsageEvent, error) { + query := ` + SELECT captured_at, integration_name, provider, account_name, model, request_id, + prompt_tokens, completion_tokens, total_tokens, cost_usd, latency_ms, + metadata_json, source_path, fingerprint + FROM api_integration_usage_events + WHERE captured_at BETWEEN ? AND ? + ORDER BY captured_at ASC + ` + args := []interface{}{start.Format(time.RFC3339Nano), end.Format(time.RFC3339Nano)} + if len(limit) > 0 && limit[0] > 0 { + query += ` LIMIT ?` + args = append(args, limit[0]) + } + + rows, err := s.db.Query(query, args...) + if err != nil { + return nil, fmt.Errorf("failed to query API integration usage range: %w", err) + } + defer rows.Close() + + var events []apiintegrations.UsageEvent + for rows.Next() { + var event apiintegrations.UsageEvent + var capturedAt string + var costUSD sql.NullFloat64 + var latencyMS sql.NullInt64 + if err := rows.Scan( + &capturedAt, + &event.Integration, + &event.Provider, + &event.Account, + &event.Model, + &event.RequestID, + &event.PromptTokens, + &event.CompletionTokens, + &event.TotalTokens, + &costUSD, + &latencyMS, + &event.MetadataJSON, + &event.SourcePath, + &event.Fingerprint, + ); err != nil { + return nil, fmt.Errorf("failed to scan API integration usage event: %w", err) + } + event.Timestamp, _ = time.Parse(time.RFC3339Nano, capturedAt) + if costUSD.Valid { + v := costUSD.Float64 + event.CostUSD = &v + } + if latencyMS.Valid { + v := int(latencyMS.Int64) + event.LatencyMS = &v + } + events = append(events, event) + } + return events, rows.Err() +} + +// DeleteAPIIntegrationUsageEventsOlderThan removes stored usage events older than the cutoff. +func (s *Store) DeleteAPIIntegrationUsageEventsOlderThan(cutoff time.Time) (int64, error) { + res, err := s.db.Exec(` + DELETE FROM api_integration_usage_events + WHERE captured_at < ? 
+ `, cutoff.UTC().Format(time.RFC3339Nano)) + if err != nil { + return 0, fmt.Errorf("failed to delete expired API integration usage events: %w", err) + } + deleted, err := res.RowsAffected() + if err != nil { + return 0, fmt.Errorf("failed to count deleted API integration usage events: %w", err) + } + return deleted, nil +} + +// QueryAPIIntegrationUsageSummary groups usage by integration/provider/account/model. +func (s *Store) QueryAPIIntegrationUsageSummary() ([]APIIntegrationUsageSummaryRow, error) { + rows, err := s.db.Query(` + SELECT integration_name, provider, account_name, model, + COUNT(*), + COALESCE(SUM(prompt_tokens), 0), + COALESCE(SUM(completion_tokens), 0), + COALESCE(SUM(total_tokens), 0), + COALESCE(SUM(cost_usd), 0), + MAX(captured_at) + FROM api_integration_usage_events + GROUP BY integration_name, provider, account_name, model + ORDER BY integration_name, provider, account_name, model + LIMIT ? + `, apiIntegrationUsageSummaryLimit) + if err != nil { + return nil, fmt.Errorf("failed to query API integration usage summary: %w", err) + } + defer rows.Close() + + var summary []APIIntegrationUsageSummaryRow + for rows.Next() { + var row APIIntegrationUsageSummaryRow + var lastCapturedAt string + if err := rows.Scan( + &row.IntegrationName, + &row.Provider, + &row.AccountName, + &row.Model, + &row.RequestCount, + &row.PromptTokens, + &row.CompletionTokens, + &row.TotalTokens, + &row.TotalCostUSD, + &lastCapturedAt, + ); err != nil { + return nil, fmt.Errorf("failed to scan API integration usage summary: %w", err) + } + row.LastCapturedAt, _ = time.Parse(time.RFC3339Nano, lastCapturedAt) + summary = append(summary, row) + } + return summary, rows.Err() +} + +// QueryAPIIntegrationUsageBuckets groups usage into time buckets over a range. 
+func (s *Store) QueryAPIIntegrationUsageBuckets(start, end time.Time, bucketSize time.Duration) ([]APIIntegrationUsageBucketRow, error) { + if bucketSize <= 0 { + return nil, fmt.Errorf("bucket size must be positive") + } + + bucketSeconds := int64(bucketSize / time.Second) + rows, err := s.db.Query(` + SELECT integration_name, + strftime('%Y-%m-%dT%H:%M:%SZ', (CAST(strftime('%s', captured_at) AS INTEGER) / ?) * ?, 'unixepoch'), + COUNT(*), + COALESCE(SUM(prompt_tokens), 0), + COALESCE(SUM(completion_tokens), 0), + COALESCE(SUM(total_tokens), 0), + COALESCE(SUM(cost_usd), 0) + FROM api_integration_usage_events + WHERE captured_at BETWEEN ? AND ? + GROUP BY integration_name, 2 + ORDER BY integration_name, 2 + LIMIT ? + `, bucketSeconds, bucketSeconds, start.Format(time.RFC3339Nano), end.Format(time.RFC3339Nano), apiIntegrationUsageBucketsLimit) + if err != nil { + return nil, fmt.Errorf("failed to query API integration usage buckets: %w", err) + } + defer rows.Close() + + var buckets []APIIntegrationUsageBucketRow + for rows.Next() { + var row APIIntegrationUsageBucketRow + var bucketStart string + if err := rows.Scan( + &row.IntegrationName, + &bucketStart, + &row.RequestCount, + &row.PromptTokens, + &row.CompletionTokens, + &row.TotalTokens, + &row.TotalCostUSD, + ); err != nil { + return nil, fmt.Errorf("failed to scan API integration usage bucket: %w", err) + } + row.BucketStart, _ = time.Parse(time.RFC3339Nano, bucketStart) + buckets = append(buckets, row) + } + return buckets, rows.Err() +} + +// GetAPIIntegrationIngestState returns the persisted tail cursor for a source file. +func (s *Store) GetAPIIntegrationIngestState(sourcePath string) (*apiintegrations.IngestState, error) { + var state apiintegrations.IngestState + var modTime sql.NullString + var partialLineBytes int64 + var updatedAt string + err := s.db.QueryRow(` + SELECT source_path, offset_bytes, file_size, file_mod_time, + CASE + WHEN length(CAST(partial_line AS BLOB)) > ? 
THEN '' + ELSE partial_line + END, + length(CAST(partial_line AS BLOB)), + updated_at + FROM api_integration_ingest_state + WHERE source_path = ? + `, apiintegrations.MaxIngestPartialLineBytes, sourcePath).Scan( + &state.SourcePath, + &state.Offset, + &state.FileSize, + &modTime, + &state.PartialLine, + &partialLineBytes, + &updatedAt, + ) + if errors.Is(err, sql.ErrNoRows) { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("failed to get API integration ingest state: %w", err) + } + state.PartialLineBytes = int(partialLineBytes) + state.PartialLineOversized = partialLineBytes > apiintegrations.MaxIngestPartialLineBytes + if modTime.Valid { + state.FileModTime, _ = time.Parse(time.RFC3339Nano, modTime.String) + } + state.UpdatedAt, _ = time.Parse(time.RFC3339Nano, updatedAt) + return &state, nil +} + +// UpsertAPIIntegrationIngestState persists the current tail cursor for a source file. +func (s *Store) UpsertAPIIntegrationIngestState(state *apiintegrations.IngestState) error { + if state == nil { + return fmt.Errorf("API integration ingest state is nil") + } + var modTime interface{} + if !state.FileModTime.IsZero() { + modTime = state.FileModTime.Format(time.RFC3339Nano) + } + _, err := s.db.Exec(` + INSERT INTO api_integration_ingest_state (source_path, offset_bytes, file_size, file_mod_time, partial_line, updated_at) + VALUES (?, ?, ?, ?, ?, ?) + ON CONFLICT(source_path) DO UPDATE SET + offset_bytes = excluded.offset_bytes, + file_size = excluded.file_size, + file_mod_time = excluded.file_mod_time, + partial_line = excluded.partial_line, + updated_at = excluded.updated_at + `, state.SourcePath, state.Offset, state.FileSize, modTime, state.PartialLine, time.Now().UTC().Format(time.RFC3339Nano)) + if err != nil { + return fmt.Errorf("failed to upsert API integration ingest state: %w", err) + } + return nil +} + +// QueryAPIIntegrationIngestHealth returns ingest cursor state plus last event timestamp per file. 
+func (s *Store) QueryAPIIntegrationIngestHealth() ([]APIIntegrationIngestHealthRow, error) { + rows, err := s.db.Query(` + SELECT s.source_path, s.offset_bytes, s.file_size, s.file_mod_time, s.partial_line, s.updated_at, + MAX(e.captured_at) as last_captured_at + FROM api_integration_ingest_state s + LEFT JOIN api_integration_usage_events e ON e.source_path = s.source_path + GROUP BY s.source_path, s.offset_bytes, s.file_size, s.file_mod_time, s.partial_line, s.updated_at + ORDER BY s.source_path + `) + if err != nil { + return nil, fmt.Errorf("failed to query API integration ingest health: %w", err) + } + defer rows.Close() + + var result []APIIntegrationIngestHealthRow + for rows.Next() { + var row APIIntegrationIngestHealthRow + var fileModTime sql.NullString + var updatedAt string + var lastCapturedAt sql.NullString + if err := rows.Scan( + &row.SourcePath, + &row.OffsetBytes, + &row.FileSize, + &fileModTime, + &row.PartialLine, + &updatedAt, + &lastCapturedAt, + ); err != nil { + return nil, fmt.Errorf("failed to scan API integration ingest health row: %w", err) + } + if fileModTime.Valid { + t, _ := time.Parse(time.RFC3339Nano, fileModTime.String) + row.FileModTime = &t + } + row.UpdatedAt, _ = time.Parse(time.RFC3339Nano, updatedAt) + if lastCapturedAt.Valid { + t, _ := time.Parse(time.RFC3339Nano, lastCapturedAt.String) + row.LastCapturedAt = &t + } + result = append(result, row) + } + return result, rows.Err() +} + +// GetActiveSystemAlertsByProvider returns active alerts for a provider, most recent first. +func (s *Store) GetActiveSystemAlertsByProvider(provider string, limit int) ([]SystemAlert, error) { + if limit <= 0 { + limit = 20 + } + rows, err := s.db.Query(` + SELECT id, provider, alert_type, title, message, severity, created_at, metadata + FROM system_alerts + WHERE dismissed_at IS NULL AND provider = ? + ORDER BY created_at DESC + LIMIT ? 
+ `, provider, limit) + if err != nil { + return nil, fmt.Errorf("store.GetActiveSystemAlertsByProvider: %w", err) + } + defer rows.Close() + + var alerts []SystemAlert + for rows.Next() { + var a SystemAlert + var createdAt, metadata string + if err := rows.Scan(&a.ID, &a.Provider, &a.AlertType, &a.Title, &a.Message, &a.Severity, &createdAt, &metadata); err != nil { + return nil, fmt.Errorf("store.GetActiveSystemAlertsByProvider: scan: %w", err) + } + if t, err := time.Parse(time.RFC3339Nano, createdAt); err == nil { + a.CreatedAt = t + } + a.Metadata = metadata + alerts = append(alerts, a) + } + return alerts, rows.Err() +} + +func isSQLiteUniqueConstraintError(err error) bool { + var sqliteErr *sqlite.Error + if !errors.As(err, &sqliteErr) { + return false + } + return sqliteErr.Code() == sqlite3.SQLITE_CONSTRAINT_UNIQUE +} diff --git a/internal/store/api_integrations_store_test.go b/internal/store/api_integrations_store_test.go new file mode 100644 index 0000000..5b87e30 --- /dev/null +++ b/internal/store/api_integrations_store_test.go @@ -0,0 +1,426 @@ +package store + +import ( + "errors" + "fmt" + "strings" + "testing" + "time" + + apiintegrations "github.com/onllm-dev/onwatch/v2/internal/api_integrations" +) + +func TestStore_InsertAPIIntegrationUsageEvent_Dedup(t *testing.T) { + s, err := New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer s.Close() + + event, err := apiintegrations.ParseUsageEventLine([]byte(`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":10,"completion_tokens":5}`), "/tmp/api-integrations/notes.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine: %v", err) + } + + if _, err := s.InsertAPIIntegrationUsageEvent(event); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent: %v", err) + } + if _, err := s.InsertAPIIntegrationUsageEvent(event); !errors.Is(err, ErrDuplicateAPIIntegrationUsageEvent) { + t.Fatalf("expected 
ErrDuplicateAPIIntegrationUsageEvent, got %v", err) + } +} + +func TestStore_QueryAPIIntegrationUsageSummary(t *testing.T) { + s, err := New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer s.Close() + + lines := []string{ + `{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":10,"completion_tokens":5,"cost_usd":0.1}`, + `{"ts":"2026-04-03T12:01:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":2,"completion_tokens":3,"cost_usd":0.2}`, + `{"ts":"2026-04-03T12:02:00Z","integration":"notes","provider":"mistral","model":"mistral-small-latest","prompt_tokens":4,"completion_tokens":1}`, + } + for i, line := range lines { + event, err := apiintegrations.ParseUsageEventLine([]byte(line), "/tmp/api-integrations/test.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine(%d): %v", i, err) + } + if _, err := s.InsertAPIIntegrationUsageEvent(event); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent(%d): %v", i, err) + } + } + + summary, err := s.QueryAPIIntegrationUsageSummary() + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageSummary: %v", err) + } + if len(summary) != 2 { + t.Fatalf("len(summary)=%d want 2", len(summary)) + } + if summary[0].Provider != "anthropic" || summary[0].RequestCount != 2 || summary[0].TotalTokens != 20 { + t.Fatalf("anthropic summary=%+v", summary[0]) + } + if summary[0].TotalCostUSD != 0.30000000000000004 && summary[0].TotalCostUSD != 0.3 { + t.Fatalf("anthropic cost=%v", summary[0].TotalCostUSD) + } +} + +func TestStore_QueryAPIIntegrationUsageSummary_BoundedAndOrdered(t *testing.T) { + s, err := New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer s.Close() + + base := time.Date(2026, 4, 3, 12, 0, 0, 0, time.UTC) + totalGroups := apiIntegrationUsageSummaryLimit + 10 + for i := 0; i < totalGroups; i++ { + line := 
fmt.Sprintf(`{"ts":"%s","integration":"integration-%03d","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":1,"completion_tokens":1}`, + base.Add(time.Duration(i)*time.Minute).Format(time.RFC3339), + i, + ) + event, err := apiintegrations.ParseUsageEventLine([]byte(line), "/tmp/api-integrations/bounded.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine(%d): %v", i, err) + } + if _, err := s.InsertAPIIntegrationUsageEvent(event); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent(%d): %v", i, err) + } + } + + summary, err := s.QueryAPIIntegrationUsageSummary() + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageSummary: %v", err) + } + if len(summary) != apiIntegrationUsageSummaryLimit { + t.Fatalf("len(summary)=%d want %d", len(summary), apiIntegrationUsageSummaryLimit) + } + if summary[0].IntegrationName != "integration-000" { + t.Fatalf("first summary row=%+v", summary[0]) + } + last := summary[len(summary)-1] + if last.IntegrationName != fmt.Sprintf("integration-%03d", apiIntegrationUsageSummaryLimit-1) { + t.Fatalf("last summary row=%+v", last) + } +} + +func TestStore_QueryAPIIntegrationUsageRange_AndIngestState(t *testing.T) { + s, err := New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer s.Close() + + event, err := apiintegrations.ParseUsageEventLine([]byte(`{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":7,"completion_tokens":2}`), "/tmp/api-integrations/notes.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine: %v", err) + } + if _, err := s.InsertAPIIntegrationUsageEvent(event); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent: %v", err) + } + + start := time.Date(2026, 4, 3, 11, 0, 0, 0, time.UTC) + end := time.Date(2026, 4, 3, 13, 0, 0, 0, time.UTC) + events, err := s.QueryAPIIntegrationUsageRange(start, end) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange: %v", err) + } + if len(events) != 1 || events[0].TotalTokens != 9 { 
+ t.Fatalf("events=%+v", events) + } + + state := &apiintegrations.IngestState{ + SourcePath: "/tmp/api-integrations/notes.jsonl", + Offset: 42, + FileSize: 100, + FileModTime: time.Date(2026, 4, 3, 12, 0, 0, 0, time.UTC), + PartialLine: `{"ts":"2026`, + } + if err := s.UpsertAPIIntegrationIngestState(state); err != nil { + t.Fatalf("UpsertAPIIntegrationIngestState: %v", err) + } + got, err := s.GetAPIIntegrationIngestState(state.SourcePath) + if err != nil { + t.Fatalf("GetAPIIntegrationIngestState: %v", err) + } + if got == nil || got.Offset != 42 || got.PartialLine != state.PartialLine { + t.Fatalf("state=%+v", got) + } + if got.PartialLineBytes != len(state.PartialLine) || got.PartialLineOversized { + t.Fatalf("unexpected partial line metadata: %+v", got) + } +} + +func TestStore_GetAPIIntegrationIngestState_BoundsOversizedPartialLine(t *testing.T) { + s, err := New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer s.Close() + + state := &apiintegrations.IngestState{ + SourcePath: "/tmp/api-integrations/oversized.jsonl", + Offset: 7, + FileSize: 9, + FileModTime: time.Date(2026, 4, 3, 12, 0, 0, 0, time.UTC), + PartialLine: strings.Repeat("x", apiintegrations.MaxIngestPartialLineBytes+1), + } + if err := s.UpsertAPIIntegrationIngestState(state); err != nil { + t.Fatalf("UpsertAPIIntegrationIngestState: %v", err) + } + + got, err := s.GetAPIIntegrationIngestState(state.SourcePath) + if err != nil { + t.Fatalf("GetAPIIntegrationIngestState: %v", err) + } + if got == nil { + t.Fatal("expected ingest state") + } + if got.PartialLine != "" { + t.Fatalf("expected bounded partial line to be empty, got len=%d", len(got.PartialLine)) + } + if !got.PartialLineOversized { + t.Fatalf("expected oversized flag, got %+v", got) + } + if got.PartialLineBytes != len(state.PartialLine) { + t.Fatalf("partial line bytes=%d want %d", got.PartialLineBytes, len(state.PartialLine)) + } +} + +func TestStore_DeleteAPIIntegrationUsageEventsOlderThan(t *testing.T) { + s, 
err := New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer s.Close() + + lines := []string{ + `{"ts":"2026-01-01T12:00:00Z","integration":"notes","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":1,"completion_tokens":1}`, + `{"ts":"2026-03-15T12:00:00Z","integration":"notes","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":2,"completion_tokens":2}`, + } + for i, line := range lines { + event, err := apiintegrations.ParseUsageEventLine([]byte(line), "/tmp/api-integrations/retention.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine(%d): %v", i, err) + } + if _, err := s.InsertAPIIntegrationUsageEvent(event); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent(%d): %v", i, err) + } + } + + deleted, err := s.DeleteAPIIntegrationUsageEventsOlderThan(time.Date(2026, 2, 1, 0, 0, 0, 0, time.UTC)) + if err != nil { + t.Fatalf("DeleteAPIIntegrationUsageEventsOlderThan: %v", err) + } + if deleted != 1 { + t.Fatalf("deleted=%d want 1", deleted) + } + + events, err := s.QueryAPIIntegrationUsageRange(time.Date(2026, 1, 1, 0, 0, 0, 0, time.UTC), time.Date(2026, 4, 1, 0, 0, 0, 0, time.UTC)) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageRange: %v", err) + } + if len(events) != 1 || events[0].Timestamp.Format(time.RFC3339) != "2026-03-15T12:00:00Z" { + t.Fatalf("events=%+v", events) + } +} + +func TestStore_QueryAPIIntegrationUsageBuckets(t *testing.T) { + s, err := New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer s.Close() + + lines := []string{ + `{"ts":"2026-04-03T12:01:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":10,"completion_tokens":5,"cost_usd":0.1}`, + `{"ts":"2026-04-03T12:04:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":2,"completion_tokens":3,"cost_usd":0.2}`, + 
`{"ts":"2026-04-03T12:16:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":4,"completion_tokens":1}`, + `{"ts":"2026-04-03T12:08:00Z","integration":"daily-report","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":6,"completion_tokens":2}`, + } + for i, line := range lines { + event, err := apiintegrations.ParseUsageEventLine([]byte(line), "/tmp/api-integrations/test.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine(%d): %v", i, err) + } + if _, err := s.InsertAPIIntegrationUsageEvent(event); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent(%d): %v", i, err) + } + } + + start := time.Date(2026, 4, 3, 12, 0, 0, 0, time.UTC) + end := time.Date(2026, 4, 3, 13, 0, 0, 0, time.UTC) + rows, err := s.QueryAPIIntegrationUsageBuckets(start, end, 15*time.Minute) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageBuckets: %v", err) + } + if len(rows) != 3 { + t.Fatalf("len(rows)=%d want 3", len(rows)) + } + + if rows[0].IntegrationName != "daily-report" || rows[0].BucketStart.Format(time.RFC3339) != "2026-04-03T12:00:00Z" || rows[0].TotalTokens != 8 { + t.Fatalf("unexpected first bucket: %+v", rows[0]) + } + if rows[1].IntegrationName != "notes" || rows[1].BucketStart.Format(time.RFC3339) != "2026-04-03T12:00:00Z" || rows[1].RequestCount != 2 || rows[1].TotalTokens != 20 { + t.Fatalf("unexpected second bucket: %+v", rows[1]) + } + if rows[1].TotalCostUSD < 0.299 || rows[1].TotalCostUSD > 0.301 { + t.Fatalf("unexpected second bucket cost: %+v", rows[1]) + } + if rows[2].IntegrationName != "notes" || rows[2].BucketStart.Format(time.RFC3339) != "2026-04-03T12:15:00Z" || rows[2].TotalTokens != 5 { + t.Fatalf("unexpected third bucket: %+v", rows[2]) + } +} + +func TestStore_QueryAPIIntegrationUsageBuckets_HourlyRange(t *testing.T) { + s, err := New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer s.Close() + + lines := []string{ + 
`{"ts":"2026-04-03T12:10:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":3,"completion_tokens":2}`, + `{"ts":"2026-04-03T12:50:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":4,"completion_tokens":1}`, + `{"ts":"2026-04-03T13:05:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":5,"completion_tokens":5,"cost_usd":0.5}`, + `{"ts":"2026-04-03T13:25:00Z","integration":"report","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":7,"completion_tokens":3}`, + } + for i, line := range lines { + event, err := apiintegrations.ParseUsageEventLine([]byte(line), "/tmp/api-integrations/hourly.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine(%d): %v", i, err) + } + if _, err := s.InsertAPIIntegrationUsageEvent(event); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent(%d): %v", i, err) + } + } + + start := time.Date(2026, 4, 3, 12, 0, 0, 0, time.UTC) + end := time.Date(2026, 4, 3, 14, 0, 0, 0, time.UTC) + rows, err := s.QueryAPIIntegrationUsageBuckets(start, end, time.Hour) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageBuckets: %v", err) + } + if len(rows) != 3 { + t.Fatalf("len(rows)=%d want 3", len(rows)) + } + if rows[0].IntegrationName != "notes" || rows[0].BucketStart.Format(time.RFC3339) != "2026-04-03T12:00:00Z" || rows[0].TotalTokens != 10 { + t.Fatalf("unexpected first hourly bucket: %+v", rows[0]) + } + if rows[1].IntegrationName != "notes" || rows[1].BucketStart.Format(time.RFC3339) != "2026-04-03T13:00:00Z" || rows[1].TotalCostUSD != 0.5 { + t.Fatalf("unexpected second hourly bucket: %+v", rows[1]) + } + if rows[2].IntegrationName != "report" || rows[2].BucketStart.Format(time.RFC3339) != "2026-04-03T13:00:00Z" || rows[2].TotalTokens != 10 { + t.Fatalf("unexpected third hourly bucket: %+v", rows[2]) + } +} + +func TestStore_QueryAPIIntegrationUsageBuckets_Bounded(t *testing.T) { + s, err := New(":memory:") 
+ if err != nil { + t.Fatalf("New: %v", err) + } + defer s.Close() + + // Insert apiIntegrationUsageBucketsLimit + 10 events, each in its own 1-minute bucket + // across different integrations so GROUP BY produces many rows. + base := time.Date(2026, 4, 3, 0, 0, 0, 0, time.UTC) + total := apiIntegrationUsageBucketsLimit + 10 + for i := 0; i < total; i++ { + line := fmt.Sprintf(`{"ts":"%s","integration":"integ-%04d","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":1,"completion_tokens":1}`, + base.Add(time.Duration(i)*time.Minute).Format(time.RFC3339), i) + event, err := apiintegrations.ParseUsageEventLine([]byte(line), "/tmp/bounded.jsonl") + if err != nil { + t.Fatalf("ParseUsageEventLine(%d): %v", i, err) + } + if _, err := s.InsertAPIIntegrationUsageEvent(event); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent(%d): %v", i, err) + } + } + + start := base + end := base.Add(time.Duration(total+1) * time.Minute) + rows, err := s.QueryAPIIntegrationUsageBuckets(start, end, time.Minute) + if err != nil { + t.Fatalf("QueryAPIIntegrationUsageBuckets: %v", err) + } + if len(rows) != apiIntegrationUsageBucketsLimit { + t.Fatalf("len(rows)=%d want %d", len(rows), apiIntegrationUsageBucketsLimit) + } +} + +func TestStore_QueryAPIIntegrationIngestHealth_AndAlertsByProvider(t *testing.T) { + s, err := New(":memory:") + if err != nil { + t.Fatalf("New: %v", err) + } + defer s.Close() + + stateA := &apiintegrations.IngestState{ + SourcePath: "/tmp/api-integrations/notes.jsonl", + Offset: 128, + FileSize: 256, + FileModTime: time.Date(2026, 4, 3, 12, 5, 0, 0, time.UTC), + PartialLine: `{"ts":"2026-04`, + } + stateB := &apiintegrations.IngestState{ + SourcePath: "/tmp/api-integrations/report.jsonl", + Offset: 64, + FileSize: 64, + FileModTime: time.Date(2026, 4, 3, 12, 6, 0, 0, time.UTC), + } + if err := s.UpsertAPIIntegrationIngestState(stateA); err != nil { + t.Fatalf("UpsertAPIIntegrationIngestState(stateA): %v", err) + } + if err := 
s.UpsertAPIIntegrationIngestState(stateB); err != nil { + t.Fatalf("UpsertAPIIntegrationIngestState(stateB): %v", err) + } + + event, err := apiintegrations.ParseUsageEventLine([]byte(`{"ts":"2026-04-03T12:07:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":10,"completion_tokens":5}`), stateA.SourcePath) + if err != nil { + t.Fatalf("ParseUsageEventLine: %v", err) + } + if _, err := s.InsertAPIIntegrationUsageEvent(event); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent: %v", err) + } + + if _, err := s.CreateSystemAlert("api_integrations", "ingest_warning", "Bad line", "Skipped malformed JSON", "warning", `{"sourcePath":"/tmp/api-integrations/notes.jsonl"}`); err != nil { + t.Fatalf("CreateSystemAlert(api_integrations): %v", err) + } + if _, err := s.CreateSystemAlert("anthropic", "auth_error", "Nope", "ignore me", "error", ""); err != nil { + t.Fatalf("CreateSystemAlert(anthropic): %v", err) + } + + healthRows, err := s.QueryAPIIntegrationIngestHealth() + if err != nil { + t.Fatalf("QueryAPIIntegrationIngestHealth: %v", err) + } + if len(healthRows) != 2 { + t.Fatalf("len(healthRows)=%d want 2", len(healthRows)) + } + if healthRows[0].SourcePath != stateA.SourcePath { + t.Fatalf("unexpected first health row: %+v", healthRows[0]) + } + if healthRows[0].LastCapturedAt == nil || healthRows[0].LastCapturedAt.Format(time.RFC3339) != "2026-04-03T12:07:00Z" { + t.Fatalf("unexpected first health lastCapturedAt: %+v", healthRows[0]) + } + if healthRows[1].SourcePath != stateB.SourcePath || healthRows[1].LastCapturedAt != nil { + t.Fatalf("unexpected second health row: %+v", healthRows[1]) + } + + alerts, err := s.GetActiveSystemAlertsByProvider("api_integrations", 10) + if err != nil { + t.Fatalf("GetActiveSystemAlertsByProvider: %v", err) + } + if len(alerts) != 1 { + t.Fatalf("len(alerts)=%d want 1", len(alerts)) + } + if alerts[0].Provider != "api_integrations" || alerts[0].AlertType != "ingest_warning" { + 
t.Fatalf("unexpected alert: %+v", alerts[0]) + } + if alerts[0].CreatedAt.Format(time.RFC3339) == "0001-01-01T00:00:00Z" { + t.Fatalf("expected parsed alert createdAt, got %+v", alerts[0]) + } +} diff --git a/internal/store/store.go b/internal/store/store.go index 35bed19..afa7751 100644 --- a/internal/store/store.go +++ b/internal/store/store.go @@ -70,6 +70,9 @@ type CrossQuotaEntry struct { Delta float64 // Percent - StartPercent } +// ErrDuplicateAPIIntegrationUsageEvent indicates an API integrations telemetry event already exists. +var ErrDuplicateAPIIntegrationUsageEvent = errors.New("store: duplicate API integration usage event") + func preflightDatabasePath(dbPath string) error { trimmed := strings.TrimSpace(dbPath) if trimmed == "" { @@ -623,6 +626,41 @@ func (s *Store) createTables() error { CREATE INDEX IF NOT EXISTS idx_openrouter_snapshots_captured ON openrouter_snapshots(captured_at); CREATE INDEX IF NOT EXISTS idx_openrouter_cycles_type_start ON openrouter_reset_cycles(quota_type, cycle_start); CREATE INDEX IF NOT EXISTS idx_openrouter_cycles_type_active ON openrouter_reset_cycles(quota_type, cycle_end) WHERE cycle_end IS NULL; + + -- API integrations telemetry ingestion tables + CREATE TABLE IF NOT EXISTS api_integration_usage_events ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + captured_at TEXT NOT NULL, + integration_name TEXT NOT NULL, + provider TEXT NOT NULL, + account_name TEXT NOT NULL DEFAULT 'default', + model TEXT NOT NULL, + request_id TEXT NOT NULL DEFAULT '', + prompt_tokens INTEGER NOT NULL, + completion_tokens INTEGER NOT NULL, + total_tokens INTEGER NOT NULL, + cost_usd REAL, + latency_ms INTEGER, + metadata_json TEXT NOT NULL DEFAULT '', + source_path TEXT NOT NULL, + fingerprint TEXT NOT NULL, + created_at TEXT NOT NULL + ); + + CREATE UNIQUE INDEX IF NOT EXISTS idx_api_integration_usage_events_fingerprint ON api_integration_usage_events(fingerprint); + CREATE INDEX IF NOT EXISTS idx_api_integration_usage_events_captured ON 
api_integration_usage_events(captured_at); + CREATE INDEX IF NOT EXISTS idx_api_integration_usage_events_integration_provider ON api_integration_usage_events(integration_name, provider, captured_at); + CREATE INDEX IF NOT EXISTS idx_api_integration_usage_events_provider_model ON api_integration_usage_events(provider, model, captured_at); + CREATE INDEX IF NOT EXISTS idx_api_integration_usage_events_source ON api_integration_usage_events(source_path); + + CREATE TABLE IF NOT EXISTS api_integration_ingest_state ( + source_path TEXT PRIMARY KEY, + offset_bytes INTEGER NOT NULL DEFAULT 0, + file_size INTEGER NOT NULL DEFAULT 0, + file_mod_time TEXT, + partial_line TEXT NOT NULL DEFAULT '', + updated_at TEXT NOT NULL + ); ` if _, err := s.db.Exec(schema); err != nil { @@ -887,6 +925,19 @@ func (s *Store) migrateSchema() error { } } + // Drop raw_line column from api_integration_usage_events - no longer stored. + // Ignore "no such column" (new DB or already migrated) and "no such table" + // (migrateSchema called directly on a partial DB in tests, or pre-api-integrations DB). + // TODO: remove this migration after all users have upgraded past the version that + // introduced raw_line (feat/api-integrations). Just to keep pulls clean for the limited + // number of users who are using this fork. 
+ if _, err := s.db.Exec(`ALTER TABLE api_integration_usage_events DROP COLUMN raw_line`); err != nil { + if !strings.Contains(err.Error(), "no such column") && + !strings.Contains(err.Error(), "no such table") { + return fmt.Errorf("failed to drop raw_line from api_integration_usage_events: %w", err) + } + } + return nil } diff --git a/internal/web/api_integrations_handlers.go b/internal/web/api_integrations_handlers.go new file mode 100644 index 0000000..c7da66e --- /dev/null +++ b/internal/web/api_integrations_handlers.go @@ -0,0 +1,371 @@ +package web + +import ( + "net/http" + "sort" + "time" +) + +type apiIntegrationCurrentModelBreakdown struct { + Model string `json:"model"` + RequestCount int `json:"requestCount"` + PromptTokens int `json:"promptTokens"` + CompletionTokens int `json:"completionTokens"` + TotalTokens int `json:"totalTokens"` + TotalCostUSD *float64 `json:"totalCostUsd,omitempty"` + LastCapturedAt string `json:"lastCapturedAt"` +} + +type apiIntegrationCurrentAccountBreakdown struct { + Account string `json:"account"` + RequestCount int `json:"requestCount"` + PromptTokens int `json:"promptTokens"` + CompletionTokens int `json:"completionTokens"` + TotalTokens int `json:"totalTokens"` + TotalCostUSD *float64 `json:"totalCostUsd,omitempty"` + LastCapturedAt string `json:"lastCapturedAt"` + Models []apiIntegrationCurrentModelBreakdown `json:"models"` +} + +type apiIntegrationCurrentProviderBreakdown struct { + Provider string `json:"provider"` + RequestCount int `json:"requestCount"` + PromptTokens int `json:"promptTokens"` + CompletionTokens int `json:"completionTokens"` + TotalTokens int `json:"totalTokens"` + TotalCostUSD *float64 `json:"totalCostUsd,omitempty"` + LastCapturedAt string `json:"lastCapturedAt"` + Accounts []apiIntegrationCurrentAccountBreakdown `json:"accounts"` +} + +// APIIntegrationsCurrent returns grouped current API integration usage totals. 
+func (h *Handler) APIIntegrationsCurrent(w http.ResponseWriter, r *http.Request) { + respondJSON(w, http.StatusOK, h.buildAPIIntegrationsCurrent()) +} + +func (h *Handler) buildAPIIntegrationsCurrent() map[string]interface{} { + response := map[string]interface{}{} + if h.store == nil { + return response + } + + rows, err := h.store.QueryAPIIntegrationUsageSummary() + if err != nil { + h.logger.Error("failed to query API integrations current", "error", err) + return response + } + + type modelNode struct { + row apiIntegrationCurrentModelBreakdown + } + type accountNode struct { + row apiIntegrationCurrentAccountBreakdown + models map[string]*modelNode + } + type providerNode struct { + row apiIntegrationCurrentProviderBreakdown + accounts map[string]*accountNode + } + type integrationNode struct { + RequestCount int + PromptTokens int + CompletionTokens int + TotalTokens int + TotalCostUSD float64 + HasCost bool + LastCapturedAt time.Time + Providers map[string]*providerNode + } + + integrationsMap := make(map[string]*integrationNode) + for _, entry := range rows { + integrationState, ok := integrationsMap[entry.IntegrationName] + if !ok { + integrationState = &integrationNode{Providers: make(map[string]*providerNode)} + integrationsMap[entry.IntegrationName] = integrationState + } + providerState, ok := integrationState.Providers[entry.Provider] + if !ok { + providerState = &providerNode{ + row: apiIntegrationCurrentProviderBreakdown{Provider: entry.Provider}, + accounts: make(map[string]*accountNode), + } + integrationState.Providers[entry.Provider] = providerState + } + accountState, ok := providerState.accounts[entry.AccountName] + if !ok { + accountState = &accountNode{ + row: apiIntegrationCurrentAccountBreakdown{Account: entry.AccountName}, + models: make(map[string]*modelNode), + } + providerState.accounts[entry.AccountName] = accountState + } + + model := apiIntegrationCurrentModelBreakdown{ + Model: entry.Model, + RequestCount: entry.RequestCount, + 
PromptTokens: entry.PromptTokens, + CompletionTokens: entry.CompletionTokens, + TotalTokens: entry.TotalTokens, + LastCapturedAt: entry.LastCapturedAt.UTC().Format(time.RFC3339), + } + if entry.TotalCostUSD > 0 { + cost := entry.TotalCostUSD + model.TotalCostUSD = &cost + } + accountState.models[entry.Model] = &modelNode{row: model} + + acc := &accountState.row + acc.RequestCount += entry.RequestCount + acc.PromptTokens += entry.PromptTokens + acc.CompletionTokens += entry.CompletionTokens + acc.TotalTokens += entry.TotalTokens + acc.LastCapturedAt = laterTimeString(acc.LastCapturedAt, entry.LastCapturedAt) + if entry.TotalCostUSD > 0 { + var current float64 + if acc.TotalCostUSD != nil { + current = *acc.TotalCostUSD + } + current += entry.TotalCostUSD + acc.TotalCostUSD = ¤t + } + + prov := &providerState.row + prov.RequestCount += entry.RequestCount + prov.PromptTokens += entry.PromptTokens + prov.CompletionTokens += entry.CompletionTokens + prov.TotalTokens += entry.TotalTokens + prov.LastCapturedAt = laterTimeString(prov.LastCapturedAt, entry.LastCapturedAt) + if entry.TotalCostUSD > 0 { + var current float64 + if prov.TotalCostUSD != nil { + current = *prov.TotalCostUSD + } + current += entry.TotalCostUSD + prov.TotalCostUSD = ¤t + } + + integrationState.RequestCount += entry.RequestCount + integrationState.PromptTokens += entry.PromptTokens + integrationState.CompletionTokens += entry.CompletionTokens + integrationState.TotalTokens += entry.TotalTokens + integrationState.TotalCostUSD += entry.TotalCostUSD + integrationState.HasCost = integrationState.HasCost || entry.TotalCostUSD > 0 + if entry.LastCapturedAt.After(integrationState.LastCapturedAt) { + integrationState.LastCapturedAt = entry.LastCapturedAt + } + } + + for integrationName, integrationState := range integrationsMap { + providers := make([]apiIntegrationCurrentProviderBreakdown, 0, len(integrationState.Providers)) + for _, providerState := range integrationState.Providers { + accounts := 
make([]apiIntegrationCurrentAccountBreakdown, 0, len(providerState.accounts)) + for _, accountState := range providerState.accounts { + models := make([]apiIntegrationCurrentModelBreakdown, 0, len(accountState.models)) + for _, modelState := range accountState.models { + models = append(models, modelState.row) + } + sortAPIIntegrationModels(models) + accountState.row.Models = models + accounts = append(accounts, accountState.row) + } + sortAPIIntegrationAccounts(accounts) + providerState.row.Accounts = accounts + providers = append(providers, providerState.row) + } + sortAPIIntegrationProviders(providers) + + item := map[string]interface{}{ + "integration": integrationName, + "requestCount": integrationState.RequestCount, + "promptTokens": integrationState.PromptTokens, + "completionTokens": integrationState.CompletionTokens, + "totalTokens": integrationState.TotalTokens, + "lastCapturedAt": integrationState.LastCapturedAt.UTC().Format(time.RFC3339), + "providers": providers, + } + if integrationState.HasCost { + item["totalCostUsd"] = integrationState.TotalCostUSD + } + response[integrationName] = item + } + + return response +} + +// APIIntegrationsHistory returns chart-ready aggregated history grouped by integration. 
+func (h *Handler) APIIntegrationsHistory(w http.ResponseWriter, r *http.Request) { + rangeStr := r.URL.Query().Get("range") + duration, err := parseTimeRange(rangeStr) + if err != nil { + respondError(w, http.StatusBadRequest, err.Error()) + return + } + respondJSON(w, http.StatusOK, h.buildAPIIntegrationsHistory(duration)) +} + +func (h *Handler) buildAPIIntegrationsHistory(duration time.Duration) map[string]interface{} { + response := map[string]interface{}{} + if h.store == nil { + return response + } + + now := time.Now().UTC() + start := now.Add(-duration) + bucketSize := apiIntegrationHistoryBucketSize(duration) + rows, err := h.store.QueryAPIIntegrationUsageBuckets(start, now, bucketSize) + if err != nil { + h.logger.Error("failed to query API integrations history", "error", err) + return response + } + + byIntegration := make(map[string][]map[string]interface{}) + for _, row := range rows { + entry := map[string]interface{}{ + "capturedAt": row.BucketStart.UTC().Format(time.RFC3339), + "requestCount": row.RequestCount, + "promptTokens": row.PromptTokens, + "completionTokens": row.CompletionTokens, + "totalTokens": row.TotalTokens, + } + if row.TotalCostUSD > 0 { + entry["totalCostUsd"] = row.TotalCostUSD + } + byIntegration[row.IntegrationName] = append(byIntegration[row.IntegrationName], entry) + } + + for integrationName, entries := range byIntegration { + step := downsampleStep(len(entries), maxChartPoints) + if step <= 1 { + response[integrationName] = entries + continue + } + downsampled := make([]map[string]interface{}, 0, min(len(entries), maxChartPoints)) + last := len(entries) - 1 + for index, entry := range entries { + if index != 0 && index != last && index%step != 0 { + continue + } + downsampled = append(downsampled, entry) + } + response[integrationName] = downsampled + } + + return response +} + +// APIIntegrationsHealth returns ingest subsystem status for API integrations telemetry. 
+func (h *Handler) APIIntegrationsHealth(w http.ResponseWriter, r *http.Request) { + respondJSON(w, http.StatusOK, h.buildAPIIntegrationsHealth()) +} + +func (h *Handler) buildAPIIntegrationsHealth() map[string]interface{} { + response := map[string]interface{}{ + "enabled": false, + "dir": "", + "running": false, + "files": []map[string]interface{}{}, + "alerts": []map[string]interface{}{}, + } + if h.config != nil { + response["enabled"] = h.config.APIIntegrationsEnabled + response["dir"] = h.config.APIIntegrationsDir + } + if enabled, _ := response["enabled"].(bool); !enabled { + return response + } + if h.agentManager != nil { + response["running"] = h.agentManager.IsRunning("api_integrations") + } + if h.store == nil { + return response + } + + files, err := h.store.QueryAPIIntegrationIngestHealth() + if err == nil { + payload := make([]map[string]interface{}, 0, len(files)) + for _, file := range files { + item := map[string]interface{}{ + "sourcePath": file.SourcePath, + "offsetBytes": file.OffsetBytes, + "fileSize": file.FileSize, + "partialLine": file.PartialLine, + "updatedAt": file.UpdatedAt.UTC().Format(time.RFC3339), + } + if file.FileModTime != nil { + item["fileModTime"] = file.FileModTime.UTC().Format(time.RFC3339) + } + if file.LastCapturedAt != nil { + item["lastCapturedAt"] = file.LastCapturedAt.UTC().Format(time.RFC3339) + } + payload = append(payload, item) + } + response["files"] = payload + } + + alerts, err := h.store.GetActiveSystemAlertsByProvider("api_integrations", 20) + if err == nil { + payload := make([]map[string]interface{}, 0, len(alerts)) + for _, alert := range alerts { + item := map[string]interface{}{ + "id": alert.ID, + "type": alert.AlertType, + "title": alert.Title, + "message": alert.Message, + "severity": alert.Severity, + "createdAt": alert.CreatedAt.UTC().Format(time.RFC3339), + } + if alert.Metadata != "" { + item["metadata"] = alert.Metadata + } + payload = append(payload, item) + } + response["alerts"] = payload + } + 
+ return response +} + +func apiIntegrationHistoryBucketSize(duration time.Duration) time.Duration { + switch { + case duration <= time.Hour: + return time.Minute + case duration <= 6*time.Hour: + return 5 * time.Minute + case duration <= 24*time.Hour: + return 15 * time.Minute + case duration <= 7*24*time.Hour: + return time.Hour + default: + return 6 * time.Hour + } +} + +func laterTimeString(current string, candidate time.Time) string { + if current == "" { + return candidate.UTC().Format(time.RFC3339) + } + parsed, err := time.Parse(time.RFC3339, current) + if err != nil || candidate.After(parsed) { + return candidate.UTC().Format(time.RFC3339) + } + return current +} + +func sortAPIIntegrationProviders(items []apiIntegrationCurrentProviderBreakdown) { + sort.Slice(items, func(i, j int) bool { + return items[i].Provider < items[j].Provider + }) +} + +func sortAPIIntegrationAccounts(items []apiIntegrationCurrentAccountBreakdown) { + sort.Slice(items, func(i, j int) bool { + return items[i].Account < items[j].Account + }) +} + +func sortAPIIntegrationModels(items []apiIntegrationCurrentModelBreakdown) { + sort.Slice(items, func(i, j int) bool { + return items[i].Model < items[j].Model + }) +} diff --git a/internal/web/api_integrations_handlers_test.go b/internal/web/api_integrations_handlers_test.go new file mode 100644 index 0000000..21735ee --- /dev/null +++ b/internal/web/api_integrations_handlers_test.go @@ -0,0 +1,361 @@ +package web + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + "time" + + apiintegrations "github.com/onllm-dev/onwatch/v2/internal/api_integrations" + "github.com/onllm-dev/onwatch/v2/internal/config" + "github.com/onllm-dev/onwatch/v2/internal/store" +) + +func insertAPIIntegrationEventForTest(t *testing.T, s *store.Store, line, sourcePath string) { + t.Helper() + event, err := apiintegrations.ParseUsageEventLine([]byte(line), sourcePath) + if err != nil { + t.Fatalf("ParseUsageEventLine: %v", err) + } + if _, 
err := s.InsertAPIIntegrationUsageEvent(event); err != nil { + t.Fatalf("InsertAPIIntegrationUsageEvent: %v", err) + } +} + +func TestHandler_APIIntegrationsCurrent_Empty(t *testing.T) { + s, err := store.New(":memory:") + if err != nil { + t.Fatalf("store.New: %v", err) + } + defer s.Close() + + h := NewHandler(s, nil, nil, nil, &config.Config{APIIntegrationsEnabled: true, APIIntegrationsDir: "/tmp/api-integrations"}) + req := httptest.NewRequest(http.MethodGet, "/api/api-integrations/current", nil) + rr := httptest.NewRecorder() + + h.APIIntegrationsCurrent(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("status=%d want 200", rr.Code) + } + var response map[string]interface{} + if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil { + t.Fatalf("json.Unmarshal: %v", err) + } + if len(response) != 0 { + t.Fatalf("response=%v want empty object", response) + } +} + +func TestHandler_APIIntegrationsCurrent_GroupedTotals(t *testing.T) { + s, err := store.New(":memory:") + if err != nil { + t.Fatalf("store.New: %v", err) + } + defer s.Close() + + insertAPIIntegrationEventForTest(t, s, `{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"anthropic","account":"personal","model":"claude-3-7-sonnet","prompt_tokens":10,"completion_tokens":5,"cost_usd":0.1}`, "/tmp/api-integrations/notes.jsonl") + insertAPIIntegrationEventForTest(t, s, `{"ts":"2026-04-03T12:02:00Z","integration":"notes","provider":"anthropic","account":"personal","model":"claude-3-7-haiku","prompt_tokens":4,"completion_tokens":1,"cost_usd":0.05}`, "/tmp/api-integrations/notes.jsonl") + insertAPIIntegrationEventForTest(t, s, `{"ts":"2026-04-03T12:04:00Z","integration":"notes","provider":"mistral","account":"team","model":"mistral-small-latest","prompt_tokens":6,"completion_tokens":2}`, "/tmp/api-integrations/notes.jsonl") + insertAPIIntegrationEventForTest(t, s, 
`{"ts":"2026-04-03T12:05:00Z","integration":"report","provider":"openai","model":"gpt-4.1-mini","prompt_tokens":3,"completion_tokens":2}`, "/tmp/api-integrations/report.jsonl") + + h := NewHandler(s, nil, nil, nil, &config.Config{APIIntegrationsEnabled: true, APIIntegrationsDir: "/tmp/api-integrations"}) + req := httptest.NewRequest(http.MethodGet, "/api/api-integrations/current", nil) + rr := httptest.NewRecorder() + + h.APIIntegrationsCurrent(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("status=%d want 200", rr.Code) + } + + var response map[string]struct { + Integration string `json:"integration"` + RequestCount int `json:"requestCount"` + PromptTokens int `json:"promptTokens"` + CompletionTokens int `json:"completionTokens"` + TotalTokens int `json:"totalTokens"` + TotalCostUSD float64 `json:"totalCostUsd"` + LastCapturedAt string `json:"lastCapturedAt"` + Providers []struct { + Provider string `json:"provider"` + RequestCount int `json:"requestCount"` + PromptTokens int `json:"promptTokens"` + CompletionTokens int `json:"completionTokens"` + TotalTokens int `json:"totalTokens"` + TotalCostUSD float64 `json:"totalCostUsd"` + LastCapturedAt string `json:"lastCapturedAt"` + Accounts []struct { + Account string `json:"account"` + RequestCount int `json:"requestCount"` + PromptTokens int `json:"promptTokens"` + CompletionTokens int `json:"completionTokens"` + TotalTokens int `json:"totalTokens"` + Models []struct { + Model string `json:"model"` + RequestCount int `json:"requestCount"` + PromptTokens int `json:"promptTokens"` + CompletionTokens int `json:"completionTokens"` + TotalTokens int `json:"totalTokens"` + } `json:"models"` + } `json:"accounts"` + } `json:"providers"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil { + t.Fatalf("json.Unmarshal: %v", err) + } + + notes := response["notes"] + if notes.Integration != "notes" || notes.RequestCount != 3 || notes.TotalTokens != 28 { + t.Fatalf("notes=%+v", notes) + } + if 
notes.LastCapturedAt != "2026-04-03T12:04:00Z" { + t.Fatalf("notes lastCapturedAt=%q", notes.LastCapturedAt) + } + if len(notes.Providers) != 2 || notes.Providers[0].Provider != "anthropic" || notes.Providers[1].Provider != "mistral" { + t.Fatalf("notes providers=%+v", notes.Providers) + } + if notes.Providers[0].Accounts[0].Account != "personal" || len(notes.Providers[0].Accounts[0].Models) != 2 { + t.Fatalf("notes anthropic account breakdown=%+v", notes.Providers[0].Accounts) + } + if notes.Providers[0].Accounts[0].Models[0].Model != "claude-3-7-haiku" || notes.Providers[0].Accounts[0].Models[1].Model != "claude-3-7-sonnet" { + t.Fatalf("notes anthropic models=%+v", notes.Providers[0].Accounts[0].Models) + } + + report := response["report"] + if report.Integration != "report" || report.RequestCount != 1 || report.TotalTokens != 5 { + t.Fatalf("report=%+v", report) + } + if len(report.Providers) != 1 || report.Providers[0].Accounts[0].Account != "default" { + t.Fatalf("report providers=%+v", report.Providers) + } +} + +func TestHandler_APIIntegrationsHistory_RangeAndDownsample(t *testing.T) { + s, err := store.New(":memory:") + if err != nil { + t.Fatalf("store.New: %v", err) + } + defer s.Close() + + base := time.Now().UTC().Add(-30 * time.Minute).Truncate(time.Minute) + for i := 0; i < 520; i++ { + line := `{"ts":"` + base.Add(time.Duration(i)*time.Minute).Format(time.RFC3339) + `","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":1,"completion_tokens":1}` + insertAPIIntegrationEventForTest(t, s, line, "/tmp/api-integrations/notes.jsonl") + } + + h := NewHandler(s, nil, nil, nil, &config.Config{APIIntegrationsEnabled: true, APIIntegrationsDir: "/tmp/api-integrations"}) + req := httptest.NewRequest(http.MethodGet, "/api/api-integrations/history?range=30d", nil) + rr := httptest.NewRecorder() + + h.APIIntegrationsHistory(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("status=%d want 200", rr.Code) + } + + var response 
map[string][]struct { + CapturedAt string `json:"capturedAt"` + RequestCount int `json:"requestCount"` + PromptTokens int `json:"promptTokens"` + CompletionTokens int `json:"completionTokens"` + TotalTokens int `json:"totalTokens"` + TotalCostUSD float64 `json:"totalCostUsd"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil { + t.Fatalf("json.Unmarshal: %v", err) + } + + buckets := response["notes"] + if len(buckets) == 0 { + t.Fatal("expected history buckets for notes") + } + if len(buckets) > maxChartPoints { + t.Fatalf("len(buckets)=%d exceeds maxChartPoints=%d", len(buckets), maxChartPoints) + } + if buckets[0].RequestCount < 1 || buckets[0].TotalTokens < 2 { + t.Fatalf("unexpected first bucket: %+v", buckets[0]) + } +} + +func TestHandler_APIIntegrationsHistory_InvalidRange(t *testing.T) { + s, err := store.New(":memory:") + if err != nil { + t.Fatalf("store.New: %v", err) + } + defer s.Close() + + h := NewHandler(s, nil, nil, nil, &config.Config{APIIntegrationsEnabled: true, APIIntegrationsDir: "/tmp/api-integrations"}) + req := httptest.NewRequest(http.MethodGet, "/api/api-integrations/history?range=2h", nil) + rr := httptest.NewRecorder() + + h.APIIntegrationsHistory(rr, req) + + if rr.Code != http.StatusBadRequest { + t.Fatalf("status=%d want 400", rr.Code) + } +} + +func TestHandler_APIIntegrationsHealth_StatusFilesAndAlerts(t *testing.T) { + s, err := store.New(":memory:") + if err != nil { + t.Fatalf("store.New: %v", err) + } + defer s.Close() + + if err := s.UpsertAPIIntegrationIngestState(&apiintegrations.IngestState{ + SourcePath: "/tmp/api-integrations/notes.jsonl", + Offset: 200, + FileSize: 256, + FileModTime: time.Date(2026, 4, 3, 12, 10, 0, 0, time.UTC), + PartialLine: `{"ts":"2026`, + }); err != nil { + t.Fatalf("UpsertAPIIntegrationIngestState: %v", err) + } + insertAPIIntegrationEventForTest(t, s, 
`{"ts":"2026-04-03T12:09:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":10,"completion_tokens":5}`, "/tmp/api-integrations/notes.jsonl") + if _, err := s.CreateSystemAlert("api_integrations", "ingest_warning", "Malformed line", "Skipped one malformed event", "warning", `{"sourcePath":"/tmp/api-integrations/notes.jsonl"}`); err != nil { + t.Fatalf("CreateSystemAlert: %v", err) + } + + h := NewHandler(s, nil, nil, nil, &config.Config{APIIntegrationsEnabled: true, APIIntegrationsDir: "/tmp/api-integrations"}) + h.agentManager = &mockProviderAgentController{running: map[string]bool{"api_integrations": true}} + req := httptest.NewRequest(http.MethodGet, "/api/api-integrations/health", nil) + rr := httptest.NewRecorder() + + h.APIIntegrationsHealth(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("status=%d want 200", rr.Code) + } + + var response struct { + Enabled bool `json:"enabled"` + Dir string `json:"dir"` + Running bool `json:"running"` + Files []struct { + SourcePath string `json:"sourcePath"` + OffsetBytes int64 `json:"offsetBytes"` + FileSize int64 `json:"fileSize"` + PartialLine string `json:"partialLine"` + FileModTime string `json:"fileModTime"` + UpdatedAt string `json:"updatedAt"` + LastCapturedAt string `json:"lastCapturedAt"` + } `json:"files"` + Alerts []struct { + ID int64 `json:"id"` + Type string `json:"type"` + Title string `json:"title"` + Message string `json:"message"` + Severity string `json:"severity"` + CreatedAt string `json:"createdAt"` + Metadata string `json:"metadata"` + } `json:"alerts"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil { + t.Fatalf("json.Unmarshal: %v", err) + } + + if !response.Enabled || !response.Running || response.Dir != "/tmp/api-integrations" { + t.Fatalf("unexpected health response: %+v", response) + } + if len(response.Files) != 1 || response.Files[0].SourcePath != "/tmp/api-integrations/notes.jsonl" || response.Files[0].LastCapturedAt != 
"2026-04-03T12:09:00Z" { + t.Fatalf("unexpected files payload: %+v", response.Files) + } + if len(response.Alerts) != 1 || response.Alerts[0].Type != "ingest_warning" { + t.Fatalf("unexpected alerts payload: %+v", response.Alerts) + } +} + +func TestHandler_APIIntegrationsHealth_Disabled(t *testing.T) { + s, err := store.New(":memory:") + if err != nil { + t.Fatalf("store.New: %v", err) + } + defer s.Close() + + h := NewHandler(s, nil, nil, nil, &config.Config{APIIntegrationsEnabled: false, APIIntegrationsDir: ""}) + req := httptest.NewRequest(http.MethodGet, "/api/api-integrations/health", nil) + rr := httptest.NewRecorder() + + h.APIIntegrationsHealth(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("status=%d want 200", rr.Code) + } + var response struct { + Enabled bool `json:"enabled"` + Dir string `json:"dir"` + Running bool `json:"running"` + Files []interface{} `json:"files"` + Alerts []interface{} `json:"alerts"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil { + t.Fatalf("json.Unmarshal: %v", err) + } + if response.Enabled || response.Running || len(response.Files) != 0 || len(response.Alerts) != 0 { + t.Fatalf("unexpected disabled response: %+v", response) + } +} + +func TestHandler_Current_DoesNotIncludeAPIIntegrationsTelemetry(t *testing.T) { + s, err := store.New(":memory:") + if err != nil { + t.Fatalf("store.New: %v", err) + } + defer s.Close() + + insertAPIIntegrationEventForTest(t, s, `{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":10,"completion_tokens":5}`, "/tmp/api-integrations/notes.jsonl") + + cfg := createTestConfigWithSynthetic() + h := NewHandler(s, nil, nil, nil, cfg) + req := httptest.NewRequest(http.MethodGet, "/api/current", nil) + rr := httptest.NewRecorder() + + h.Current(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("status=%d want 200", rr.Code) + } + var response map[string]interface{} + if err := 
json.Unmarshal(rr.Body.Bytes(), &response); err != nil { + t.Fatalf("json.Unmarshal: %v", err) + } + if _, ok := response["notes"]; ok { + t.Fatalf("unexpected API integrations telemetry in /api/current: %v", response) + } +} + +func TestServer_APIIntegrationsRoute_UsesAuthMiddleware(t *testing.T) { + s, err := store.New(":memory:") + if err != nil { + t.Fatalf("store.New: %v", err) + } + defer s.Close() + + insertAPIIntegrationEventForTest(t, s, `{"ts":"2026-04-03T12:00:00Z","integration":"notes","provider":"anthropic","model":"claude-3-7-sonnet","prompt_tokens":10,"completion_tokens":5}`, "/tmp/api-integrations/notes.jsonl") + + h := NewHandler(s, nil, nil, nil, &config.Config{APIIntegrationsEnabled: true, APIIntegrationsDir: "/tmp/api-integrations"}) + passHash, err := HashPassword("secret123") + if err != nil { + t.Fatalf("HashPassword: %v", err) + } + server := NewServer(0, h, nil, "admin", passHash, "", "") + + req := httptest.NewRequest(http.MethodGet, "/api/api-integrations/current", nil) + req.SetBasicAuth("admin", "secret123") + rr := httptest.NewRecorder() + + server.httpServer.Handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("status=%d want 200", rr.Code) + } + var response map[string]interface{} + if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil { + t.Fatalf("json.Unmarshal: %v", err) + } + if _, ok := response["notes"]; !ok { + t.Fatalf("expected notes payload, got %v", response) + } +} diff --git a/internal/web/handlers.go b/internal/web/handlers.go index a7ac783..3a75095 100644 --- a/internal/web/handlers.go +++ b/internal/web/handlers.go @@ -437,8 +437,8 @@ func (h *Handler) codexProfileSave(w http.ResponseWriter, r *http.Request) { } respondJSON(w, http.StatusCreated, map[string]interface{}{ - "message": "profile saved", - "name": req.Name, + "message": "profile saved", + "name": req.Name, "accountID": creds.AccountID, }) } @@ -479,7 +479,7 @@ func (h *Handler) codexProfileDelete(w http.ResponseWriter, r 
*http.Request) { respondJSON(w, http.StatusOK, map[string]interface{}{ "message": "profile deleted", - "name": name, + "name": name, }) } @@ -600,9 +600,9 @@ func (h *Handler) codexProfileRefresh(w http.ResponseWriter, r *http.Request) { } respondJSON(w, http.StatusOK, map[string]interface{}{ - "message": "profile refreshed", - "name": name, - "accountID": creds.AccountID, + "message": "profile refreshed", + "name": name, + "accountID": creds.AccountID, }) } @@ -1051,6 +1051,40 @@ func (h *Handler) providerVisibilitySettings() map[string]interface{} { return vis } +func (h *Handler) apiIntegrationsVisibilityMap() map[string]bool { + if h.store == nil { + return map[string]bool{} + } + raw, err := h.store.GetSetting("api_integrations_visibility") + if err != nil || raw == "" { + return map[string]bool{} + } + var vis map[string]bool + if err := json.Unmarshal([]byte(raw), &vis); err != nil { + return map[string]bool{} + } + return vis +} + +func (h *Handler) apiIntegrationsDashboardVisible() bool { + vis := h.apiIntegrationsVisibilityMap() + if dashboard, ok := vis["dashboard"]; ok { + return dashboard + } + return true +} + +func (h *Handler) saveAPIIntegrationsVisibility(vis map[string]bool) error { + if h.store == nil { + return fmt.Errorf("store not available") + } + data, err := json.Marshal(vis) + if err != nil { + return err + } + return h.store.SetSetting("api_integrations_visibility", string(data)) +} + func providerPollingValue(entry interface{}) (bool, bool) { switch v := entry.(type) { case map[string]interface{}: @@ -1316,8 +1350,8 @@ func stripProviderSecrets(providers map[string]interface{}) { continue } if str, ok := v.(string); ok && str != "" { - m[k] = "" // Don't send actual value - m[k+"_set"] = true // Signal that it's configured + m[k] = "" // Don't send actual value + m[k+"_set"] = true // Signal that it's configured } } } @@ -1635,6 +1669,8 @@ func (h *Handler) Dashboard(w http.ResponseWriter, r *http.Request) { providers := []string{} 
currentProvider := "" + hasTools := false + toolsVisible := false if h.config != nil { providers = h.config.AvailableProviders() @@ -1655,6 +1691,12 @@ func (h *Handler) Dashboard(w http.ResponseWriter, r *http.Request) { } } + hasTools = h.config.APIIntegrationsEnabled + toolsVisible = hasTools && h.apiIntegrationsDashboardVisible() + if toolsVisible { + providers = append(providers, "api-integrations") + } + // Always add "both" (All tab) when multiple providers configured if h.config.HasMultipleProviders() { providers = append(providers, "both") @@ -1691,6 +1733,7 @@ func (h *Handler) Dashboard(w http.ResponseWriter, r *http.Request) { hasAntigravity := hasVisibleProvider("antigravity") hasMiniMax := hasVisibleProvider("minimax") hasOpenRouter := hasVisibleProvider("openrouter") + hasToolsVisible := hasVisibleProvider("api-integrations") _ = hasOpenRouter // used by template if needed data := map[string]interface{}{ "Title": "Dashboard", @@ -1704,6 +1747,8 @@ func (h *Handler) Dashboard(w http.ResponseWriter, r *http.Request) { "HasCodex": hasCodex, "HasAntigravity": hasAntigravity, "HasMiniMax": hasMiniMax, + "HasTools": hasToolsVisible, + "ToolsVisible": toolsVisible, "PollIntervalSec": h.getPollIntervalSec(), "BasePath": h.getBasePath(), } @@ -5725,6 +5770,14 @@ func (h *Handler) GetSettings(w http.ResponseWriter, r *http.Request) { } } + toolsVisJSON, _ := h.store.GetSetting("api_integrations_visibility") + if toolsVisJSON != "" { + var toolsVis map[string]bool + if json.Unmarshal([]byte(toolsVisJSON), &toolsVis) == nil { + result["api_integrations_visibility"] = toolsVis + } + } + // Provider-specific settings (overrides .env) provJSON, _ := h.store.GetSetting("provider_settings") if provJSON != "" { @@ -5992,6 +6045,29 @@ func (h *Handler) UpdateSettings(w http.ResponseWriter, r *http.Request) { result["provider_visibility"] = vis } + if raw, ok := body["api_integrations_visibility"]; ok { + var vis map[string]bool + if err := json.Unmarshal(raw, &vis); 
err != nil { + respondError(w, http.StatusBadRequest, "invalid api_integrations_visibility value") + return + } + if vis == nil { + vis = map[string]bool{} + } + normalized := map[string]bool{ + "dashboard": true, + } + if dashboard, exists := vis["dashboard"]; exists { + normalized["dashboard"] = dashboard + } + if err := h.saveAPIIntegrationsVisibility(normalized); err != nil { + h.logger.Error("failed to save API integrations visibility settings", "error", err) + respondError(w, http.StatusInternalServerError, "failed to save API integrations visibility settings") + return + } + result["api_integrations_visibility"] = normalized + } + // Handle menubar settings if raw, ok := body["menubar"]; ok { var settings menubar.Settings diff --git a/internal/web/handlers_test.go b/internal/web/handlers_test.go index 0c3f671..79a2fb8 100644 --- a/internal/web/handlers_test.go +++ b/internal/web/handlers_test.go @@ -79,6 +79,55 @@ func TestHandler_Dashboard_ReturnsHTML(t *testing.T) { } } +func TestHandler_Dashboard_IncludesAPIIntegrationsTabWhenVisible(t *testing.T) { + s, _ := store.New(":memory:") + defer s.Close() + + cfg := createTestConfigWithSynthetic() + cfg.APIIntegrationsEnabled = true + h := NewHandler(s, nil, nil, nil, cfg) + + req := httptest.NewRequest(http.MethodGet, "/?provider=api-integrations", nil) + rr := httptest.NewRecorder() + h.Dashboard(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected status 200, got %d", rr.Code) + } + body := rr.Body.String() + if !strings.Contains(body, `data-provider="api-integrations"`) { + t.Fatalf("expected API Integrations tab in dashboard, got %s", body) + } + if !strings.Contains(body, `id="api-integrations-dashboard"`) { + t.Fatalf("expected API integrations dashboard shell, got %s", body) + } +} + +func TestHandler_Dashboard_OmitsAPIIntegrationsTabWhenHidden(t *testing.T) { + s, _ := store.New(":memory:") + defer s.Close() + + if err := s.SetSetting("api_integrations_visibility", `{"dashboard":false}`); err 
!= nil { + t.Fatalf("SetSetting: %v", err) + } + + cfg := createTestConfigWithSynthetic() + cfg.APIIntegrationsEnabled = true + h := NewHandler(s, nil, nil, nil, cfg) + + req := httptest.NewRequest(http.MethodGet, "/", nil) + rr := httptest.NewRecorder() + h.Dashboard(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected status 200, got %d", rr.Code) + } + body := rr.Body.String() + if strings.Contains(body, `data-provider="api-integrations"`) { + t.Fatalf("did not expect API Integrations tab in dashboard") + } +} + func TestHandler_Current_ReturnsJSON(t *testing.T) { s, _ := store.New(":memory:") defer s.Close() @@ -3572,6 +3621,64 @@ func TestHandler_UpdateSettings_ProviderVisibility(t *testing.T) { } } +func TestHandler_GetSettings_APIIntegrationsVisibility(t *testing.T) { + s, _ := store.New(":memory:") + defer s.Close() + + if err := s.SetSetting("api_integrations_visibility", `{"dashboard":false}`); err != nil { + t.Fatalf("SetSetting: %v", err) + } + + cfg := createTestConfigWithSynthetic() + cfg.APIIntegrationsEnabled = true + h := NewHandler(s, nil, nil, nil, cfg) + + req := httptest.NewRequest(http.MethodGet, "/api/settings", nil) + rr := httptest.NewRecorder() + h.GetSettings(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected status 200, got %d", rr.Code) + } + + var response map[string]interface{} + if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil { + t.Fatalf("json.Unmarshal: %v", err) + } + toolsVis, ok := response["api_integrations_visibility"].(map[string]interface{}) + if !ok { + t.Fatalf("expected api_integrations_visibility in response, got %v", response["api_integrations_visibility"]) + } + if toolsVis["dashboard"] != false { + t.Fatalf("expected api_integrations_visibility.dashboard=false, got %v", toolsVis["dashboard"]) + } +} + +func TestHandler_UpdateSettings_APIIntegrationsVisibility(t *testing.T) { + s, _ := store.New(":memory:") + defer s.Close() + + cfg := createTestConfigWithSynthetic() + 
cfg.APIIntegrationsEnabled = true + h := NewHandler(s, nil, nil, nil, cfg) + + body := strings.NewReader(`{"api_integrations_visibility":{"dashboard":false}}`) + req := httptest.NewRequest(http.MethodPut, "/api/settings", body) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + + h.UpdateSettings(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected status 200, got %d; body: %s", rr.Code, rr.Body.String()) + } + + val, _ := s.GetSetting("api_integrations_visibility") + if !strings.Contains(val, `"dashboard":false`) { + t.Fatalf("expected api_integrations_visibility to be saved, got %s", val) + } +} + func TestHandler_UpdateSettings_Notifications(t *testing.T) { s, _ := store.New(":memory:") defer s.Close() @@ -3641,14 +3748,14 @@ type mockNotifier struct { reloadCalled bool } -func (m *mockNotifier) Reload() error { m.reloadCalled = true; return nil } -func (m *mockNotifier) ConfigureSMTP() error { return nil } -func (m *mockNotifier) ConfigurePush() error { return nil } -func (m *mockNotifier) SendTestEmail() error { return m.sendTestErr } -func (m *mockNotifier) SendTestPush() error { return nil } -func (m *mockNotifier) TestSMTPDiag() (string, error) { return "", m.sendTestErr } -func (m *mockNotifier) SetEncryptionKey(_ string) {} -func (m *mockNotifier) GetVAPIDPublicKey() string { return "" } +func (m *mockNotifier) Reload() error { m.reloadCalled = true; return nil } +func (m *mockNotifier) ConfigureSMTP() error { return nil } +func (m *mockNotifier) ConfigurePush() error { return nil } +func (m *mockNotifier) SendTestEmail() error { return m.sendTestErr } +func (m *mockNotifier) SendTestPush() error { return nil } +func (m *mockNotifier) TestSMTPDiag() (string, error) { return "", m.sendTestErr } +func (m *mockNotifier) SetEncryptionKey(_ string) {} +func (m *mockNotifier) GetVAPIDPublicKey() string { return "" } func TestHandler_SMTPTest_Success(t *testing.T) { cfg := createTestConfigWithSynthetic() @@ -5939,14 
+6046,14 @@ type mockNotifierWithVAPID struct { vapidKey string } -func (m *mockNotifierWithVAPID) Reload() error { m.reloadCalled = true; return nil } -func (m *mockNotifierWithVAPID) ConfigureSMTP() error { return nil } -func (m *mockNotifierWithVAPID) ConfigurePush() error { return nil } -func (m *mockNotifierWithVAPID) SendTestEmail() error { return m.sendTestErr } -func (m *mockNotifierWithVAPID) SendTestPush() error { return m.sendPushErr } -func (m *mockNotifierWithVAPID) TestSMTPDiag() (string, error) { return "", m.sendTestErr } -func (m *mockNotifierWithVAPID) SetEncryptionKey(_ string) {} -func (m *mockNotifierWithVAPID) GetVAPIDPublicKey() string { return m.vapidKey } +func (m *mockNotifierWithVAPID) Reload() error { m.reloadCalled = true; return nil } +func (m *mockNotifierWithVAPID) ConfigureSMTP() error { return nil } +func (m *mockNotifierWithVAPID) ConfigurePush() error { return nil } +func (m *mockNotifierWithVAPID) SendTestEmail() error { return m.sendTestErr } +func (m *mockNotifierWithVAPID) SendTestPush() error { return m.sendPushErr } +func (m *mockNotifierWithVAPID) TestSMTPDiag() (string, error) { return "", m.sendTestErr } +func (m *mockNotifierWithVAPID) SetEncryptionKey(_ string) {} +func (m *mockNotifierWithVAPID) GetVAPIDPublicKey() string { return m.vapidKey } func TestHandler_PushVAPIDKey_Success(t *testing.T) { h := NewHandler(nil, nil, nil, nil, createTestConfigWithSynthetic()) diff --git a/internal/web/server.go b/internal/web/server.go index 2c573a1..ddb3dcb 100644 --- a/internal/web/server.go +++ b/internal/web/server.go @@ -94,6 +94,9 @@ func NewServer(port int, handler *Handler, logger *slog.Logger, username, passwo }) mux.HandleFunc(p("/api/minimax/accounts"), handler.MiniMaxAccounts) mux.HandleFunc(p("/api/minimax/accounts/usage"), handler.MiniMaxAccountsUsage) + mux.HandleFunc(p("/api/api-integrations/current"), handler.APIIntegrationsCurrent) + mux.HandleFunc(p("/api/api-integrations/history"), 
handler.APIIntegrationsHistory) + mux.HandleFunc(p("/api/api-integrations/health"), handler.APIIntegrationsHealth) // System alerts (in-dashboard notifications) mux.HandleFunc(p("/api/alerts"), handler.SystemAlerts) diff --git a/internal/web/static/app.js b/internal/web/static/app.js index 11a79b2..4ab25b1 100644 --- a/internal/web/static/app.js +++ b/internal/web/static/app.js @@ -48,6 +48,8 @@ async function authFetch(url, options) { function getCurrentProvider() { const bothView = document.getElementById('both-view') || document.getElementById('all-providers-container'); if (bothView) return 'both'; + const apiIntegrationsDashboard = document.getElementById('api-integrations-dashboard'); + if (apiIntegrationsDashboard) return 'api-integrations'; const anthropicGrid = document.getElementById('quota-grid-anthropic'); if (anthropicGrid) return 'anthropic'; const copilotGrid = document.getElementById('quota-grid-copilot'); @@ -81,7 +83,7 @@ function providerParam() { } function shouldShowHistoryTables(provider = getCurrentProvider()) { - return provider !== 'both'; + return provider !== 'both' && provider !== 'api-integrations'; } function getBothViewProviders() { @@ -161,6 +163,12 @@ const State = { cyclesRequestSeq: 0, sessionsRequestSeq: 0, overviewRequestSeq: 0, + apiIntegrationsCurrent: null, + apiIntegrationsHistory: null, + apiIntegrationsHealth: null, + apiIntegrationsVisibility: { dashboard: true }, + apiIntegrationsSelectedMetric: 'tokenPerCall', + apiIntegrationsActiveWindow: '8d', }; // ── Persistence ── @@ -540,6 +548,11 @@ async function loadHiddenInsights() { } else { State.providerVisibility = {}; } + if (data.api_integrations_visibility && typeof data.api_integrations_visibility === 'object') { + State.apiIntegrationsVisibility = data.api_integrations_visibility; + } else { + State.apiIntegrationsVisibility = { dashboard: true }; + } if (getCurrentProvider() === 'both' && (State.allProvidersCurrent || State.allProvidersInsights || 
State.allProvidersHistory)) { renderAllProvidersView(); @@ -598,6 +611,75 @@ function saveDefaultProvider(provider) { } } +const apiIntegrationsMetricOptions = new Set([ + 'tokenPerCall', + 'requestCount', + 'accumulatedTokens', + 'totalCostUsd', + 'totalTokens', +]); + +function normalizeAPIIntegrationsMetric(metric) { + const value = String(metric || '').trim(); + return apiIntegrationsMetricOptions.has(value) ? value : 'tokenPerCall'; +} + +function loadAPIIntegrationsPreferences() { + try { + const metric = localStorage.getItem('onwatch-api-integrations-metric'); + State.apiIntegrationsSelectedMetric = normalizeAPIIntegrationsMetric(metric); + const activeWindow = localStorage.getItem('onwatch-api-integrations-active-window'); + if (activeWindow) { + State.apiIntegrationsActiveWindow = activeWindow; + } + } catch (e) { + // silent + } +} + +function saveAPIIntegrationsMetric(metric) { + try { + localStorage.setItem('onwatch-api-integrations-metric', normalizeAPIIntegrationsMetric(metric)); + } catch (e) { + // silent + } +} + +function saveAPIIntegrationsActiveWindow(value) { + try { + localStorage.setItem('onwatch-api-integrations-active-window', value); + } catch (e) { + // silent + } +} + +function formatBytes(value) { + const bytes = Number(value || 0); + if (!Number.isFinite(bytes) || bytes <= 0) return '0 B'; + const units = ['B', 'KB', 'MB', 'GB']; + let size = bytes; + let unit = 0; + while (size >= 1024 && unit < units.length - 1) { + size /= 1024; + unit += 1; + } + return `${size >= 10 || unit === 0 ? 
Math.round(size) : size.toFixed(1)} ${units[unit]}`; +} + +function parseAPIIntegrationsWindow(value = State.apiIntegrationsActiveWindow) { + switch (value) { + case '24h': + return 24 * 60 * 60 * 1000; + case '3d': + return 3 * 24 * 60 * 60 * 1000; + case '30d': + return 30 * 24 * 60 * 60 * 1000; + case '8d': + default: + return 8 * 24 * 60 * 60 * 1000; + } +} + function toggleQuotaVisibility(quotaType) { if (State.hiddenQuotas.has(quotaType)) { State.hiddenQuotas.delete(quotaType); @@ -2629,6 +2711,15 @@ function formatNumber(num) { return num.toLocaleString('en-US', { maximumFractionDigits: 1 }); } +function formatCurrencyUSD(num) { + return new Intl.NumberFormat('en-US', { + style: 'currency', + currency: 'USD', + minimumFractionDigits: num < 1 ? 4 : 2, + maximumFractionDigits: num < 1 ? 4 : 2 + }).format(num || 0); +} + function formatDateTime(isoString) { const d = new Date(isoString); const opts = { month: 'short', day: 'numeric', hour: 'numeric', minute: '2-digit' }; @@ -3096,10 +3187,50 @@ async function fetchCurrent() { State.currentRequestSeq = requestSeq; try { + if (requestProvider === 'api-integrations') { + const [currentRes, healthRes] = await Promise.all([ + authFetch(`${API_BASE}/api/api-integrations/current`), + authFetch(`${API_BASE}/api/api-integrations/health`) + ]); + if (!currentRes.ok || !healthRes.ok) throw new Error('Failed to fetch API integrations'); + const [currentData, healthData] = await Promise.all([currentRes.json(), healthRes.json()]); + + requestAnimationFrame(() => { + if (State.currentRequestSeq !== requestSeq) return; + if (getCurrentProvider() !== requestProvider) return; + State.apiIntegrationsCurrent = currentData; + State.apiIntegrationsHealth = healthData; + renderAPIIntegrationsCards(); + renderAPIIntegrationsHealth(); + renderAPIIntegrationsInsights(); + + const lastUpdated = document.getElementById('last-updated'); + if (lastUpdated) lastUpdated.textContent = `Last updated: ${new Date().toLocaleTimeString()}`; + const 
statusDot = document.getElementById('status-dot'); + if (statusDot) statusDot.classList.remove('stale'); + }); + return; + } + const res = await authFetch(`${API_BASE}/api/current?${providerParam()}`); if (!res.ok) throw new Error('Failed to fetch'); const data = await res.json(); + let apiIntegrationsCurrentData = null; + let apiIntegrationsHealthData = null; + if (requestProvider === 'both' && State.apiIntegrationsVisibility?.dashboard !== false) { + try { + const [apiIntegrationsCurrentRes, apiIntegrationsHealthRes] = await Promise.all([ + authFetch(`${API_BASE}/api/api-integrations/current`), + authFetch(`${API_BASE}/api/api-integrations/health`) + ]); + if (apiIntegrationsCurrentRes.ok) apiIntegrationsCurrentData = await apiIntegrationsCurrentRes.json(); + if (apiIntegrationsHealthRes.ok) apiIntegrationsHealthData = await apiIntegrationsHealthRes.json(); + } catch (e) { + // silent - API integrations summary should not break all-provider current load + } + } + requestAnimationFrame(() => { if (State.currentRequestSeq !== requestSeq) return; if (getCurrentProvider() !== requestProvider) return; @@ -3107,6 +3238,11 @@ async function fetchCurrent() { const provider = requestProvider; if (provider === 'both') { + if (apiIntegrationsCurrentData || apiIntegrationsHealthData) { + data.apiIntegrations = { current: apiIntegrationsCurrentData || {}, health: apiIntegrationsHealthData || null }; + State.apiIntegrationsCurrent = apiIntegrationsCurrentData || {}; + State.apiIntegrationsHealth = apiIntegrationsHealthData || null; + } State.allProvidersCurrent = data; renderAllProvidersView(); } else if (provider === 'copilot') { @@ -3341,6 +3477,10 @@ const insightIcons = { async function fetchDeepInsights() { const provider = getCurrentProvider(); + if (provider === 'api-integrations') { + renderAPIIntegrationsInsights(); + return; + } const requestProvider = provider; const requestAccount = requestProvider === 'codex' ? 
State.codexAccount : null; const requestRange = State.insightsRange; @@ -3409,6 +3549,147 @@ async function fetchDeepInsights() { } } +function ensureAPIIntegrationsInsightsControls() { + const header = document.querySelector('#api-integrations-recent-insights-panel .section-header'); + if (!header || header.querySelector('.api-integrations-insights-controls')) return; + + const controls = document.createElement('div'); + controls.className = 'api-integrations-insights-controls'; + controls.innerHTML = ` + Active Window + + `; + const select = controls.querySelector('#api-integrations-active-window-select'); + if (select) { + select.value = State.apiIntegrationsActiveWindow || '8d'; + select.addEventListener('change', () => { + State.apiIntegrationsActiveWindow = select.value || '8d'; + saveAPIIntegrationsActiveWindow(State.apiIntegrationsActiveWindow); + renderAPIIntegrationsInsights(); + }); + } + header.appendChild(controls); +} + +function renderAPIIntegrationsInsights() { + const allTimeEl = document.getElementById('api-integrations-all-time-stats'); + const recentEl = document.getElementById('api-integrations-recent-stats'); + if (!allTimeEl || !recentEl || getCurrentProvider() !== 'api-integrations') return; + + ensureAPIIntegrationsInsightsControls(); + + const entries = getAPIIntegrationEntries(); + const history = State.apiIntegrationsHistory || {}; + if (entries.length === 0) { + allTimeEl.innerHTML = '

Run your integrations to populate all-time totals here.

'; + recentEl.innerHTML = '

Recent activity appears here after data is ingested.

'; + return; + } + + const now = Date.now(); + const activeWindowMs = parseAPIIntegrationsWindow(); + const activeThreshold = now - activeWindowMs; + + const totals = entries.reduce((acc, entry) => { + acc.inputTokens += Number(entry.promptTokens || 0); + acc.outputTokens += Number(entry.completionTokens || 0); + acc.totalTokens += Number(entry.totalTokens || 0); + acc.requestCount += Number(entry.requestCount || 0); + const lastCapturedAt = entry.lastCapturedAt ? Date.parse(entry.lastCapturedAt) : NaN; + if (Number.isFinite(lastCapturedAt) && lastCapturedAt >= activeThreshold) { + acc.activeIntegrations += 1; + } + return acc; + }, { + inputTokens: 0, + outputTokens: 0, + totalTokens: 0, + requestCount: 0, + activeIntegrations: 0, + }); + + let recentInputTokens = 0; + let recentOutputTokens = 0; + let recentRequestCount = 0; + let recentWindowTokens = 0; + let firstHalfTokens = 0; + let secondHalfTokens = 0; + let busiestRecentIntegration = null; + let busiestRecentTokens = -1; + Object.entries(history).forEach(([integrationName, rows]) => { + const typedRows = Array.isArray(rows) ? 
rows : []; + if (typedRows.length === 0) return; + const halfIndex = Math.ceil(typedRows.length / 2); + let integrationRecentTokens = 0; + typedRows.forEach((row, index) => { + const value = Number(row.totalTokens || 0); + const inputValue = Number(row.promptTokens || 0); + const outputValue = Number(row.completionTokens || 0); + const requestValue = Number(row.requestCount || 0); + recentWindowTokens += value; + recentInputTokens += inputValue; + recentOutputTokens += outputValue; + recentRequestCount += requestValue; + integrationRecentTokens += value; + if (index < halfIndex) { + firstHalfTokens += value; + } else { + secondHalfTokens += value; + } + }); + if (integrationRecentTokens > busiestRecentTokens) { + busiestRecentTokens = integrationRecentTokens; + busiestRecentIntegration = integrationName; + } + }); + + const totalProviders = new Set(entries.flatMap((entry) => + (Array.isArray(entry.providers) ? entry.providers : []).map((provider) => provider.provider).filter(Boolean) + )); + const avgTokensPerCall = totals.requestCount > 0 ? totals.totalTokens / totals.requestCount : 0; + const trendDelta = secondHalfTokens - firstHalfTokens; + const trendPct = firstHalfTokens > 0 ? (trendDelta / firstHalfTokens) * 100 : 0; + const recentAvgTokensPerCall = recentRequestCount > 0 ? 
recentWindowTokens / recentRequestCount : 0; + + allTimeEl.innerHTML = [ + { label: 'Tracked Integrations', value: formatNumber(entries.length), sublabel: 'Integrations seen since records started' }, + { label: 'Providers', value: formatNumber(totalProviders.size), sublabel: 'Distinct providers across all integrations' }, + { label: 'Total Tokens', value: formatNumber(totals.totalTokens), sublabel: 'Accumulated token volume' }, + { label: 'Input Tokens', value: formatNumber(totals.inputTokens), sublabel: 'Prompt-side tokens across all time' }, + { label: 'Output Tokens', value: formatNumber(totals.outputTokens), sublabel: 'Completion-side tokens across all time' }, + { label: 'API Calls', value: formatNumber(totals.requestCount), sublabel: 'Recorded requests since this dataset started' }, + { label: 'Average Tokens per Call', value: avgTokensPerCall > 0 ? formatNumber(avgTokensPerCall.toFixed(1)) : '0.0', sublabel: 'Average request size across all recorded calls' }, + ].map((stat) => ` +
+
${stat.value}
+
${stat.label}
+
${stat.sublabel}
+
+ `).join(''); + + recentEl.innerHTML = [ + { label: `Active Integrations (${State.apiIntegrationsActiveWindow})`, value: formatNumber(totals.activeIntegrations), sublabel: 'Integrations used inside the active window' }, + { label: 'Tokens in Visible Range', value: formatNumber(recentWindowTokens), sublabel: 'Total token volume in the selected chart range' }, + { label: 'Input Tokens in Range', value: formatNumber(recentInputTokens), sublabel: 'Prompt-side tokens in the selected range' }, + { label: 'Output Tokens in Range', value: formatNumber(recentOutputTokens), sublabel: 'Completion-side tokens in the selected range' }, + { label: 'API Calls in Range', value: formatNumber(recentRequestCount), sublabel: 'Recorded requests in the selected chart range' }, + { label: 'Usage Change vs Earlier Half', value: `${trendDelta >= 0 ? '+' : '-'}${Math.abs(trendPct).toFixed(1)}%`, sublabel: `Compared with the earlier half of the visible window (${trendDelta >= 0 ? 'up' : 'down'} ${formatNumber(Math.abs(trendDelta))} tokens)` }, + { label: 'Average Tokens per Call in Range', value: recentAvgTokensPerCall > 0 ? formatNumber(recentAvgTokensPerCall.toFixed(1)) : '0.0', sublabel: 'Average request size inside the visible window' }, + { label: 'Busiest Integration in Range', value: busiestRecentIntegration ? escapeHTML(busiestRecentIntegration) : '--', sublabel: busiestRecentTokens > 0 ? `${formatNumber(busiestRecentTokens)} tokens in the selected range` : 'Waiting for more recent activity' }, + ].map((stat) => ` +
+
${stat.value}
+
${stat.label}
+
${stat.sublabel}
+
+ `).join(''); +} + function renderInsightsRangePills() { const header = document.querySelector('.insights-panel .section-header'); if (!header || header.querySelector('.insights-range-selector')) return; @@ -3635,7 +3916,7 @@ const crosshairPlugin = { // ── Chart Init & Update ── -function computeYMax(datasets, chart) { +function computeYMax(datasets, chart, options = {}) { // Filter out hidden datasets - check both ds.hidden and chart metadata visibility const visibleDatasets = datasets.filter((ds, i) => { if (ds.hidden) return false; @@ -3643,6 +3924,8 @@ function computeYMax(datasets, chart) { return ds.data && ds.data.length > 0; }); + const cap = options.cap === false ? Number.POSITIVE_INFINITY : 100; + // If no visible datasets, return default 10% if (visibleDatasets.length === 0) return 10; @@ -3662,7 +3945,7 @@ function computeYMax(datasets, chart) { // Add 30% headroom above the max value for better visualization // Round up to nearest 5 for cleaner axis labels const paddedMax = maxVal * 1.3; - const yMax = Math.min(Math.max(Math.ceil(paddedMax / 5) * 5, 10), 100); + const yMax = Math.min(Math.max(Math.ceil(paddedMax / 5) * 5, 10), cap); return yMax; } @@ -3679,7 +3962,9 @@ function initChart() { // Map dataset indices to quota types for visibility toggle const provider = getCurrentProvider(); let defaultDatasets; - if (provider === 'antigravity') { + if (provider === 'api-integrations') { + defaultDatasets = []; + } else if (provider === 'antigravity') { defaultDatasets = []; // Antigravity datasets are dynamic - populated when history data arrives } else if (provider === 'minimax') { defaultDatasets = []; // MiniMax datasets are dynamic - populated when history data arrives @@ -3708,8 +3993,11 @@ function initChart() { ? [] : provider === 'openrouter' ? [] + : provider === 'api-integrations' + ? 
[] : ['subscription', 'search', 'toolCalls']; + const isAPIIntegrations = provider === 'api-integrations'; State.chart = new Chart(ctx, { type: 'line', data: { @@ -3749,6 +4037,17 @@ function initChart() { usePointStyle: true, callbacks: { label: function(ctx) { + if (ctx.parsed.y == null) return null; + if (isAPIIntegrations) { + const metric = State.apiIntegrationsSelectedMetric || 'tokenPerCall'; + if (metric === 'totalCostUsd') { + return `${ctx.dataset.label}: ${formatCurrencyUSD(Number(ctx.parsed.y || 0))}`; + } + if (metric === 'tokenPerCall') { + return `${ctx.dataset.label}: ${formatNumber(Number(ctx.parsed.y || 0).toFixed(1))} tokens/call`; + } + return `${ctx.dataset.label}: ${formatNumber(Number(ctx.parsed.y || 0))}`; + } return `${ctx.dataset.label}: ${ctx.parsed.y.toFixed(1)}%`; } } @@ -3761,13 +4060,36 @@ function initChart() { grid: { color: colors.grid, drawBorder: false }, ticks: { color: colors.text, maxTicksLimit: 6, source: 'auto' } }, - y: { grid: { color: colors.grid, drawBorder: false }, ticks: { color: colors.text, callback: v => v + '%' }, min: 0, max: State.chartYMax } + y: { + grid: { color: colors.grid, drawBorder: false }, + ticks: { + color: colors.text, + callback: v => isAPIIntegrations + ? ((State.apiIntegrationsSelectedMetric || 'tokenPerCall') === 'totalCostUsd' + ? formatCurrencyUSD(Number(v || 0)) + : ((State.apiIntegrationsSelectedMetric || 'tokenPerCall') === 'tokenPerCall' + ? formatNumber(Number(v || 0).toFixed(1)) + : formatNumber(Number(v || 0)))) + : v + '%' + }, + title: { + display: isAPIIntegrations, + text: isAPIIntegrations ? 'Tokens per Call' : '', + color: colors.text, + }, + min: 0, + max: State.chartYMax + } } } }); } function updateChartTheme() { + if (getCurrentProvider() === 'api-integrations') { + fetchHistory(State.currentRange || '6h'); + return; + } if (getCurrentProvider() === 'both') { // Re-render both-mode provider cards so Chart.js picks up updated theme tokens. 
if (State.allProvidersCurrent || State.allProvidersInsights || State.allProvidersHistory) { @@ -3817,10 +4139,37 @@ async function fetchHistory(range) { State.historyRequestSeq = requestSeq; try { + if (requestProvider === 'api-integrations') { + const res = await authFetch(`${API_BASE}/api/api-integrations/history?range=${range}`); + if (!res.ok) throw new Error('Failed to fetch API integrations history'); + const data = await res.json(); + + if (State.historyRequestSeq !== requestSeq) return; + if (getCurrentProvider() !== requestProvider) return; + if (State.currentRange !== requestRange) return; + + State.apiIntegrationsHistory = data; + renderAPIIntegrationsChart(range); + renderAPIIntegrationsInsights(); + return; + } + const res = await authFetch(`${API_BASE}/api/history?range=${range}&${providerParam()}`); if (!res.ok) throw new Error('Failed to fetch history'); const data = await res.json(); + let apiIntegrationsHistoryData = null; + if (requestProvider === 'both' && State.apiIntegrationsVisibility?.dashboard !== false) { + try { + const apiIntegrationsRes = await authFetch(`${API_BASE}/api/api-integrations/history?range=${range}`); + if (apiIntegrationsRes.ok) { + apiIntegrationsHistoryData = await apiIntegrationsRes.json(); + } + } catch (e) { + // silent - API integrations summary should not break all-provider history load + } + } + if (State.historyRequestSeq !== requestSeq) return; if (getCurrentProvider() !== requestProvider) return; if (requestProvider === 'codex' && State.codexAccount !== requestAccount) return; @@ -3829,6 +4178,10 @@ async function fetchHistory(range) { const provider = requestProvider; if (provider === 'both') { + if (apiIntegrationsHistoryData) { + data.apiIntegrations = apiIntegrationsHistoryData; + State.apiIntegrationsHistory = apiIntegrationsHistoryData; + } State.allProvidersHistory = data; renderAllProvidersView(); return; @@ -4148,6 +4501,7 @@ const bothProviderNames = { antigravity: 'Antigravity', minimax: 'MiniMax', 
gemini: 'Gemini', + 'api-integrations': 'API Integrations', }; function escapeHTML(value) { @@ -4338,6 +4692,28 @@ function buildAllProviderEntries() { const entries = []; const addProviderEntry = (provider) => { + if (provider === 'api-integrations') { + const payload = current.apiIntegrations; + if (!payload || State.apiIntegrationsVisibility?.dashboard === false) return; + const summaryCurrent = payload.current && typeof payload.current === 'object' ? payload.current : {}; + const integrationEntries = Object.entries(summaryCurrent); + const summary = integrationEntries.reduce((acc, [, integration]) => { + acc.integrationCount++; + acc.requestCount += Number(integration.requestCount || 0); + acc.totalTokens += Number(integration.totalTokens || 0); + return acc; + }, { integrationCount: 0, requestCount: 0, totalTokens: 0 }); + entries.push({ + provider: 'api-integrations', + cardKey: sanitizeProviderCardKey('api-integrations-summary'), + title: 'API Integrations', + summary, + health: payload.health || null, + summaryOnly: true, + }); + return; + } + if (provider === 'codex') { const currentAccounts = Array.isArray(current.codexAccounts) ? current.codexAccounts @@ -4514,6 +4890,32 @@ function compactInsightText(text, maxLength = 84) { return `${candidate.slice(0, maxLength - 3).trimEnd()}...`; } +function renderAPIIntegrationsSummaryCard(entry, collapsed) { + const summary = entry.summary || {}; + const statusMeta = getAPIIntegrationsStatusMeta(entry.health); + return `
+
+
+ ${escapeHTML(entry.title)} + ${statusMeta.label} +
+ +
+
+
+
Tracked Integrations:${formatNumber(Number(summary.integrationCount || 0))}
+
Requests:${formatNumber(Number(summary.requestCount || 0))}
+
Tokens:${formatNumber(Number(summary.totalTokens || 0))}
+
Status:${statusMeta.label}
+
+
+
`; +} + function getSingleViewInsightStats(provider, stats) { if (provider !== 'minimax' && provider !== 'gemini') return stats; return sortItemsByPreference( @@ -4581,6 +4983,257 @@ function renderProviderInsightsHTML(provider, payload) { return items.join(''); } +const apiIntegrationsChartColorFallback = [ + { border: '#0D9488', bg: 'rgba(13, 148, 136, 0.06)' }, + { border: '#F59E0B', bg: 'rgba(245, 158, 11, 0.06)' }, + { border: '#3B82F6', bg: 'rgba(59, 130, 246, 0.06)' }, + { border: '#EF4444', bg: 'rgba(239, 68, 68, 0.06)' }, + { border: '#8B5CF6', bg: 'rgba(139, 92, 246, 0.06)' }, + { border: '#10B981', bg: 'rgba(16, 185, 129, 0.06)' }, +]; + +function getAPIIntegrationEntries(current = State.apiIntegrationsCurrent) { + if (!current || typeof current !== 'object') return []; + return Object.entries(current) + .map(([integration, payload]) => ({ integration, ...(payload || {}) })) + .sort((a, b) => { + const totalDiff = Number(b.totalTokens || 0) - Number(a.totalTokens || 0); + if (totalDiff !== 0) return totalDiff; + return String(a.integration || '').localeCompare(String(b.integration || '')); + }); +} + +function getAPIIntegrationsHealthStatus(health = State.apiIntegrationsHealth) { + if (!health || health.enabled === false) return 'disabled'; + if (Array.isArray(health.alerts) && health.alerts.length > 0) return 'alert'; + if (health.running) return 'running'; + return 'idle'; +} + +function getAPIIntegrationsStatusMeta(health = State.apiIntegrationsHealth) { + const status = getAPIIntegrationsHealthStatus(health); + if (status === 'disabled') return { label: 'Disabled', badgeStatus: 'critical' }; + if (status === 'alert') return { label: 'Alert', badgeStatus: 'warning' }; + if (status === 'running') return { label: 'Running', badgeStatus: 'healthy' }; + return { label: 'Idle', badgeStatus: 'danger' }; +} + +function renderAPIIntegrationsCards() { + const container = document.getElementById('api-integrations-grid'); + if (!container) return; + + const 
entries = getAPIIntegrationEntries(); + if (entries.length === 0) { + container.innerHTML = '

No API integration usage yet.

'; + return; + } + + container.innerHTML = entries.map((entry) => { + const providers = Array.isArray(entry.providers) ? entry.providers : []; + const providerNames = providers.map(p => p.provider).filter(Boolean); + const providerSummary = providerNames.length > 2 + ? `${providerNames.slice(0, 2).join(', ')} +${providerNames.length - 2}` + : providerNames.join(', '); + const promptTokens = Number(entry.promptTokens || 0); + const completionTokens = Number(entry.completionTokens || 0); + return `
+
+
+

${escapeHTML(entry.integration)}

+
+ Providers: ${escapeHTML(providerSummary || 'No providers yet')} +
+
+ ${entry.lastCapturedAt ? escapeHTML(formatDateTime(entry.lastCapturedAt)) : '--'} +
+
+
Requests: ${formatNumber(Number(entry.requestCount || 0))}
+
Total Tokens: ${formatNumber(Number(entry.totalTokens || 0))}
+
Input / Output: ${formatNumber(promptTokens)} / ${formatNumber(completionTokens)}
+
Cost (where available): ${entry.totalCostUsd != null ? formatCurrencyUSD(Number(entry.totalCostUsd || 0)) : '--'}
+
+
`; + }).join(''); +} + +function renderAPIIntegrationsHealth() { + const summaryEl = document.getElementById('api-integrations-health-summary'); + const alertsEl = document.getElementById('api-integrations-health-alerts'); + const tbody = document.getElementById('api-integrations-health-tbody'); + if (!summaryEl || !alertsEl || !tbody) return; + + const health = State.apiIntegrationsHealth; + if (!health) { + summaryEl.innerHTML = '

Loading API integrations health...

'; + alertsEl.innerHTML = ''; + tbody.innerHTML = 'No API integration ingest state yet.'; + return; + } + + const statusMeta = getAPIIntegrationsStatusMeta(health); + summaryEl.innerHTML = ` +
+
Status: ${statusMeta.label}
+
Tracked Files: ${formatNumber((Array.isArray(health.files) ? health.files : []).length)}
+
Alerts: ${formatNumber((Array.isArray(health.alerts) ? health.alerts : []).length)}
+
+
+

Rotating files: Move or rename the active .jsonl file, then let your script create a new one. That starts a fresh source log for new events. Historical charts remain in the database until you clear or replace the stored onWatch data.

+
+ `; + + const alerts = Array.isArray(health.alerts) ? health.alerts : []; + alertsEl.innerHTML = alerts.length > 0 + ? alerts.slice(0, 3).map((alert) => ` +
+
+ ${escapeHTML(alert.title || 'Alert')} +
+
${escapeHTML(alert.message || '')}
+
+ `).join('') + : ''; + + const files = Array.isArray(health.files) ? health.files : []; + if (files.length === 0) { + tbody.innerHTML = 'No API integration ingest state yet.'; + return; + } + tbody.innerHTML = files.map((file) => ` + + ${escapeHTML(file.sourcePath || '--')} + ${formatBytes(Number(file.fileSize || 0))} + ${file.lastCapturedAt ? escapeHTML(formatDateTime(file.lastCapturedAt)) : '--'} + + `).join(''); +} + +function buildAPIIntegrationsChartDatasets(historyRows, range, metric) { + const integrationNames = Object.keys(historyRows || {}).sort((a, b) => { + const aTotal = (historyRows[a] || []).reduce((sum, row) => sum + Number(row.totalTokens || 0), 0); + const bTotal = (historyRows[b] || []).reduce((sum, row) => sum + Number(row.totalTokens || 0), 0); + if (bTotal !== aTotal) return bTotal - aTotal; + return a.localeCompare(b); + }); + + let colorIndex = 0; + return integrationNames.reduce((datasets, integrationName) => { + const rows = Array.isArray(historyRows[integrationName]) ? historyRows[integrationName] : []; + if (metric === 'totalCostUsd' && !rows.some((row) => row.totalCostUsd != null)) { + return datasets; + } + const integrationTotalTokens = Number(State.apiIntegrationsCurrent?.[integrationName]?.totalTokens || 0); + const visibleTotalTokens = rows.reduce((sum, row) => sum + Number(row.totalTokens || 0), 0); + const accumulatedBaseline = Math.max(0, integrationTotalTokens - visibleTotalTokens); + const color = apiIntegrationsChartColorFallback[colorIndex++ % apiIntegrationsChartColorFallback.length]; + let runningTotal = accumulatedBaseline; + const rawData = rows.map((row) => { + let value = 0; + if (metric === 'tokenPerCall') { + const requestCount = Number(row.requestCount || 0); + value = requestCount > 0 ? 
Number(row.totalTokens || 0) / requestCount : 0; + } else if (metric === 'accumulatedTokens') { + runningTotal += Number(row.totalTokens || 0); + value = runningTotal; + } else { + value = Number(row[metric] || 0); + } + return { + x: new Date(row.capturedAt), + y: value, + }; + }); + const processed = processDataWithGaps(rawData, range); + datasets.push({ + label: integrationName, + data: processed.data, + borderColor: color.border, + backgroundColor: color.bg, + fill: true, + tension: 0.4, + borderWidth: 2, + pointRadius: processed.pointRadii, + pointHoverRadius: 4, + spanGaps: true, + segment: getSegmentStyle(processed.gapSegments, color.border), + }); + return datasets; + }, []); +} + +function renderAPIIntegrationsChart(range = State.currentRange || '6h') { + if (!State.chart) initChart(); + if (!State.chart) return; + + const metric = normalizeAPIIntegrationsMetric(State.apiIntegrationsSelectedMetric); + State.apiIntegrationsSelectedMetric = metric; + const datasets = buildAPIIntegrationsChartDatasets(State.apiIntegrationsHistory || {}, range, metric); + State.chart.data.datasets = datasets; + updateTimeScale(State.chart, range); + State.chartYMax = computeYMax(State.chart.data.datasets, State.chart, { cap: false }); + State.chart.options.scales.y.max = State.chartYMax; + const yAxisTitles = { + tokenPerCall: 'Tokens per Call', + requestCount: 'API Calls', + accumulatedTokens: 'Accumulated Tokens', + totalCostUsd: 'Cost (USD)', + }; + const chartConfig = State.chart.config.options || {}; + const configScales = chartConfig.scales || {}; + const currentYScale = configScales.y || {}; + const currentYTitle = currentYScale.title || {}; + const currentYTicks = currentYScale.ticks || {}; + const configPlugins = chartConfig.plugins || {}; + const currentTooltip = configPlugins.tooltip || {}; + const currentTooltipCallbacks = currentTooltip.callbacks || {}; + + const tickFormatter = (value) => { + if (metric === 'totalCostUsd') return formatCurrencyUSD(Number(value || 
0)); + if (metric === 'tokenPerCall') return formatNumber(Number(value || 0).toFixed(1)); + return formatNumber(Number(value || 0)); + }; + const tooltipLabelFormatter = (ctx) => { + if (ctx.parsed.y == null) return null; + if (metric === 'totalCostUsd') { + return `${ctx.dataset.label}: ${formatCurrencyUSD(Number(ctx.parsed.y || 0))}`; + } + if (metric === 'tokenPerCall') { + return `${ctx.dataset.label}: ${formatNumber(Number(ctx.parsed.y || 0).toFixed(1))} tokens/call`; + } + return `${ctx.dataset.label}: ${formatNumber(Number(ctx.parsed.y || 0))}`; + }; + + State.chart.config.options.scales = { + ...configScales, + y: { + ...currentYScale, + max: State.chartYMax, + title: { + ...currentYTitle, + display: true, + text: yAxisTitles[metric] || 'Value', + }, + ticks: { + ...currentYTicks, + callback: tickFormatter, + }, + }, + }; + + State.chart.config.options.plugins = { + ...configPlugins, + tooltip: { + ...currentTooltip, + callbacks: { + ...currentTooltipCallbacks, + label: tooltipLabelFormatter, + }, + }, + }; + + State.chart.update(); +} + function buildFixedDatasetsForRows(rows, range, configs) { const datasets = []; configs.forEach((cfg) => { @@ -4715,6 +5368,9 @@ function renderAllProvidersView() { : `

Collecting data...

`; + if (entry.summaryOnly) { + return renderAPIIntegrationsSummaryCard(entry, collapsed); + } return `
@@ -4753,6 +5409,14 @@ function renderAllProvidersView() { }); }); + container.querySelectorAll('.provider-card[data-api-integrations-link="true"]').forEach((card) => { + card.addEventListener('click', (event) => { + if (event.target.closest('.provider-card-collapse-btn')) return; + saveDefaultProvider('api-integrations'); + window.location.href = '/?provider=api-integrations'; + }); + }); + const chartRange = State.currentRange || '6h'; const colors = getThemeColors(); entries.forEach((entry) => { @@ -6287,6 +6951,23 @@ function setupRangeSelector() { }); } +function setupAPIIntegrationsMetricSelector() { + const select = document.getElementById('api-integrations-metric-select'); + if (!select) return; + const metric = normalizeAPIIntegrationsMetric(State.apiIntegrationsSelectedMetric); + State.apiIntegrationsSelectedMetric = metric; + if (![...select.options].some((option) => option.value === metric)) { + select.value = 'tokenPerCall'; + } else { + select.value = metric; + } + select.addEventListener('change', () => { + State.apiIntegrationsSelectedMetric = normalizeAPIIntegrationsMetric(select.value); + saveAPIIntegrationsMetric(State.apiIntegrationsSelectedMetric); + renderAPIIntegrationsChart(State.currentRange || '6h'); + }); +} + function setupCycleFilters() { // Range pills const rangePills = document.getElementById('cycle-range-pills'); @@ -7128,6 +7809,7 @@ async function loadSettings() { // Provider settings - store in State for modal use State.providerSettings = data.provider_settings || {}; + State.apiIntegrationsVisibility = data.api_integrations_visibility || { dashboard: true }; // Provider visibility + dynamic provider status await populateProviderToggles(data.provider_visibility || {}); @@ -7367,6 +8049,17 @@ async function populateProviderToggles(visibility) { ]; } + let apiIntegrationsHealth = null; + try { + const res = await authFetch(`${API_BASE}/api/api-integrations/health`); + if (res.ok) { + apiIntegrationsHealth = await res.json(); + 
State.apiIntegrationsHealth = apiIntegrationsHealth; + } + } catch (e) { + // silent - API integrations health should not block provider toggles + } + const providerByKey = new Map(providers.map(p => [p.key, p])); const codexStatus = providerByKey.get('codex') || null; const minimaxStatus = providerByKey.get('minimax') || null; @@ -7471,6 +8164,8 @@ async function populateProviderToggles(visibility) { isPolling: !!fallbackMinimax.isPolling })); } + + container.appendChild(createAPIIntegrationsToggleRow(State.apiIntegrationsVisibility || { dashboard: true }, apiIntegrationsHealth)); } async function fetchMenubarProviders() { @@ -7801,6 +8496,57 @@ function createProviderToggleRow({ key, name, desc, vis, configured, autoDetecta return row; } +function createAPIIntegrationsToggleRow(visibility, health) { + const row = document.createElement('div'); + row.className = 'settings-toggle-row settings-toggle-row-dual'; + const statusMeta = getAPIIntegrationsStatusMeta(health); + row.innerHTML = ` +
+
API Integrations ${statusMeta.label}
+
Tracks local JSONL API telemetry from your own automated integrations.
+
+
+
+
Dashboard
+
Show as a dedicated dashboard tab
+ +
+
+ `; + + row.querySelector('input[type="checkbox"]')?.addEventListener('change', async (event) => { + const input = event.target; + const enabled = input.checked; + const feedback = document.getElementById('settings-feedback'); + input.disabled = true; + try { + const res = await authFetch(`${API_BASE}/api/settings`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ api_integrations_visibility: { dashboard: enabled } }), + }); + const data = await res.json(); + if (!res.ok) { + input.checked = !enabled; + showSettingsFeedback(feedback, data.error || 'Failed to update API Integrations visibility.', 'error'); + return; + } + State.apiIntegrationsVisibility = data.api_integrations_visibility || { dashboard: enabled }; + showSettingsFeedback(feedback, `API Integrations dashboard ${enabled ? 'enabled' : 'disabled'}. Reload dashboard to apply tab visibility changes.`, 'success'); + } catch (e) { + input.checked = !enabled; + showSettingsFeedback(feedback, 'API Integrations visibility update failed.', 'error'); + } finally { + input.disabled = false; + } + }); + + return row; +} + // ── Provider Settings Modal ── // Configuration for each provider's settings fields. 
@@ -8363,12 +9109,17 @@ function gatherSettings() { toggles.forEach(t => { const prov = t.dataset.provider; const role = t.dataset.role; + if (prov === 'api-integrations' || role === 'api-integrations-dashboard') return; if (!vis[prov]) vis[prov] = {}; vis[prov][role] = t.checked; }); settings.provider_visibility = vis; } + settings.api_integrations_visibility = { + dashboard: State.apiIntegrationsVisibility?.dashboard !== false, + }; + // Timezone const tzSelect = document.getElementById('settings-timezone'); if (tzSelect) { @@ -8437,6 +9188,8 @@ function setupSettingsSave() { if (!resp.ok) { showSettingsFeedback(feedback, data.error || 'Failed to save settings.', 'error'); } else { + if (data.provider_visibility) State.providerVisibility = data.provider_visibility; + if (data.api_integrations_visibility) State.apiIntegrationsVisibility = data.api_integrations_visibility; showSettingsFeedback(feedback, 'Settings saved successfully.', 'success'); } } catch (e) { @@ -9139,12 +9892,14 @@ document.addEventListener('DOMContentLoaded', async () => { updateMiniMaxAccountTabsVisibility(); } initMiniMaxAccountTabs(); + loadAPIIntegrationsPreferences(); initTheme(); initLayoutToggle(); initTimezoneBadge(); setupProviderSelector(); setupRangeSelector(); + setupAPIIntegrationsMetricSelector(); setupCycleFilters(); setupPasswordToggle(); setupTableControls(); diff --git a/internal/web/static/style.css b/internal/web/static/style.css index 27d375b..e074eb3 100644 --- a/internal/web/static/style.css +++ b/internal/web/static/style.css @@ -723,6 +723,162 @@ button, input, select { font-family: inherit; } font-size: 12px; } +.chart-controls { + display: flex; + align-items: center; + gap: 12px; + flex-wrap: wrap; +} + +.api-integrations-insights-panels { + display: grid; + grid-template-columns: repeat(2, minmax(0, 1fr)); + gap: var(--layout-gap); +} + +.api-integrations-card { + cursor: default; + animation: none; + opacity: 1; +} + +.api-integrations-card:hover { + transform: 
none; +} + +.api-integrations-card-stats, +.api-integrations-summary-grid, +.api-integrations-health-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(160px, 1fr)); + gap: 12px; +} + +.api-integrations-header-meta { + display: flex; + flex-wrap: wrap; + gap: 8px; + align-items: center; + margin-top: 6px; +} + +.api-integrations-provider-pill { + display: inline-flex; + align-items: center; + gap: 6px; + padding: 4px 10px; + border-radius: 999px; + background: color-mix(in srgb, var(--accent-teal) 10%, var(--surface-card-alt)); + border: 1px solid color-mix(in srgb, var(--accent-teal) 18%, var(--border-light)); + color: var(--text-secondary); + font-size: 12px; +} + +.api-integrations-provider-pill strong { + color: var(--text-primary); +} + +.api-integrations-stat, +.api-integrations-health-item { + display: flex; + flex-direction: column; + gap: 6px; + padding: 10px 12px; + background: var(--surface-card-alt); + border: 1px solid var(--border-light); + border-radius: var(--radius-sm); +} + +.api-integrations-stat-label, +.api-integrations-health-label { + font-size: 11px; + font-weight: 700; + text-transform: uppercase; + letter-spacing: 0.06em; + color: var(--text-muted); +} + +.api-integrations-stat-value, +.api-integrations-health-value { + font-size: 16px; + font-weight: 700; + color: var(--text-primary); +} + +.api-integrations-health-summary { + margin-bottom: 16px; +} + +.api-integrations-health-alerts { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); + gap: 12px; + margin-bottom: 16px; +} + +.api-integrations-health-copy { + display: grid; + gap: 12px; + margin-bottom: 16px; +} + +.api-integrations-health-copy p { + margin: 0; + color: var(--text-secondary); + line-height: 1.5; +} + +.api-integrations-health-copy code { + font-family: var(--font-mono); + font-size: 12px; +} + +.api-integrations-insights-controls { + display: flex; + align-items: center; + gap: 10px; + flex-wrap: wrap; +} + 
+.api-integrations-insights-controls-label { + font-size: 12px; + font-weight: 700; + color: var(--text-muted); + text-transform: uppercase; + letter-spacing: 0.05em; +} + +.api-integrations-insight-list { + display: grid; + gap: 8px; + margin-top: 14px; + padding-top: 12px; + border-top: 1px solid var(--border-light); +} + +.api-integrations-insight-row { + display: flex; + align-items: baseline; + justify-content: space-between; + gap: 12px; +} + +.api-integrations-insight-label { + font-size: 12px; + color: var(--text-secondary); +} + +.api-integrations-insight-value { + font-size: 13px; + font-weight: 700; + color: var(--text-primary); + text-align: right; +} + +.api-integrations-summary-card { + cursor: pointer; +} + /* Both View - Multi-Column Layout */ .both-view { display: grid; @@ -2166,6 +2322,7 @@ button, input, select { font-family: inherit; } .provider-card-header { padding: 14px 16px; } .provider-card-body { padding: 16px; } .provider-kpis { grid-template-columns: 1fr; } + .api-integrations-card-stats, .api-integrations-summary-grid, .api-integrations-health-grid { grid-template-columns: 1fr; } .provider-chart canvas { height: 170px !important; max-height: 170px; } .quota-card { padding: 18px; } .range-selector { width: 100%; } @@ -2181,6 +2338,7 @@ button, input, select { font-family: inherit; } .filter-pill { flex: 1; text-align: center; padding: 5px 6px; font-size: 11px; } .insights-stats { grid-template-columns: repeat(2, 1fr); } .insights-cards { grid-template-columns: repeat(auto-fit, minmax(140px, 1fr)); } + .api-integrations-insights-panels { grid-template-columns: 1fr; } .welcome-title { font-size: 22px; } .both-view { grid-template-columns: 1fr; gap: 16px; } .chart-container.both-charts { grid-template-columns: 1fr; } @@ -2197,6 +2355,7 @@ button, input, select { font-family: inherit; } .provider-card-body { padding: 14px; } .provider-card-title { font-size: 14px; } .provider-card-badge { font-size: 10px; } + .chart-controls { width: 100%; } 
.provider-chart canvas { height: 160px !important; max-height: 160px; } .quota-card { padding: 16px; border-radius: var(--radius-md); } .usage-percent { font-size: 26px; } diff --git a/internal/web/templates/dashboard.html b/internal/web/templates/dashboard.html index f0d1199..94d2ec0 100644 --- a/internal/web/templates/dashboard.html +++ b/internal/web/templates/dashboard.html @@ -12,7 +12,7 @@ {{range .Providers}} {{end}}
@@ -117,6 +117,10 @@

Dashboard

{{else if eq .CurrentProvider "gemini"}}
+ {{else if eq .CurrentProvider "api-integrations"}} +
+
+
{{else}}
{{if eq .CurrentProvider "zai"}} @@ -309,7 +313,39 @@

{{end}} - {{if ne .CurrentProvider "both"}} + {{if eq .CurrentProvider "api-integrations"}} +
+
+
+

+ + + + + All Time Statistics +

+
+
+

Loading statistics...

+
+
+ +
+
+

+ + + + + Recent Usage Insights +

+
+
+

Loading statistics...

+
+
+
+ {{else if ne .CurrentProvider "both"}}

@@ -325,7 +361,9 @@

Loading insights...

+ {{end}} + {{if ne .CurrentProvider "both"}}

@@ -333,14 +371,24 @@

- Usage Graphs + {{if eq .CurrentProvider "api-integrations"}}API Integration Usage{{else}}Usage Graphs{{end}}

-
- - - - - +
+ {{if eq .CurrentProvider "api-integrations"}} + + {{end}} +
+ + + + + +
@@ -349,7 +397,81 @@

{{end}} - {{if ne .CurrentProvider "both"}} + {{if eq .CurrentProvider "api-integrations"}} +
+
+

+ + + + Ingest Health +

+
+
+

Loading API integrations health...

+
+
+
+ + + + + + + + + + + +
Source FileFile SizeLast Event
No API integration ingest state yet.
+
+
+ +
+
+

+ + + + + Setup Guide +

+
+
+
+
+
+ How Custom API Integrations Work + JSONL + Tail +
+
+
Your script makes a normal provider API call, extracts usage from the response, and appends one normalized JSON line into ~/.onwatch/api-integrations/.
onWatch tails those files and stores the telemetry in SQLite.
+
+
+
+
+ Quick Start + 4 Steps +
+
+
1. Add a wrapper function around your API call.
2. Write events into ~/.onwatch/api-integrations/.
3. Keep onWatch running.
4. Verify ingestion in the health panel and the chart above.
+
+
+
+
+ Reference Paths + Docs +
+
+
Setup doc: docs/API_INTEGRATIONS_SETUP.md.
+ Wrappers can be found in: examples/api_integrations/.
Supported .py examples include:
Anthropic, OpenAI, Mistral, OpenRouter, and Gemini.
+
+
+
+
+
+ {{end}} + + {{if and (ne .CurrentProvider "both") (ne .CurrentProvider "api-integrations")}}

diff --git a/main.go b/main.go index 3b29e65..c406c24 100644 --- a/main.go +++ b/main.go @@ -1010,6 +1010,11 @@ func run() error { geminiAg.SetClientCredentials(api.DetectGeminiClientCredentials()) } + var apiIntegrationsAg *agent.APIIntegrationsIngestAgent + if cfg.APIIntegrationsEnabled { + apiIntegrationsAg = agent.NewAPIIntegrationsIngestAgent(db, cfg.APIIntegrationsDir, cfg.APIIntegrationsRetention, logger) + } + // Create notification engine notifier := notify.New(db, logger) notifier.SetEncryptionKey(deriveEncryptionKey(cfg.AdminPassHash)) @@ -1245,6 +1250,9 @@ func run() error { if geminiAg != nil { agentMgr.RegisterFactory("gemini", func() (agent.AgentRunner, error) { return geminiAg, nil }) } + if apiIntegrationsAg != nil { + agentMgr.RegisterFactory("api_integrations", func() (agent.AgentRunner, error) { return apiIntegrationsAg, nil }) + } handler.SetAgentManager(agentMgr) if minimaxMgr != nil { handler.SetMiniMaxAgentManager(minimaxMgr) @@ -1276,6 +1284,11 @@ func run() error { continue } } + if apiIntegrationsAg != nil { + if err := agentMgr.Start("api_integrations"); err == nil { + startedAny = true + } + } if !startedAny { logger.Info("No agents configured") } diff --git a/root_coverage_test.go b/root_coverage_test.go index 2741f12..50c9076 100644 --- a/root_coverage_test.go +++ b/root_coverage_test.go @@ -584,18 +584,34 @@ func startOnwatchNCListener(t *testing.T, port int) *exec.Cmd { } }) + // Wait until the port is connectable. 
addr := fmt.Sprintf("127.0.0.1:%d", port) deadline := time.Now().Add(2 * time.Second) for time.Now().Before(deadline) { conn, err := net.DialTimeout("tcp", addr, 100*time.Millisecond) if err == nil { _ = conn.Close() + break + } + time.Sleep(20 * time.Millisecond) + } + if _, err := net.DialTimeout("tcp", addr, 100*time.Millisecond); err != nil { + t.Skip("nc listener did not open port in time") + return nil + } + + // Also wait until lsof can see the process - lsof can lag a few ms behind a + // freshly opened socket, causing the port-detection path in runStatus to miss it. + lsofDeadline := time.Now().Add(2 * time.Second) + for time.Now().Before(lsofDeadline) { + out, _ := exec.Command("lsof", "-ti", fmt.Sprintf("tcp:%d", port)).Output() + if len(strings.TrimSpace(string(out))) > 0 { return cmd } time.Sleep(20 * time.Millisecond) } - t.Skip("nc listener did not open port in time") + t.Skip("lsof did not detect nc listener in time") return nil }