diff --git a/crates/token_proxy_core/src/proxy/dashboard/mod.rs b/crates/token_proxy_core/src/proxy/dashboard/mod.rs index 650ab5a..d253aff 100644 --- a/crates/token_proxy_core/src/proxy/dashboard/mod.rs +++ b/crates/token_proxy_core/src/proxy/dashboard/mod.rs @@ -38,7 +38,16 @@ pub struct DashboardProviderStat { #[serde(rename_all = "camelCase")] pub struct DashboardUpstreamStat { pub upstream_id: String, - pub provider: String, + pub requests: u64, + pub total_tokens: u64, + pub cached_tokens: u64, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct DashboardAccountStat { + pub upstream_id: String, + pub account_id: Option, pub requests: u64, pub total_tokens: u64, pub cached_tokens: u64, @@ -82,6 +91,7 @@ pub struct DashboardSnapshot { pub summary: DashboardSummary, pub providers: Vec, pub upstreams: Vec, + pub accounts: Vec, pub series: Vec, pub recent: Vec, /// 是否只基于日志文件末尾片段做统计(Step1:true;Step2 SQLite 后应为 false)。 @@ -93,26 +103,74 @@ pub async fn read_snapshot( range: DashboardRange, offset: Option, upstream_id: Option, + account_id: Option, + public_only: bool, ) -> Result { let offset = offset.unwrap_or(0); let from_ts_ms = range.from_ts_ms.map(|value| value as i64); let to_ts_ms = range.to_ts_ms.map(|value| value as i64); let upstream_id = upstream_id.as_deref(); - let bucket_ms = resolve_bucket_ms(&pool, from_ts_ms, to_ts_ms, upstream_id).await?; - - let summary = query_summary(&pool, from_ts_ms, to_ts_ms, upstream_id).await?; - let providers = query_providers(&pool, from_ts_ms, to_ts_ms, upstream_id).await?; + let account_id = account_id.as_deref(); + let bucket_ms = resolve_bucket_ms( + &pool, + from_ts_ms, + to_ts_ms, + upstream_id, + account_id, + public_only, + ) + .await?; + + let summary = query_summary( + &pool, + from_ts_ms, + to_ts_ms, + upstream_id, + account_id, + public_only, + ) + .await?; + let providers = query_providers( + &pool, + from_ts_ms, + to_ts_ms, + upstream_id, + account_id, + public_only, + ) 
+ .await?; // 选项列表只受时间范围限制,切换筛选时仍可看到同一范围内的其它上游。 let upstreams = query_upstreams(&pool, from_ts_ms, to_ts_ms).await?; - let series = query_series(&pool, from_ts_ms, to_ts_ms, bucket_ms, upstream_id).await?; + // 账户选项跟随上游收窄,但不受当前账户筛选影响。 + let accounts = query_accounts(&pool, from_ts_ms, to_ts_ms, upstream_id).await?; + let series = query_series( + &pool, + from_ts_ms, + to_ts_ms, + bucket_ms, + upstream_id, + account_id, + public_only, + ) + .await?; let series = fill_series_buckets(series, from_ts_ms, to_ts_ms, bucket_ms); - let recent = query_recent(&pool, from_ts_ms, to_ts_ms, offset, upstream_id).await?; + let recent = query_recent( + &pool, + from_ts_ms, + to_ts_ms, + offset, + upstream_id, + account_id, + public_only, + ) + .await?; Ok(DashboardSnapshot { summary, providers, upstreams, + accounts, series, recent, truncated: false, @@ -124,6 +182,8 @@ async fn query_summary( from_ts_ms: Option, to_ts_ms: Option, upstream_id: Option<&str>, + account_id: Option<&str>, + public_only: bool, ) -> Result { let row = sqlx::query( r#" @@ -143,12 +203,16 @@ SELECT FROM request_logs WHERE (?1 IS NULL OR ts_ms >= ?1) AND (?2 IS NULL OR ts_ms <= ?2) - AND (?3 IS NULL OR upstream_id = ?3); + AND (?3 IS NULL OR upstream_id = ?3) + AND (?4 IS NULL OR account_id = ?4) + AND (?5 = 0 OR account_id IS NULL); "#, ) .bind(from_ts_ms) .bind(to_ts_ms) .bind(upstream_id) + .bind(account_id) + .bind(public_only) .fetch_one(pool) .await .map_err(|err| format!("Failed to query dashboard summary: {err}"))?; @@ -169,7 +233,15 @@ WHERE (?1 IS NULL OR ts_ms >= ?1) }; // 中位数查询:使用 LIMIT/OFFSET 取中间值 - let median_latency_ms = query_median_latency(pool, from_ts_ms, to_ts_ms, upstream_id).await?; + let median_latency_ms = query_median_latency( + pool, + from_ts_ms, + to_ts_ms, + upstream_id, + account_id, + public_only, + ) + .await?; Ok(DashboardSummary { total_requests, @@ -190,6 +262,8 @@ async fn query_median_latency( from_ts_ms: Option, to_ts_ms: Option, upstream_id: Option<&str>, + 
account_id: Option<&str>, + public_only: bool, ) -> Result { // 单条 SQL 完成中位数计算: // - 使用 CTE 保证 count 和数据在同一快照内 @@ -202,6 +276,8 @@ WITH filtered AS ( WHERE (?1 IS NULL OR ts_ms >= ?1) AND (?2 IS NULL OR ts_ms <= ?2) AND (?3 IS NULL OR upstream_id = ?3) + AND (?4 IS NULL OR account_id = ?4) + AND (?5 = 0 OR account_id IS NULL) ), cnt AS ( SELECT COUNT(*) AS n FROM filtered @@ -227,6 +303,8 @@ SELECT COALESCE( .bind(from_ts_ms) .bind(to_ts_ms) .bind(upstream_id) + .bind(account_id) + .bind(public_only) .fetch_one(pool) .await .map_err(|err| format!("Failed to query median latency: {err}"))?; @@ -240,6 +318,8 @@ async fn query_providers( from_ts_ms: Option, to_ts_ms: Option, upstream_id: Option<&str>, + account_id: Option<&str>, + public_only: bool, ) -> Result, String> { let providers = sqlx::query( r#" @@ -256,6 +336,8 @@ FROM request_logs WHERE (?1 IS NULL OR ts_ms >= ?1) AND (?2 IS NULL OR ts_ms <= ?2) AND (?3 IS NULL OR upstream_id = ?3) + AND (?4 IS NULL OR account_id = ?4) + AND (?5 = 0 OR account_id IS NULL) GROUP BY provider ORDER BY total_tokens DESC; "#, @@ -263,6 +345,8 @@ ORDER BY total_tokens DESC; .bind(from_ts_ms) .bind(to_ts_ms) .bind(upstream_id) + .bind(account_id) + .bind(public_only) .fetch_all(pool) .await .map_err(|err| format!("Failed to query provider stats: {err}"))? 
@@ -293,7 +377,6 @@ async fn query_upstreams( r#" SELECT upstream_id, - provider, COUNT(*) AS requests, COALESCE(SUM(CASE WHEN total_tokens IS NOT NULL THEN total_tokens @@ -304,7 +387,7 @@ SELECT FROM request_logs WHERE (?1 IS NULL OR ts_ms >= ?1) AND (?2 IS NULL OR ts_ms <= ?2) -GROUP BY upstream_id, provider +GROUP BY upstream_id ORDER BY total_tokens DESC, requests DESC, upstream_id ASC; "#, ) @@ -316,13 +399,11 @@ ORDER BY total_tokens DESC, requests DESC, upstream_id ASC; .into_iter() .filter_map(|row| { let upstream_id: String = row.try_get("upstream_id").ok()?; - let provider: String = row.try_get("provider").ok()?; let requests: i64 = row.try_get("requests").ok()?; let total_tokens: i64 = row.try_get("total_tokens").ok()?; let cached_tokens: i64 = row.try_get("cached_tokens").ok()?; Some(DashboardUpstreamStat { upstream_id, - provider, requests: i64_to_u64(requests), total_tokens: i64_to_u64(total_tokens), cached_tokens: i64_to_u64(cached_tokens), @@ -333,12 +414,66 @@ ORDER BY total_tokens DESC, requests DESC, upstream_id ASC; Ok(upstreams) } +async fn query_accounts( + pool: &sqlx::SqlitePool, + from_ts_ms: Option, + to_ts_ms: Option, + upstream_id: Option<&str>, +) -> Result, String> { + let accounts = sqlx::query( + r#" +SELECT + upstream_id, + account_id, + COUNT(*) AS requests, + COALESCE(SUM(CASE + WHEN total_tokens IS NOT NULL THEN total_tokens + WHEN input_tokens IS NOT NULL OR output_tokens IS NOT NULL THEN COALESCE(input_tokens, 0) + COALESCE(output_tokens, 0) + ELSE 0 + END), 0) AS total_tokens, + COALESCE(SUM(COALESCE(cached_tokens, 0)), 0) AS cached_tokens +FROM request_logs +WHERE (?1 IS NULL OR ts_ms >= ?1) + AND (?2 IS NULL OR ts_ms <= ?2) + AND (?3 IS NULL OR upstream_id = ?3) +GROUP BY upstream_id, account_id +ORDER BY upstream_id ASC, account_id IS NULL DESC, requests DESC, account_id ASC; +"#, + ) + .bind(from_ts_ms) + .bind(to_ts_ms) + .bind(upstream_id) + .fetch_all(pool) + .await + .map_err(|err| format!("Failed to query dashboard 
accounts: {err}"))? + .into_iter() + .filter_map(|row| { + let upstream_id: String = row.try_get("upstream_id").ok()?; + let account_id: Option = row.try_get("account_id").ok()?; + let requests: i64 = row.try_get("requests").ok()?; + let total_tokens: i64 = row.try_get("total_tokens").ok()?; + let cached_tokens: i64 = row.try_get("cached_tokens").ok()?; + Some(DashboardAccountStat { + upstream_id, + account_id, + requests: i64_to_u64(requests), + total_tokens: i64_to_u64(total_tokens), + cached_tokens: i64_to_u64(cached_tokens), + }) + }) + .collect::>(); + + Ok(accounts) +} + async fn query_series( pool: &sqlx::SqlitePool, from_ts_ms: Option, to_ts_ms: Option, bucket_ms: u64, upstream_id: Option<&str>, + account_id: Option<&str>, + public_only: bool, ) -> Result, String> { let series = sqlx::query( r#" @@ -358,6 +493,8 @@ FROM request_logs WHERE (?1 IS NULL OR ts_ms >= ?1) AND (?2 IS NULL OR ts_ms <= ?2) AND (?4 IS NULL OR upstream_id = ?4) + AND (?5 IS NULL OR account_id = ?5) + AND (?6 = 0 OR account_id IS NULL) GROUP BY bucket_ts_ms ORDER BY bucket_ts_ms ASC; "#, @@ -366,6 +503,8 @@ ORDER BY bucket_ts_ms ASC; .bind(to_ts_ms) .bind(i64::try_from(bucket_ms).unwrap_or(i64::MAX)) .bind(upstream_id) + .bind(account_id) + .bind(public_only) .fetch_all(pool) .await .map_err(|err| format!("Failed to query dashboard series: {err}"))? 
@@ -476,6 +615,8 @@ async fn query_recent( to_ts_ms: Option, offset: u32, upstream_id: Option<&str>, + account_id: Option<&str>, + public_only: bool, ) -> Result, String> { let recent = sqlx::query( r#" @@ -503,6 +644,8 @@ FROM request_logs WHERE (?1 IS NULL OR ts_ms >= ?1) AND (?2 IS NULL OR ts_ms <= ?2) AND (?5 IS NULL OR upstream_id = ?5) + AND (?6 IS NULL OR account_id = ?6) + AND (?7 = 0 OR account_id IS NULL) ORDER BY ts_ms DESC LIMIT ?3 OFFSET ?4; "#, @@ -512,6 +655,8 @@ LIMIT ?3 OFFSET ?4; .bind(i64::from(RECENT_PAGE_SIZE)) .bind(i64::from(offset)) .bind(upstream_id) + .bind(account_id) + .bind(public_only) .fetch_all(pool) .await .map_err(|err| format!("Failed to query recent requests: {err}"))? @@ -560,6 +705,8 @@ async fn resolve_bucket_ms( from_ts_ms: Option, to_ts_ms: Option, upstream_id: Option<&str>, + account_id: Option<&str>, + public_only: bool, ) -> Result { if let (Some(from), Some(to)) = (from_ts_ms, to_ts_ms) { let span_ms = (to - from).max(0) as u64; @@ -574,12 +721,16 @@ SELECT FROM request_logs WHERE (?1 IS NULL OR ts_ms >= ?1) AND (?2 IS NULL OR ts_ms <= ?2) - AND (?3 IS NULL OR upstream_id = ?3); + AND (?3 IS NULL OR upstream_id = ?3) + AND (?4 IS NULL OR account_id = ?4) + AND (?5 = 0 OR account_id IS NULL); "#, ) .bind(from_ts_ms) .bind(to_ts_ms) .bind(upstream_id) + .bind(account_id) + .bind(public_only) .fetch_one(pool) .await .map_err(|err| format!("Failed to query dashboard range: {err}"))?; diff --git a/crates/token_proxy_core/src/proxy/dashboard/tests.rs b/crates/token_proxy_core/src/proxy/dashboard/tests.rs index 405fc60..a05de61 100644 --- a/crates/token_proxy_core/src/proxy/dashboard/tests.rs +++ b/crates/token_proxy_core/src/proxy/dashboard/tests.rs @@ -171,7 +171,9 @@ async fn insert_request( #[tokio::test] async fn median_latency_empty_table_returns_zero() { let pool = setup_test_db().await; - let result = query_median_latency(&pool, None, None, None).await.unwrap(); + let result = query_median_latency(&pool, None, None, 
None, None, false) + .await + .unwrap(); assert_eq!(result, 0, "Empty table should return 0"); } @@ -180,7 +182,9 @@ async fn median_latency_single_value() { let pool = setup_test_db().await; insert_latency(&pool, 100).await; - let result = query_median_latency(&pool, None, None, None).await.unwrap(); + let result = query_median_latency(&pool, None, None, None, None, false) + .await + .unwrap(); assert_eq!(result, 100, "Single value should be the median"); } @@ -192,7 +196,9 @@ async fn median_latency_odd_count() { insert_latency(&pool, 30).await; insert_latency(&pool, 20).await; - let result = query_median_latency(&pool, None, None, None).await.unwrap(); + let result = query_median_latency(&pool, None, None, None, None, false) + .await + .unwrap(); assert_eq!(result, 20, "Odd count median should be middle value"); } @@ -205,7 +211,9 @@ async fn median_latency_even_count() { insert_latency(&pool, 20).await; insert_latency(&pool, 30).await; - let result = query_median_latency(&pool, None, None, None).await.unwrap(); + let result = query_median_latency(&pool, None, None, None, None, false) + .await + .unwrap(); assert_eq!( result, 25, "Even count median should be average of two middle values" @@ -219,7 +227,9 @@ async fn median_latency_even_count_rounds_down() { insert_latency(&pool, 10).await; insert_latency(&pool, 21).await; - let result = query_median_latency(&pool, None, None, None).await.unwrap(); + let result = query_median_latency(&pool, None, None, None, None, false) + .await + .unwrap(); assert_eq!(result, 15, "Median should use integer division"); } @@ -250,18 +260,20 @@ async fn median_latency_with_time_range_filter() { .unwrap(); // 只查询 ts_ms 在 150-250 范围内的数据,应该只有 latency_ms=100 的记录 - let result = query_median_latency(&pool, Some(150), Some(250), None) + let result = query_median_latency(&pool, Some(150), Some(250), None, None, false) .await .unwrap(); assert_eq!(result, 100, "Should filter by time range"); // 查询所有数据,中位数应为 100 - let result_all = 
query_median_latency(&pool, None, None, None).await.unwrap(); + let result_all = query_median_latency(&pool, None, None, None, None, false) + .await + .unwrap(); assert_eq!(result_all, 100, "All data median should be 100"); } #[tokio::test] -async fn read_snapshot_filters_by_upstream_and_keeps_all_upstream_options() { +async fn read_snapshot_filters_by_upstream_and_keeps_merged_upstream_and_account_options() { let pool = setup_test_db().await; insert_request( &pool, @@ -277,6 +289,20 @@ async fn read_snapshot_filters_by_upstream_and_keeps_all_upstream_options() { 30, ) .await; + insert_request( + &pool, + 150, + "openai-response", + "alpha", + None, + 200, + Some(2), + Some(3), + None, + Some(1), + 40, + ) + .await; insert_request( &pool, 200, @@ -300,29 +326,32 @@ async fn read_snapshot_filters_by_upstream_and_keeps_all_upstream_options() { }, Some(0), Some(String::from("alpha")), + None, + false, ) .await .unwrap(); - assert_eq!(snapshot.summary.total_requests, 1); - assert_eq!(snapshot.summary.success_requests, 1); + assert_eq!(snapshot.summary.total_requests, 2); + assert_eq!(snapshot.summary.success_requests, 2); assert_eq!(snapshot.summary.error_requests, 0); - assert_eq!(snapshot.summary.total_tokens, 30); - assert_eq!(snapshot.summary.cached_tokens, 5); - assert_eq!(snapshot.summary.avg_latency_ms, 30); - assert_eq!(snapshot.summary.median_latency_ms, 30); + assert_eq!(snapshot.summary.total_tokens, 35); + assert_eq!(snapshot.summary.cached_tokens, 6); + assert_eq!(snapshot.summary.avg_latency_ms, 35); + assert_eq!(snapshot.summary.median_latency_ms, 35); - assert_eq!(snapshot.providers.len(), 1); + assert_eq!(snapshot.providers.len(), 2); assert_eq!(snapshot.providers[0].provider, "openai"); assert_eq!(snapshot.providers[0].requests, 1); - assert_eq!(snapshot.recent.len(), 1); + assert_eq!(snapshot.recent.len(), 2); assert_eq!(snapshot.recent[0].upstream_id, "alpha"); + assert_eq!(snapshot.recent[0].account_id, None); assert_eq!( - 
snapshot.recent[0].account_id.as_deref(), + snapshot.recent[1].account_id.as_deref(), Some("codex-a.json") ); - assert_eq!(snapshot.recent[0].output_tokens, Some(20)); + assert_eq!(snapshot.recent[1].output_tokens, Some(20)); assert!( snapshot .series @@ -336,9 +365,105 @@ async fn read_snapshot_filters_by_upstream_and_keeps_all_upstream_options() { assert!(snapshot .upstreams .iter() - .any(|item| item.upstream_id == "alpha" && item.provider == "openai")); + .any(|item| item.upstream_id == "alpha" && item.requests == 2)); assert!(snapshot .upstreams .iter() - .any(|item| item.upstream_id == "beta" && item.provider == "anthropic")); + .any(|item| item.upstream_id == "beta" && item.requests == 1)); + + assert_eq!(snapshot.accounts.len(), 2); + assert!(snapshot.accounts.iter().any(|item| { + item.upstream_id == "alpha" + && item.account_id.as_deref() == Some("codex-a.json") + && item.requests == 1 + })); + assert!(snapshot.accounts.iter().any(|item| { + item.upstream_id == "alpha" && item.account_id.is_none() && item.requests == 1 + })); +} + +#[tokio::test] +async fn read_snapshot_filters_by_account_and_public_requests() { + let pool = setup_test_db().await; + insert_request( + &pool, + 100, + "openai", + "alpha", + Some("codex-a.json"), + 200, + Some(10), + Some(20), + None, + Some(5), + 30, + ) + .await; + insert_request( + &pool, + 150, + "openai-response", + "alpha", + None, + 200, + Some(2), + Some(3), + None, + Some(1), + 40, + ) + .await; + insert_request( + &pool, + 200, + "anthropic", + "beta", + Some("claude-a.json"), + 200, + Some(3), + Some(4), + None, + Some(1), + 90, + ) + .await; + + let account_snapshot = read_snapshot( + &pool, + DashboardRange { + from_ts_ms: None, + to_ts_ms: None, + }, + Some(0), + Some(String::from("alpha")), + Some(String::from("codex-a.json")), + false, + ) + .await + .unwrap(); + + assert_eq!(account_snapshot.summary.total_requests, 1); + assert_eq!(account_snapshot.recent.len(), 1); + assert_eq!( + 
account_snapshot.recent[0].account_id.as_deref(), + Some("codex-a.json") + ); + + let public_snapshot = read_snapshot( + &pool, + DashboardRange { + from_ts_ms: None, + to_ts_ms: None, + }, + Some(0), + Some(String::from("alpha")), + None, + true, + ) + .await + .unwrap(); + + assert_eq!(public_snapshot.summary.total_requests, 1); + assert_eq!(public_snapshot.recent.len(), 1); + assert_eq!(public_snapshot.recent[0].account_id, None); } diff --git a/messages/en.json b/messages/en.json index b3a7ddc..46212c8 100644 --- a/messages/en.json +++ b/messages/en.json @@ -338,6 +338,10 @@ "dashboard_upstream_label": "Upstream", "dashboard_upstream_placeholder": "Select upstream", "dashboard_upstream_all": "All", + "dashboard_account_label": "Account", + "dashboard_account_placeholder": "Select account", + "dashboard_account_all": "All accounts", + "dashboard_account_public": "Public", "dashboard_load_failed": "Load failed", "dashboard_stat_requests": "Requests", "dashboard_stat_errors": "Errors", diff --git a/messages/zh.json b/messages/zh.json index 9eabd0a..f3743b9 100644 --- a/messages/zh.json +++ b/messages/zh.json @@ -339,6 +339,10 @@ "dashboard_upstream_label": "上游", "dashboard_upstream_placeholder": "选择上游", "dashboard_upstream_all": "全部", + "dashboard_account_label": "账户", + "dashboard_account_placeholder": "选择账户", + "dashboard_account_all": "全部账户", + "dashboard_account_public": "公共", "dashboard_load_failed": "加载失败", "dashboard_stat_requests": "请求数", "dashboard_stat_errors": "错误数", diff --git a/src-tauri/src/commands/dashboard.rs b/src-tauri/src/commands/dashboard.rs index d29e18a..716c746 100644 --- a/src-tauri/src/commands/dashboard.rs +++ b/src-tauri/src/commands/dashboard.rs @@ -10,8 +10,18 @@ pub async fn read_dashboard_snapshot( range: proxy::dashboard::DashboardRange, offset: Option, upstream_id: Option, + account_id: Option, + public_only: Option, ) -> Result { let paths = app.state::>(); let pool = proxy::sqlite::open_read_pool(paths.inner().as_ref()).await?; 
- proxy::dashboard::read_snapshot(&pool, range, offset, upstream_id).await + proxy::dashboard::read_snapshot( + &pool, + range, + offset, + upstream_id, + account_id, + public_only.unwrap_or(false), + ) + .await } diff --git a/src/features/dashboard/DashboardPanel.test.tsx b/src/features/dashboard/DashboardPanel.test.tsx index bf82f62..19e6928 100644 --- a/src/features/dashboard/DashboardPanel.test.tsx +++ b/src/features/dashboard/DashboardPanel.test.tsx @@ -51,8 +51,8 @@ describe("dashboard/DashboardPanel", () => { beforeEach(() => { readDashboardSnapshotMock.mockReset(); readDashboardSnapshotMock.mockImplementation( - async ({ upstreamId }: DashboardSnapshotQuery) => { - if (upstreamId === "alpha") { + async ({ upstreamId, accountId, publicOnly }: DashboardSnapshotQuery) => { + if (upstreamId === "alpha" && accountId === "codex-a.json") { return { summary: { totalRequests: 1, @@ -76,14 +76,126 @@ describe("dashboard/DashboardPanel", () => { upstreams: [ { upstreamId: "alpha", + requests: 2, + totalTokens: 35, + cachedTokens: 6, + }, + { + upstreamId: "beta", + requests: 1, + totalTokens: 7, + cachedTokens: 1, + }, + ], + accounts: [ + { + upstreamId: "alpha", + accountId: "codex-a.json", + requests: 1, + totalTokens: 30, + cachedTokens: 5, + }, + { + upstreamId: "alpha", + accountId: null, + requests: 1, + totalTokens: 5, + cachedTokens: 1, + }, + { + upstreamId: "beta", + accountId: null, + requests: 1, + totalTokens: 7, + cachedTokens: 1, + }, + ], + series: [ + { + tsMs: 100, + totalRequests: 1, + errorRequests: 0, + inputTokens: 10, + outputTokens: 20, + cachedTokens: 5, + totalTokens: 30, + }, + ], + recent: [ + { + id: 1, + tsMs: 100, + path: "/v1/chat/completions", + provider: "openai", + upstreamId: "alpha", + accountId: "codex-a.json", + model: "gpt-5", + mappedModel: null, + stream: false, + status: 200, + totalTokens: 30, + cachedTokens: 5, + latencyMs: 30, + upstreamRequestId: null, + }, + ], + truncated: false, + }; + } + + if (upstreamId === "alpha" 
&& publicOnly) { + return { + summary: { + totalRequests: 1, + successRequests: 1, + errorRequests: 0, + totalTokens: 30, + inputTokens: 10, + outputTokens: 20, + cachedTokens: 5, + avgLatencyMs: 30, + medianLatencyMs: 30, + }, + providers: [ + { provider: "openai", requests: 1, totalTokens: 30, cachedTokens: 5, }, + ], + upstreams: [ + { + upstreamId: "alpha", + requests: 2, + totalTokens: 35, + cachedTokens: 6, + }, { upstreamId: "beta", - provider: "anthropic", + requests: 1, + totalTokens: 7, + cachedTokens: 1, + }, + ], + accounts: [ + { + upstreamId: "alpha", + accountId: "codex-a.json", + requests: 1, + totalTokens: 30, + cachedTokens: 5, + }, + { + upstreamId: "alpha", + accountId: null, + requests: 1, + totalTokens: 5, + cachedTokens: 1, + }, + { + upstreamId: "beta", + accountId: null, requests: 1, totalTokens: 7, cachedTokens: 1, @@ -101,12 +213,126 @@ describe("dashboard/DashboardPanel", () => { }, ], recent: [ + { + id: 1, + tsMs: 100, + path: "/v1/chat/completions", + provider: "openai-response", + upstreamId: "alpha", + accountId: null, + model: "gpt-5", + mappedModel: null, + stream: false, + status: 200, + totalTokens: 5, + cachedTokens: 1, + latencyMs: 40, + upstreamRequestId: null, + }, + ], + truncated: false, + }; + } + + if (upstreamId === "alpha") { + return { + summary: { + totalRequests: 2, + successRequests: 2, + errorRequests: 0, + totalTokens: 35, + inputTokens: 12, + outputTokens: 23, + cachedTokens: 6, + avgLatencyMs: 35, + medianLatencyMs: 35, + }, + providers: [ + { + provider: "openai", + requests: 1, + totalTokens: 30, + cachedTokens: 5, + }, + { + provider: "openai-response", + requests: 1, + totalTokens: 5, + cachedTokens: 1, + }, + ], + upstreams: [ + { + upstreamId: "alpha", + requests: 2, + totalTokens: 35, + cachedTokens: 6, + }, + { + upstreamId: "beta", + requests: 1, + totalTokens: 7, + cachedTokens: 1, + }, + ], + accounts: [ + { + upstreamId: "alpha", + accountId: "codex-a.json", + requests: 1, + totalTokens: 30, + 
cachedTokens: 5, + }, + { + upstreamId: "alpha", + accountId: null, + requests: 1, + totalTokens: 5, + cachedTokens: 1, + }, + { + upstreamId: "beta", + accountId: null, + requests: 1, + totalTokens: 7, + cachedTokens: 1, + }, + ], + series: [ + { + tsMs: 100, + totalRequests: 2, + errorRequests: 0, + inputTokens: 12, + outputTokens: 23, + cachedTokens: 6, + totalTokens: 35, + }, + ], + recent: [ + { + id: 2, + tsMs: 110, + path: "/v1/responses", + provider: "openai-response", + upstreamId: "alpha", + accountId: null, + model: "gpt-5", + mappedModel: null, + stream: false, + status: 200, + totalTokens: 5, + cachedTokens: 1, + latencyMs: 40, + upstreamRequestId: null, + }, { id: 1, tsMs: 100, path: "/v1/chat/completions", provider: "openai", upstreamId: "alpha", + accountId: "codex-a.json", model: "gpt-5", mappedModel: null, stream: false, @@ -123,15 +349,15 @@ describe("dashboard/DashboardPanel", () => { return { summary: { - totalRequests: 2, - successRequests: 1, + totalRequests: 3, + successRequests: 2, errorRequests: 1, - totalTokens: 37, - inputTokens: 13, - outputTokens: 24, - cachedTokens: 6, - avgLatencyMs: 60, - medianLatencyMs: 60, + totalTokens: 42, + inputTokens: 15, + outputTokens: 27, + cachedTokens: 7, + avgLatencyMs: 53, + medianLatencyMs: 40, }, providers: [ { @@ -146,18 +372,45 @@ describe("dashboard/DashboardPanel", () => { totalTokens: 7, cachedTokens: 1, }, + { + provider: "openai-response", + requests: 1, + totalTokens: 5, + cachedTokens: 1, + }, ], upstreams: [ { upstreamId: "alpha", - provider: "openai", + requests: 2, + totalTokens: 35, + cachedTokens: 6, + }, + { + upstreamId: "beta", + requests: 1, + totalTokens: 7, + cachedTokens: 1, + }, + ], + accounts: [ + { + upstreamId: "alpha", + accountId: "codex-a.json", requests: 1, totalTokens: 30, cachedTokens: 5, }, + { + upstreamId: "alpha", + accountId: null, + requests: 1, + totalTokens: 5, + cachedTokens: 1, + }, { upstreamId: "beta", - provider: "anthropic", + accountId: null, requests: 
1, totalTokens: 7, cachedTokens: 1, @@ -166,12 +419,12 @@ describe("dashboard/DashboardPanel", () => { series: [ { tsMs: 100, - totalRequests: 2, + totalRequests: 3, errorRequests: 1, - inputTokens: 13, - outputTokens: 24, - cachedTokens: 6, - totalTokens: 37, + inputTokens: 15, + outputTokens: 27, + cachedTokens: 7, + totalTokens: 42, }, ], recent: [], @@ -181,22 +434,24 @@ describe("dashboard/DashboardPanel", () => { ); }); - it("defaults to all upstream data and refetches when an upstream is selected", async () => { + it("defaults to all upstream data and refetches when an upstream and account are selected", async () => { const user = userEvent.setup(); renderPanel(); await waitFor(() => { expect(screen.getByTestId("dashboard-summary-total")).toHaveTextContent( - "2" + "3" ); }); - expect(screen.getByTestId("dashboard-chart-total")).toHaveTextContent("37"); + expect(screen.getByTestId("dashboard-chart-total")).toHaveTextContent("42"); expect(readDashboardSnapshotMock).toHaveBeenCalledWith( { range: { fromTsMs: expect.any(Number), toTsMs: expect.any(Number) }, offset: 0, upstreamId: null, + accountId: null, + publicOnly: false, } ); @@ -204,7 +459,30 @@ describe("dashboard/DashboardPanel", () => { screen.getByRole("combobox", { name: m.dashboard_upstream_label() }) ); await user.click( - await screen.findByRole("option", { name: "alpha · openai" }) + await screen.findByRole("option", { name: "alpha" }) + ); + + await waitFor(() => { + expect(screen.getByTestId("dashboard-summary-total")).toHaveTextContent( + "2" + ); + }); + expect(screen.getByTestId("dashboard-chart-total")).toHaveTextContent("35"); + expect(readDashboardSnapshotMock).toHaveBeenLastCalledWith( + { + range: { fromTsMs: expect.any(Number), toTsMs: expect.any(Number) }, + offset: 0, + upstreamId: "alpha", + accountId: null, + publicOnly: false, + } + ); + + await user.click( + screen.getByRole("combobox", { name: m.dashboard_account_label() }) + ); + await user.click( + await 
screen.findByRole("option", { name: "codex-a.json" }) ); await waitFor(() => { @@ -218,6 +496,8 @@ describe("dashboard/DashboardPanel", () => { range: { fromTsMs: expect.any(Number), toTsMs: expect.any(Number) }, offset: 0, upstreamId: "alpha", + accountId: "codex-a.json", + publicOnly: false, } ); }); diff --git a/src/features/dashboard/DashboardPanel.tsx b/src/features/dashboard/DashboardPanel.tsx index 1d9daee..32eebd5 100644 --- a/src/features/dashboard/DashboardPanel.tsx +++ b/src/features/dashboard/DashboardPanel.tsx @@ -17,10 +17,14 @@ export function DashboardPanel() { activeRange, rangePreset, selectedUpstreamId, + selectedAccountId, + selectedPublicOnly, upstreamOptions, + accountOptions, refresh, onRangeChange, onUpstreamChange, + onAccountChange, } = useDashboardSnapshot() const isLoading = status === "loading" @@ -41,9 +45,13 @@ export function DashboardPanel() { range={rangePreset} upstreamId={selectedUpstreamId} upstreamOptions={upstreamOptions} + accountId={selectedAccountId} + publicOnly={selectedPublicOnly} + accountOptions={accountOptions} loading={isLoading} onRangeChange={onRangeChange} onUpstreamChange={onUpstreamChange} + onAccountChange={onAccountChange} onRefresh={refresh} /> diff --git a/src/features/dashboard/api.test.ts b/src/features/dashboard/api.test.ts index e353f3c..69f0688 100644 --- a/src/features/dashboard/api.test.ts +++ b/src/features/dashboard/api.test.ts @@ -20,6 +20,7 @@ describe("dashboard/api", () => { }, providers: [], upstreams: [], + accounts: [], series: [], recent: [], truncated: false, @@ -30,12 +31,16 @@ describe("dashboard/api", () => { range, offset: 10, upstreamId: "alpha", + accountId: "codex-a.json", + publicOnly: false, }); expect(invokeMock).toHaveBeenCalledWith("read_dashboard_snapshot", { range, offset: 10, upstreamId: "alpha", + accountId: "codex-a.json", + publicOnly: false, }); }); }); diff --git a/src/features/dashboard/snapshot.test.tsx b/src/features/dashboard/snapshot.test.tsx index e9ced66..57a8692 
100644 --- a/src/features/dashboard/snapshot.test.tsx +++ b/src/features/dashboard/snapshot.test.tsx @@ -34,14 +34,35 @@ function createSnapshot( upstreams: [ { upstreamId: "alpha", - provider: "openai", + requests: 2, + totalTokens: 35, + cachedTokens: 6, + }, + { + upstreamId: "beta", + requests: 1, + totalTokens: 7, + cachedTokens: 0, + }, + ], + accounts: [ + { + upstreamId: "alpha", + accountId: "codex-a.json", requests: 1, totalTokens: 30, cachedTokens: 5, }, + { + upstreamId: "alpha", + accountId: null, + requests: 1, + totalTokens: 5, + cachedTokens: 1, + }, { upstreamId: "beta", - provider: "anthropic", + accountId: "claude-a.json", requests: 1, totalTokens: 7, cachedTokens: 0, @@ -55,7 +76,15 @@ function createSnapshot( } function HookHarness() { - const { snapshot, selectedUpstreamId, onUpstreamChange } = + const { + snapshot, + selectedUpstreamId, + selectedAccountId, + selectedPublicOnly, + accountOptions, + onUpstreamChange, + onAccountChange, + } = useDashboardSnapshot() return ( @@ -63,14 +92,28 @@ function HookHarness() {
{selectedUpstreamId ?? "all"}
+
+ {selectedPublicOnly ? "public" : selectedAccountId ?? "all"} +
{snapshot?.upstreams - .map((item) => `${item.upstreamId}:${item.provider}`) + .map((item) => item.upstreamId) + .join(",") ?? ""} +
+
+ {accountOptions + .map((item) => item.accountId ?? "public") .join(",") ?? ""}
+ + ) } @@ -103,6 +146,22 @@ describe("dashboard/useDashboardSnapshot", () => { providers: [ { provider: "openai", requests: 1, totalTokens: 30, cachedTokens: 5 }, ], + accounts: [ + { + upstreamId: "alpha", + accountId: "codex-a.json", + requests: 1, + totalTokens: 30, + cachedTokens: 5, + }, + { + upstreamId: "alpha", + accountId: null, + requests: 1, + totalTokens: 5, + cachedTokens: 1, + }, + ], recent: [ { id: 1, @@ -134,13 +193,15 @@ describe("dashboard/useDashboardSnapshot", () => { }, offset: 0, upstreamId: null, + accountId: null, + publicOnly: false, }) }) expect(screen.getByTestId("selected-upstream")).toHaveTextContent("all") - expect(screen.getByTestId("upstream-options")).toHaveTextContent( - "alpha:openai,beta:anthropic" - ) + expect(screen.getByTestId("selected-account")).toHaveTextContent("all") + expect(screen.getByTestId("upstream-options")).toHaveTextContent("alpha,beta") + expect(screen.getByTestId("account-options")).toHaveTextContent("") fireEvent.click(screen.getByRole("button", { name: "filter-alpha" })) @@ -152,9 +213,31 @@ describe("dashboard/useDashboardSnapshot", () => { }, offset: 0, upstreamId: "alpha", + accountId: null, + publicOnly: false, }) }) expect(screen.getByTestId("selected-upstream")).toHaveTextContent("alpha") + expect(screen.getByTestId("account-options")).toHaveTextContent( + "codex-a.json,public" + ) + + fireEvent.click(screen.getByRole("button", { name: "filter-account" })) + + await waitFor(() => { + expect(readDashboardSnapshotMock).toHaveBeenNthCalledWith(3, { + range: { + fromTsMs: expect.any(Number), + toTsMs: expect.any(Number), + }, + offset: 0, + upstreamId: "alpha", + accountId: "codex-a.json", + publicOnly: false, + }) + }) + + expect(screen.getByTestId("selected-account")).toHaveTextContent("codex-a.json") }) }) diff --git a/src/features/dashboard/snapshot.tsx b/src/features/dashboard/snapshot.tsx index 13de698..c04e23e 100644 --- a/src/features/dashboard/snapshot.tsx +++ 
b/src/features/dashboard/snapshot.tsx @@ -21,6 +21,7 @@ import { toDashboardTimeRange, } from "@/features/dashboard/range" import type { + DashboardAccountOption, DashboardRange, DashboardSnapshot, DashboardUpstreamOption, @@ -31,6 +32,8 @@ import { m } from "@/paraglide/messages.js" export const RECENT_PAGE_SIZE = 50 const ALL_UPSTREAMS_VALUE = "__all_upstreams__" +const ALL_ACCOUNTS_VALUE = "__all_accounts__" +const PUBLIC_ACCOUNT_VALUE = "__public_account__" type DashboardStatus = "idle" | "loading" | "error" @@ -41,6 +44,20 @@ function hasUpstreamOption( return upstreams.some((item) => item.upstreamId === upstreamId) } +function hasAccountOption( + accounts: DashboardAccountOption[], + accountId: string | null, + publicOnly: boolean +) { + if (publicOnly) { + return accounts.some((item) => item.accountId === null) + } + if (accountId === null) { + return true + } + return accounts.some((item) => item.accountId === accountId) +} + function usePagination(totalRequests: number) { const [page, setPage] = useState(1) const totalPages = Math.max(1, Math.ceil(totalRequests / RECENT_PAGE_SIZE)) @@ -72,6 +89,8 @@ export function useDashboardSnapshot() { const [rangePreset, setRangePreset] = useState("today") const [snapshot, setSnapshot] = useState(null) const [selectedUpstreamId, setSelectedUpstreamId] = useState(null) + const [selectedAccountId, setSelectedAccountId] = useState(null) + const [selectedPublicOnly, setSelectedPublicOnly] = useState(false) const [activeRange, setActiveRange] = useState(() => resolveDashboardRange("today") ) @@ -93,6 +112,8 @@ export function useDashboardSnapshot() { range, offset, upstreamId: selectedUpstreamId, + accountId: selectedAccountId, + publicOnly: selectedPublicOnly, }) if (requestSeq.current !== requestId) { return @@ -107,6 +128,17 @@ export function useDashboardSnapshot() { setStatus("loading") return } + const visibleAccountOptions = + selectedUpstreamId === null ? 
[] : data.accounts + if ( + selectedUpstreamId !== null && + !hasAccountOption(visibleAccountOptions, selectedAccountId, selectedPublicOnly) + ) { + setSelectedAccountId(null) + setSelectedPublicOnly(false) + setStatus("loading") + return + } setSnapshot(data) setActiveRange(range) setStatus("idle") @@ -117,7 +149,7 @@ export function useDashboardSnapshot() { setStatus("error") setStatusMessage(parseError(error)) } - }, [page, rangePreset, selectedUpstreamId]) + }, [page, rangePreset, selectedAccountId, selectedPublicOnly, selectedUpstreamId]) useEffect(() => { // 提交后一拍再启动请求,避免 effect 同步路径被误判为级联 setState。 @@ -141,6 +173,15 @@ export function useDashboardSnapshot() { const handleUpstreamChange = useCallback((nextUpstreamId: string | null) => { markLoading() setSelectedUpstreamId(nextUpstreamId) + setSelectedAccountId(null) + setSelectedPublicOnly(false) + resetPage() + }, [markLoading, resetPage]) + + const handleAccountChange = useCallback((nextAccountId: string | null, nextPublicOnly: boolean) => { + markLoading() + setSelectedAccountId(nextAccountId) + setSelectedPublicOnly(nextPublicOnly) resetPage() }, [markLoading, resetPage]) @@ -166,11 +207,15 @@ export function useDashboardSnapshot() { activeRange, rangePreset, selectedUpstreamId, + selectedAccountId, + selectedPublicOnly, upstreamOptions: snapshot?.upstreams ?? [], + accountOptions: selectedUpstreamId === null ? [] : (snapshot?.accounts ?? []), pagination: { page, totalPages, totalRequests }, refresh, onRangeChange: handleRangeChange, onUpstreamChange: handleUpstreamChange, + onAccountChange: handleAccountChange, onPrevPage: handlePrevPage, onNextPage: handleNextPage, } @@ -184,13 +229,37 @@ function toUpstreamFilterValue(value: string) { return value === ALL_UPSTREAMS_VALUE ? 
null : value } +function resolveAccountSelectValue(accountId: string | null, publicOnly: boolean) { + if (publicOnly) { + return PUBLIC_ACCOUNT_VALUE + } + if (accountId === null) { + return ALL_ACCOUNTS_VALUE + } + return `account:${accountId}` +} + +function toAccountFilterValue(value: string) { + if (value === ALL_ACCOUNTS_VALUE) { + return { accountId: null, publicOnly: false } + } + if (value === PUBLIC_ACCOUNT_VALUE) { + return { accountId: null, publicOnly: true } + } + return { accountId: value.replace(/^account:/, ""), publicOnly: false } +} + type DashboardFiltersProps = { range: DashboardTimeRange upstreamId: string | null upstreamOptions: DashboardUpstreamOption[] + accountId: string | null + publicOnly: boolean + accountOptions: DashboardAccountOption[] loading: boolean onRangeChange: (range: DashboardTimeRange) => void onUpstreamChange: (upstreamId: string | null) => void + onAccountChange: (accountId: string | null, publicOnly: boolean) => void onRefresh: () => void /** 请求详情捕获相关,仅 LogsPanel 使用 */ capture?: { @@ -205,9 +274,13 @@ export function DashboardFilters({ range, upstreamId, upstreamOptions, + accountId, + publicOnly, + accountOptions, loading, onRangeChange, onUpstreamChange, + onAccountChange, onRefresh, capture, }: DashboardFiltersProps) { @@ -261,11 +334,45 @@ export function DashboardFilters({ {upstreamOptions.map((option) => ( - {option.upstreamId} · {option.provider} + {option.upstreamId} ))} + + +
       {capture ? (
diff --git a/src/features/dashboard/types.ts b/src/features/dashboard/types.ts
index 9cd6852..fcbe248 100644
--- a/src/features/dashboard/types.ts
+++ b/src/features/dashboard/types.ts
@@ -24,7 +24,14 @@ export type DashboardProviderStat = {
 
 export type DashboardUpstreamOption = {
   upstreamId: string;
-  provider: string;
+  requests: number;
+  totalTokens: number;
+  cachedTokens: number;
+};
+
+export type DashboardAccountOption = {
+  upstreamId: string;
+  accountId: string | null;
   requests: number;
   totalTokens: number;
   cachedTokens: number;
@@ -62,6 +69,7 @@ export type DashboardSnapshot = {
   summary: DashboardSummary;
   providers: DashboardProviderStat[];
   upstreams: DashboardUpstreamOption[];
+  accounts: DashboardAccountOption[];
   series: DashboardSeriesPoint[];
   recent: DashboardRequestItem[];
   truncated: boolean;
@@ -71,4 +79,6 @@ export type DashboardSnapshotQuery = {
   range: DashboardRange;
   offset?: number;
   upstreamId?: string | null;
+  accountId?: string | null;
+  publicOnly?: boolean;
 };
diff --git a/src/features/logs/LogsPanel.test.tsx b/src/features/logs/LogsPanel.test.tsx
index e24efc3..a4ca1c2 100644
--- a/src/features/logs/LogsPanel.test.tsx
+++ b/src/features/logs/LogsPanel.test.tsx
@@ -106,7 +106,7 @@ describe("logs/LogsPanel", () => {
       responseError: null,
     });
     readDashboardSnapshotMock.mockImplementation(
-      async ({ upstreamId }: DashboardSnapshotQuery) => {
+      async ({ upstreamId, accountId, publicOnly }: DashboardSnapshotQuery) => {
         const base = {
           providers: [
             {
@@ -121,18 +121,45 @@ describe("logs/LogsPanel", () => {
               totalTokens: 7,
               cachedTokens: 1,
             },
+            {
+              provider: "openai-response",
+              requests: 1,
+              totalTokens: 5,
+              cachedTokens: 1,
+            },
           ],
           upstreams: [
             {
               upstreamId: "alpha",
-              provider: "openai",
+              requests: 2,
+              totalTokens: 35,
+              cachedTokens: 6,
+            },
+            {
+              upstreamId: "beta",
+              requests: 1,
+              totalTokens: 7,
+              cachedTokens: 1,
+            },
+          ],
+          accounts: [
+            {
+              upstreamId: "alpha",
+              accountId: "codex-a.json",
               requests: 1,
               totalTokens: 30,
cachedTokens: 5, }, + { + upstreamId: "alpha", + accountId: null, + requests: 1, + totalTokens: 5, + cachedTokens: 1, + }, { upstreamId: "beta", - provider: "anthropic", + accountId: null, requests: 1, totalTokens: 7, cachedTokens: 1, @@ -142,7 +169,7 @@ describe("logs/LogsPanel", () => { truncated: false, }; - if (upstreamId === "alpha") { + if (upstreamId === "alpha" && accountId === "codex-a.json") { return { ...base, summary: { @@ -163,16 +190,102 @@ describe("logs/LogsPanel", () => { path: "/v1/chat/completions", provider: "openai", upstreamId: "alpha", + accountId: "codex-a.json", + model: "gpt-5", + mappedModel: null, + stream: false, + status: 200, + totalTokens: 30, + cachedTokens: 5, + latencyMs: 30, + upstreamRequestId: null, + }, + ], + }; + } + + if (upstreamId === "alpha" && publicOnly) { + return { + ...base, + summary: { + totalRequests: 1, + successRequests: 1, + errorRequests: 0, + totalTokens: 5, + inputTokens: 2, + outputTokens: 3, + cachedTokens: 1, + avgLatencyMs: 40, + medianLatencyMs: 40, + }, + recent: [ + { + id: 3, + tsMs: 110, + path: "/v1/responses", + provider: "openai-response", + upstreamId: "alpha", accountId: null, model: "gpt-5", mappedModel: null, stream: false, status: 200, + totalTokens: 5, + cachedTokens: 1, + latencyMs: 40, + upstreamRequestId: null, + }, + ], + }; + } + + if (upstreamId === "alpha") { + return { + ...base, + summary: { + totalRequests: 2, + successRequests: 2, + errorRequests: 0, + totalTokens: 35, + inputTokens: 12, + outputTokens: 23, + cachedTokens: 6, + avgLatencyMs: 35, + medianLatencyMs: 35, + }, + recent: [ + { + id: 1, + tsMs: 100, + path: "/v1/chat/completions", + provider: "openai", + upstreamId: "alpha", + accountId: "codex-a.json", + model: "gpt-5", + mappedModel: null, + stream: false, + status: 200, totalTokens: 30, cachedTokens: 5, latencyMs: 30, upstreamRequestId: null, }, + { + id: 3, + tsMs: 110, + path: "/v1/responses", + provider: "openai-response", + upstreamId: "alpha", + accountId: 
null, + model: "gpt-5", + mappedModel: null, + stream: false, + status: 200, + totalTokens: 5, + cachedTokens: 1, + latencyMs: 40, + upstreamRequestId: null, + }, ], }; } @@ -180,15 +293,15 @@ describe("logs/LogsPanel", () => { return { ...base, summary: { - totalRequests: 2, - successRequests: 1, + totalRequests: 3, + successRequests: 2, errorRequests: 1, - totalTokens: 37, - inputTokens: 13, - outputTokens: 24, - cachedTokens: 6, - avgLatencyMs: 60, - medianLatencyMs: 60, + totalTokens: 42, + inputTokens: 15, + outputTokens: 27, + cachedTokens: 7, + avgLatencyMs: 53, + medianLatencyMs: 40, }, recent: [ { @@ -196,8 +309,8 @@ describe("logs/LogsPanel", () => { tsMs: 100, path: "/v1/chat/completions", provider: "openai", - upstreamId: "alpha", - accountId: null, + upstreamId: "alpha", + accountId: "codex-a.json", model: "gpt-5", mappedModel: null, stream: false, @@ -207,6 +320,22 @@ describe("logs/LogsPanel", () => { latencyMs: 30, upstreamRequestId: null, }, + { + id: 3, + tsMs: 110, + path: "/v1/responses", + provider: "openai-response", + upstreamId: "alpha", + accountId: null, + model: "gpt-5", + mappedModel: null, + stream: false, + status: 200, + totalTokens: 5, + cachedTokens: 1, + latencyMs: 40, + upstreamRequestId: null, + }, { id: 2, tsMs: 120, @@ -235,7 +364,8 @@ describe("logs/LogsPanel", () => { renderPanel(); await waitFor(() => { - expect(screen.getByTestId("logs-items")).toHaveTextContent("alpha · openai"); + expect(screen.getByTestId("logs-items")).toHaveTextContent("alpha · openai · codex-a.json"); + expect(screen.getByTestId("logs-items")).toHaveTextContent("alpha · openai-response"); expect(screen.getByTestId("logs-items")).toHaveTextContent("beta · anthropic"); }); @@ -243,7 +373,7 @@ describe("logs/LogsPanel", () => { screen.getByRole("combobox", { name: m.dashboard_upstream_label() }) ); await user.click( - await screen.findByRole("option", { name: "alpha · openai" }) + await screen.findByRole("option", { name: "alpha" }) ); await waitFor(() 
=> { @@ -254,20 +384,61 @@ describe("logs/LogsPanel", () => { range: { fromTsMs: expect.any(Number), toTsMs: expect.any(Number) }, offset: 0, upstreamId: "alpha", + accountId: null, + publicOnly: false, } ); }); + it("narrows logs again after selecting account under chosen upstream", async () => { + const user = userEvent.setup(); + + renderPanel(); + + await waitFor(() => { + expect(screen.getByTestId("logs-items")).toHaveTextContent("alpha · openai · codex-a.json"); + }); + + await user.click( + screen.getByRole("combobox", { name: m.dashboard_upstream_label() }) + ); + await user.click( + await screen.findByRole("option", { name: "alpha" }) + ); + + await user.click( + screen.getByRole("combobox", { name: m.dashboard_account_label() }) + ); + await user.click( + await screen.findByRole("option", { name: "codex-a.json" }) + ); + + await waitFor(() => { + expect(readDashboardSnapshotMock).toHaveBeenLastCalledWith({ + range: { fromTsMs: expect.any(Number), toTsMs: expect.any(Number) }, + offset: 0, + upstreamId: "alpha", + accountId: "codex-a.json", + publicOnly: false, + }); + }); + expect(screen.getByTestId("logs-items")).not.toHaveTextContent("openai-response"); + }); + it("shows account id in the provider field inside request detail", async () => { const user = userEvent.setup(); renderPanel(); await waitFor(() => { - expect(screen.getByRole("button", { name: "alpha · openai" })).toBeInTheDocument(); + expect( + screen.getByRole("button", { name: "alpha · openai · codex-a.json" }) + ).toBeInTheDocument(); }); - await user.click(screen.getByRole("button", { name: "alpha · openai" })); + await user.click( + screen.getByRole("button", { name: "alpha · openai · codex-a.json" }) + ); await waitFor(() => { expect(readRequestLogDetailMock).toHaveBeenCalledWith(1); @@ -283,10 +454,14 @@ describe("logs/LogsPanel", () => { renderPanel(); await waitFor(() => { - expect(screen.getByRole("button", { name: "alpha · openai" })).toBeInTheDocument(); + expect( + 
screen.getByRole("button", { name: "alpha · openai · codex-a.json" })
+      ).toBeInTheDocument();
     });
 
-    await user.click(screen.getByRole("button", { name: "alpha · openai" }));
+    await user.click(
+      screen.getByRole("button", { name: "alpha · openai · codex-a.json" })
+    );
 
     await waitFor(() => {
       expect(readRequestLogDetailMock).toHaveBeenCalledWith(1);
diff --git a/src/features/logs/LogsPanel.tsx b/src/features/logs/LogsPanel.tsx
index 31b1269..c9473ba 100644
--- a/src/features/logs/LogsPanel.tsx
+++ b/src/features/logs/LogsPanel.tsx
@@ -357,11 +357,15 @@ export function LogsPanel() {
     statusMessage,
     rangePreset,
     selectedUpstreamId,
+    selectedAccountId,
+    selectedPublicOnly,
     upstreamOptions,
+    accountOptions,
     pagination,
     refresh,
     onRangeChange,
     onUpstreamChange,
+    onAccountChange,
     onPrevPage,
     onNextPage,
   } = useDashboardSnapshot();
@@ -535,9 +539,13 @@
         range={rangePreset}
         upstreamId={selectedUpstreamId}
         upstreamOptions={upstreamOptions}
+        accountId={selectedAccountId}
+        publicOnly={selectedPublicOnly}
+        accountOptions={accountOptions}
         loading={isLoading}
         onRangeChange={onRangeChange}
         onUpstreamChange={onUpstreamChange}
+        onAccountChange={onAccountChange}
         onRefresh={refresh}
         capture={{
           enabled: captureEnabled,