Make stats endpoint respect selected time window instead of hardcoded last hour
All checks were successful
CI / build (push) Successful in 1m10s
CI / docker (push) Successful in 48s
CI / deploy (push) Successful in 28s

P99 latency and active count now use the same from/to parameters as the
timeseries sparklines, so all stat cards are consistent with the user's
selected time range.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
hsiegeln
2026-03-13 22:19:59 +01:00
parent 6794e4c234
commit cdf4c93630
9 changed files with 57 additions and 18 deletions

View File

@@ -70,8 +70,11 @@ public class SearchController {
@GetMapping("/stats") @GetMapping("/stats")
@Operation(summary = "Aggregate execution stats (P99 latency, active count)") @Operation(summary = "Aggregate execution stats (P99 latency, active count)")
public ResponseEntity<ExecutionStats> stats() { public ResponseEntity<ExecutionStats> stats(
return ResponseEntity.ok(searchService.stats()); @RequestParam Instant from,
@RequestParam(required = false) Instant to) {
Instant end = to != null ? to : Instant.now();
return ResponseEntity.ok(searchService.stats(from, end));
} }
@GetMapping("/stats/timeseries") @GetMapping("/stats/timeseries")

View File

@@ -88,11 +88,11 @@ public class ClickHouseSearchEngine implements SearchEngine {
} }
@Override @Override
public ExecutionStats stats() { public ExecutionStats stats(Instant from, Instant to) {
Long p99 = jdbcTemplate.queryForObject( Long p99 = jdbcTemplate.queryForObject(
"SELECT quantile(0.99)(duration_ms) FROM route_executions " + "SELECT quantile(0.99)(duration_ms) FROM route_executions " +
"WHERE start_time >= now() - INTERVAL 1 HOUR", "WHERE start_time >= ? AND start_time <= ?",
Long.class); Long.class, Timestamp.from(from), Timestamp.from(to));
Long active = jdbcTemplate.queryForObject( Long active = jdbcTemplate.queryForObject(
"SELECT count() FROM route_executions WHERE status = 'RUNNING'", "SELECT count() FROM route_executions WHERE status = 'RUNNING'",
Long.class); Long.class);

View File

@@ -3,7 +3,7 @@ package com.cameleer3.server.core.search;
/**
 * Aggregate execution statistics returned by the stats endpoint.
 *
 * @param p99LatencyMs 99th percentile execution duration in milliseconds within the requested time window
 * @param activeCount  number of currently running executions
 */
public record ExecutionStats(long p99LatencyMs, long activeCount) {}

View File

@@ -28,9 +28,11 @@ public interface SearchEngine {
/** /**
* Compute aggregate stats: P99 latency and count of currently running executions. * Compute aggregate stats: P99 latency and count of currently running executions.
* *
* @param from start of the time window
* @param to end of the time window
* @return execution stats * @return execution stats
*/ */
ExecutionStats stats(); ExecutionStats stats(java.time.Instant from, java.time.Instant to);
/** /**
* Compute bucketed time-series stats over a time window. * Compute bucketed time-series stats over a time window.

View File

@@ -32,8 +32,8 @@ public class SearchService {
/** /**
* Compute aggregate execution stats (P99 latency, active count). * Compute aggregate execution stats (P99 latency, active count).
*/ */
public ExecutionStats stats() { public ExecutionStats stats(java.time.Instant from, java.time.Instant to) {
return engine.stats(); return engine.stats(from, to);
} }
/** /**

View File

@@ -664,6 +664,26 @@
], ],
"summary": "Aggregate execution stats (P99 latency, active count)", "summary": "Aggregate execution stats (P99 latency, active count)",
"operationId": "stats", "operationId": "stats",
"parameters": [
{
"name": "from",
"in": "query",
"required": true,
"schema": {
"type": "string",
"format": "date-time"
}
},
{
"name": "to",
"in": "query",
"required": false,
"schema": {
"type": "string",
"format": "date-time"
}
}
],
"responses": { "responses": {
"200": { "200": {
"description": "OK", "description": "OK",

View File

@@ -2,14 +2,23 @@ import { useQuery } from '@tanstack/react-query';
import { api } from '../client'; import { api } from '../client';
import type { SearchRequest } from '../schema'; import type { SearchRequest } from '../schema';
export function useExecutionStats() { export function useExecutionStats(timeFrom: string | undefined, timeTo: string | undefined) {
return useQuery({ return useQuery({
queryKey: ['executions', 'stats'], queryKey: ['executions', 'stats', timeFrom, timeTo],
queryFn: async () => { queryFn: async () => {
const { data, error } = await api.GET('/search/stats'); const { data, error } = await api.GET('/search/stats', {
params: {
query: {
from: timeFrom!,
to: timeTo || undefined,
},
},
});
if (error) throw new Error('Failed to load stats'); if (error) throw new Error('Failed to load stats');
return data!; return data!;
}, },
enabled: !!timeFrom,
placeholderData: (prev) => prev,
refetchInterval: 10_000, refetchInterval: 10_000,
}); });
} }

View File

@@ -97,6 +97,12 @@ export interface paths {
}; };
'/search/stats': { '/search/stats': {
get: { get: {
parameters: {
query: {
from: string;
to?: string;
};
};
responses: { responses: {
200: { 200: {
content: { content: {

View File

@@ -10,11 +10,10 @@ export function ExecutionExplorer() {
const { toSearchRequest, offset, limit, setOffset, live, toggleLive } = useExecutionSearch(); const { toSearchRequest, offset, limit, setOffset, live, toggleLive } = useExecutionSearch();
const searchRequest = toSearchRequest(); const searchRequest = toSearchRequest();
const { data, isLoading, isFetching } = useSearchExecutions(searchRequest, live); const { data, isLoading, isFetching } = useSearchExecutions(searchRequest, live);
const { data: stats } = useExecutionStats(); const timeFrom = searchRequest.timeFrom ?? undefined;
const { data: timeseries } = useStatsTimeseries( const timeTo = searchRequest.timeTo ?? undefined;
searchRequest.timeFrom ?? undefined, const { data: stats } = useExecutionStats(timeFrom, timeTo);
searchRequest.timeTo ?? undefined, const { data: timeseries } = useStatsTimeseries(timeFrom, timeTo);
);
const sparkTotal = timeseries?.buckets.map((b) => b.totalCount) ?? []; const sparkTotal = timeseries?.buckets.map((b) => b.totalCount) ?? [];
const sparkFailed = timeseries?.buckets.map((b) => b.failedCount) ?? []; const sparkFailed = timeseries?.buckets.map((b) => b.failedCount) ?? [];
@@ -53,7 +52,7 @@ export function ExecutionExplorer() {
<StatCard label="Total Matches" value={total.toLocaleString()} accent="amber" change={`from current search`} sparkData={sparkTotal} /> <StatCard label="Total Matches" value={total.toLocaleString()} accent="amber" change={`from current search`} sparkData={sparkTotal} />
<StatCard label="Avg Duration" value={`${avgDuration}ms`} accent="cyan" sparkData={sparkAvgDuration} /> <StatCard label="Avg Duration" value={`${avgDuration}ms`} accent="cyan" sparkData={sparkAvgDuration} />
<StatCard label="Failed (page)" value={failedCount.toString()} accent="rose" sparkData={sparkFailed} /> <StatCard label="Failed (page)" value={failedCount.toString()} accent="rose" sparkData={sparkFailed} />
<StatCard label="P99 Latency" value={stats ? `${stats.p99LatencyMs}ms` : '--'} accent="green" change="last hour" sparkData={sparkP99} /> <StatCard label="P99 Latency" value={stats ? `${stats.p99LatencyMs}ms` : '--'} accent="green" sparkData={sparkP99} />
<StatCard label="In-Flight" value={stats ? stats.activeCount.toString() : '--'} accent="blue" change="running executions" sparkData={sparkActive} /> <StatCard label="In-Flight" value={stats ? stats.activeCount.toString() : '--'} accent="blue" change="running executions" sparkData={sparkActive} />
</div> </div>