Backend: Added optional `environment` query parameter to catalog, search, stats, timeseries, punchcard, top-errors, logs, and agents endpoints. ClickHouse queries filter by environment when specified (literal SQL for AggregatingMergeTree, ? binds for raw tables). StatsStore interface methods all accept environment parameter. UI: Added EnvironmentSelector component (compact native select). LayoutShell extracts distinct environments from agent data and passes selected environment to catalog and agent queries via URL search param (?env=). TopBar shows current environment label. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
159 lines
4.7 KiB
TypeScript
import { useQuery } from '@tanstack/react-query';
|
|
import { api } from '../client';
|
|
import type { SearchRequest } from '../types';
|
|
import { useLiveQuery } from './use-refresh-interval';
|
|
|
|
export function useExecutionStats(
|
|
timeFrom: string | undefined,
|
|
timeTo: string | undefined,
|
|
routeId?: string,
|
|
application?: string,
|
|
environment?: string,
|
|
) {
|
|
const live = useLiveQuery(10_000);
|
|
return useQuery({
|
|
queryKey: ['executions', 'stats', timeFrom, timeTo, routeId, application, environment],
|
|
queryFn: async () => {
|
|
const { data, error } = await api.GET('/search/stats', {
|
|
params: {
|
|
query: {
|
|
from: timeFrom!,
|
|
to: timeTo || undefined,
|
|
routeId: routeId || undefined,
|
|
application: application || undefined,
|
|
environment: environment || undefined,
|
|
},
|
|
},
|
|
});
|
|
if (error) throw new Error('Failed to load stats');
|
|
return data!;
|
|
},
|
|
enabled: !!timeFrom && live.enabled,
|
|
placeholderData: (prev) => prev,
|
|
refetchInterval: live.refetchInterval,
|
|
});
|
|
}
|
|
|
|
export function useAttributeKeys() {
|
|
return useQuery({
|
|
queryKey: ['search', 'attribute-keys'],
|
|
queryFn: async () => {
|
|
const token = (await import('../../auth/auth-store')).useAuthStore.getState().accessToken;
|
|
const { config } = await import('../../config');
|
|
const res = await fetch(`${config.apiBaseUrl}/search/attributes/keys`, {
|
|
headers: { Authorization: `Bearer ${token}` },
|
|
});
|
|
if (!res.ok) throw new Error('Failed to load attribute keys');
|
|
return res.json() as Promise<string[]>;
|
|
},
|
|
staleTime: 60_000,
|
|
});
|
|
}
|
|
|
|
export function useSearchExecutions(filters: SearchRequest, live = false) {
|
|
const liveQuery = useLiveQuery(5_000);
|
|
return useQuery({
|
|
queryKey: ['executions', 'search', filters],
|
|
queryFn: async () => {
|
|
const { data, error } = await api.POST('/search/executions', {
|
|
body: filters,
|
|
});
|
|
if (error) throw new Error('Search failed');
|
|
return data!;
|
|
},
|
|
placeholderData: (prev) => prev,
|
|
enabled: live ? liveQuery.enabled : true,
|
|
refetchInterval: live ? liveQuery.refetchInterval : false,
|
|
});
|
|
}
|
|
|
|
export function useStatsTimeseries(
|
|
timeFrom: string | undefined,
|
|
timeTo: string | undefined,
|
|
routeId?: string,
|
|
application?: string,
|
|
environment?: string,
|
|
) {
|
|
const live = useLiveQuery(30_000);
|
|
return useQuery({
|
|
queryKey: ['executions', 'timeseries', timeFrom, timeTo, routeId, application, environment],
|
|
queryFn: async () => {
|
|
const { data, error } = await api.GET('/search/stats/timeseries', {
|
|
params: {
|
|
query: {
|
|
from: timeFrom!,
|
|
to: timeTo || undefined,
|
|
buckets: 24,
|
|
routeId: routeId || undefined,
|
|
application: application || undefined,
|
|
environment: environment || undefined,
|
|
},
|
|
},
|
|
});
|
|
if (error) throw new Error('Failed to load timeseries');
|
|
return data!;
|
|
},
|
|
enabled: !!timeFrom && live.enabled,
|
|
placeholderData: (prev) => prev,
|
|
refetchInterval: live.refetchInterval,
|
|
});
|
|
}
|
|
|
|
export function useExecutionDetail(executionId: string | null) {
|
|
return useQuery({
|
|
queryKey: ['executions', 'detail', executionId],
|
|
queryFn: async () => {
|
|
const { data, error } = await api.GET('/executions/{executionId}', {
|
|
params: { path: { executionId: executionId! } },
|
|
});
|
|
if (error) throw new Error('Failed to load execution detail');
|
|
return data!;
|
|
},
|
|
enabled: !!executionId,
|
|
});
|
|
}
|
|
|
|
export function useProcessorSnapshot(
|
|
executionId: string | null,
|
|
index: number | null,
|
|
) {
|
|
return useQuery({
|
|
queryKey: ['executions', 'snapshot', executionId, index],
|
|
queryFn: async () => {
|
|
const { data, error } = await api.GET(
|
|
'/executions/{executionId}/processors/{index}/snapshot',
|
|
{
|
|
params: {
|
|
path: { executionId: executionId!, index: index! },
|
|
},
|
|
},
|
|
);
|
|
if (error) throw new Error('Failed to load snapshot');
|
|
return data!;
|
|
},
|
|
enabled: !!executionId && index !== null,
|
|
});
|
|
}
|
|
|
|
export function useProcessorSnapshotById(
|
|
executionId: string | null,
|
|
processorId: string | null,
|
|
) {
|
|
return useQuery({
|
|
queryKey: ['executions', 'snapshot-by-id', executionId, processorId],
|
|
queryFn: async () => {
|
|
const { data, error } = await api.GET(
|
|
'/executions/{executionId}/processors/by-id/{processorId}/snapshot',
|
|
{
|
|
params: {
|
|
path: { executionId: executionId!, processorId: processorId! },
|
|
},
|
|
},
|
|
);
|
|
if (error) throw new Error('Failed to load snapshot');
|
|
return data!;
|
|
},
|
|
enabled: !!executionId && !!processorId,
|
|
});
|
|
}
|