Page 1 refetches were using the captured timeRange.end, so rows arriving after the initial render were outside the query window and never surfaced. When timeRange.preset is set (e.g. 'last 1h'), each fetch now advances 'to' to Date.now() so the poll picks up new rows. Absolute ranges are unchanged. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
245 lines
8.2 KiB
TypeScript
import { useQuery } from '@tanstack/react-query';
|
|
import { config } from '../../config';
|
|
import { useAuthStore } from '../../auth/auth-store';
|
|
import { useRefreshInterval } from './use-refresh-interval';
|
|
import { useGlobalFilters } from '@cameleer/design-system';
|
|
import { useEnvironmentStore } from '../environment-store';
|
|
import { useInfiniteStream, type UseInfiniteStreamResult } from '../../hooks/useInfiniteStream';
|
|
|
|
export interface LogEntryResponse {
|
|
timestamp: string;
|
|
level: string;
|
|
loggerName: string | null;
|
|
message: string;
|
|
threadName: string | null;
|
|
stackTrace: string | null;
|
|
exchangeId: string | null;
|
|
instanceId: string | null;
|
|
application: string | null;
|
|
mdc: Record<string, string> | null;
|
|
source: string | null;
|
|
}
|
|
|
|
export interface LogSearchPageResponse {
|
|
data: LogEntryResponse[];
|
|
nextCursor: string | null;
|
|
hasMore: boolean;
|
|
levelCounts: Record<string, number>;
|
|
}
|
|
|
|
export interface LogSearchParams {
|
|
q?: string;
|
|
level?: string;
|
|
application?: string;
|
|
agentId?: string;
|
|
source?: string;
|
|
/** Required: env in path */
|
|
environment: string;
|
|
exchangeId?: string;
|
|
logger?: string;
|
|
from?: string;
|
|
to?: string;
|
|
cursor?: string;
|
|
limit?: number;
|
|
sort?: 'asc' | 'desc';
|
|
}
|
|
|
|
async function fetchLogs(params: LogSearchParams): Promise<LogSearchPageResponse> {
|
|
const token = useAuthStore.getState().accessToken;
|
|
const urlParams = new URLSearchParams();
|
|
if (params.q) urlParams.set('q', params.q);
|
|
if (params.level) urlParams.set('level', params.level);
|
|
if (params.application) urlParams.set('application', params.application);
|
|
if (params.agentId) urlParams.set('agentId', params.agentId);
|
|
if (params.source) urlParams.set('source', params.source);
|
|
if (params.exchangeId) urlParams.set('exchangeId', params.exchangeId);
|
|
if (params.logger) urlParams.set('logger', params.logger);
|
|
if (params.from) urlParams.set('from', params.from);
|
|
if (params.to) urlParams.set('to', params.to);
|
|
if (params.cursor) urlParams.set('cursor', params.cursor);
|
|
if (params.limit) urlParams.set('limit', String(params.limit));
|
|
if (params.sort) urlParams.set('sort', params.sort);
|
|
|
|
const res = await fetch(
|
|
`${config.apiBaseUrl}/environments/${encodeURIComponent(params.environment)}/logs?${urlParams}`, {
|
|
headers: {
|
|
Authorization: `Bearer ${token}`,
|
|
'X-Cameleer-Protocol-Version': '1',
|
|
},
|
|
});
|
|
if (!res.ok) throw new Error('Failed to load logs');
|
|
return res.json() as Promise<LogSearchPageResponse>;
|
|
}
|
|
|
|
/**
|
|
* Primary log search hook with cursor pagination and level counts.
|
|
*/
|
|
export function useLogs(
|
|
params: LogSearchParams,
|
|
options?: { enabled?: boolean; refetchInterval?: number | false },
|
|
) {
|
|
const defaultRefetch = useRefreshInterval(15_000);
|
|
|
|
return useQuery({
|
|
queryKey: ['logs', params],
|
|
queryFn: () => fetchLogs(params),
|
|
enabled: (options?.enabled ?? true) && !!params.environment,
|
|
placeholderData: (prev) => prev,
|
|
refetchInterval: options?.refetchInterval ?? defaultRefetch,
|
|
staleTime: 300,
|
|
});
|
|
}
|
|
|
|
/**
|
|
* Backward-compatible wrapper for existing consumers (LogTab, AgentHealth, AgentInstance).
|
|
* Returns the same shape they expect: data is the LogEntryResponse[] (unwrapped from the page response).
|
|
*/
|
|
export function useApplicationLogs(
|
|
application?: string,
|
|
agentId?: string,
|
|
options?: { limit?: number; toOverride?: string; exchangeId?: string; source?: string },
|
|
) {
|
|
const refetchInterval = useRefreshInterval(15_000);
|
|
const { timeRange } = useGlobalFilters();
|
|
const selectedEnv = useEnvironmentStore((s) => s.environment);
|
|
const to = options?.toOverride ?? timeRange.end.toISOString();
|
|
const useTimeRange = !options?.exchangeId;
|
|
|
|
const params: LogSearchParams = {
|
|
application: application || undefined,
|
|
agentId: agentId || undefined,
|
|
source: options?.source || undefined,
|
|
environment: selectedEnv ?? '',
|
|
exchangeId: options?.exchangeId || undefined,
|
|
from: useTimeRange ? timeRange.start.toISOString() : undefined,
|
|
to: useTimeRange ? to : undefined,
|
|
limit: options?.limit,
|
|
};
|
|
|
|
const query = useQuery({
|
|
queryKey: ['logs', 'compat', application, agentId, selectedEnv,
|
|
useTimeRange ? timeRange.start.toISOString() : null,
|
|
useTimeRange ? to : null,
|
|
options?.limit, options?.exchangeId, options?.source],
|
|
queryFn: () => fetchLogs(params),
|
|
enabled: !!application && !!selectedEnv,
|
|
placeholderData: (prev) => prev,
|
|
refetchInterval,
|
|
});
|
|
|
|
// Unwrap: existing consumers expect data to be LogEntryResponse[] directly
|
|
return {
|
|
...query,
|
|
data: query.data?.data ?? (undefined as LogEntryResponse[] | undefined),
|
|
};
|
|
}
|
|
|
|
/**
|
|
* Fetches container startup logs for a deployment.
|
|
* Polls every 3s while the deployment is STARTING, stops when RUNNING/FAILED.
|
|
*/
|
|
export function useStartupLogs(
|
|
application: string | undefined,
|
|
environment: string | undefined,
|
|
deployCreatedAt: string | undefined,
|
|
isStarting: boolean,
|
|
) {
|
|
const params: LogSearchParams = {
|
|
application: application || undefined,
|
|
environment: environment ?? '',
|
|
source: 'container',
|
|
from: deployCreatedAt || undefined,
|
|
sort: 'asc',
|
|
limit: 500,
|
|
};
|
|
|
|
return useLogs(params, {
|
|
enabled: !!application && !!deployCreatedAt && !!environment,
|
|
refetchInterval: isStarting ? 3_000 : false,
|
|
});
|
|
}
|
|
|
|
export interface UseInfiniteApplicationLogsArgs {
|
|
application?: string;
|
|
agentId?: string;
|
|
sources?: string[]; // multi-select, server-side OR
|
|
levels?: string[]; // multi-select, server-side OR
|
|
exchangeId?: string;
|
|
sort?: 'asc' | 'desc';
|
|
isAtTop: boolean;
|
|
pageSize?: number;
|
|
}
|
|
|
|
/**
|
|
* Cursor-paginated log stream. Filters `sources`, `levels`, and the global
|
|
* time range are applied server-side. Free-text search is applied by the
|
|
* caller on top of the flattened items.
|
|
*/
|
|
export function useInfiniteApplicationLogs(
|
|
args: UseInfiniteApplicationLogsArgs,
|
|
): UseInfiniteStreamResult<LogEntryResponse> {
|
|
const { timeRange } = useGlobalFilters();
|
|
const selectedEnv = useEnvironmentStore((s) => s.environment);
|
|
|
|
const useTimeRange = !args.exchangeId;
|
|
const fromIso = useTimeRange ? timeRange.start.toISOString() : undefined;
|
|
const toIso = useTimeRange ? timeRange.end.toISOString() : undefined;
|
|
// Relative presets (e.g. "last 1h") should live-tail: each fetch advances
|
|
// `to` to "now" so new rows that arrive after the page was first rendered
|
|
// show up on refetch. Absolute ranges keep their captured `to`.
|
|
const isLiveRange = useTimeRange && !!timeRange.preset;
|
|
|
|
const sortedSources = (args.sources ?? []).slice().sort();
|
|
const sortedLevels = (args.levels ?? []).slice().sort();
|
|
const sourcesParam = sortedSources.join(',');
|
|
const levelsParam = sortedLevels.join(',');
|
|
const pageSize = args.pageSize ?? 100;
|
|
const sort = args.sort ?? 'desc';
|
|
|
|
return useInfiniteStream<LogEntryResponse>({
|
|
queryKey: [
|
|
'logs', 'infinite',
|
|
selectedEnv ?? '',
|
|
args.application ?? '',
|
|
args.agentId ?? '',
|
|
args.exchangeId ?? '',
|
|
sourcesParam,
|
|
levelsParam,
|
|
fromIso ?? '',
|
|
toIso ?? '',
|
|
pageSize,
|
|
sort,
|
|
],
|
|
enabled: !!args.application && !!selectedEnv,
|
|
isAtTop: args.isAtTop,
|
|
fetchPage: async (cursor) => {
|
|
const token = useAuthStore.getState().accessToken;
|
|
const qp = new URLSearchParams();
|
|
if (args.application) qp.set('application', args.application);
|
|
if (args.agentId) qp.set('agentId', args.agentId);
|
|
if (args.exchangeId) qp.set('exchangeId', args.exchangeId);
|
|
if (sourcesParam) qp.set('source', sourcesParam);
|
|
if (levelsParam) qp.set('level', levelsParam);
|
|
if (fromIso) qp.set('from', fromIso);
|
|
const effectiveTo = isLiveRange ? new Date().toISOString() : toIso;
|
|
if (effectiveTo) qp.set('to', effectiveTo);
|
|
if (cursor) qp.set('cursor', cursor);
|
|
qp.set('limit', String(pageSize));
|
|
qp.set('sort', sort);
|
|
|
|
const res = await fetch(
|
|
`${config.apiBaseUrl}/environments/${encodeURIComponent(selectedEnv ?? '')}/logs?${qp}`,
|
|
{
|
|
headers: {
|
|
Authorization: `Bearer ${token}`,
|
|
'X-Cameleer-Protocol-Version': '1',
|
|
},
|
|
},
|
|
);
|
|
if (!res.ok) throw new Error('Failed to load logs');
|
|
const page: LogSearchPageResponse = await res.json();
|
|
return { data: page.data, nextCursor: page.nextCursor, hasMore: page.hasMore };
|
|
},
|
|
});
|
|
}
|