Compare commits: 62dd71b860...89c9b53edd (23 commits)
@@ -43,11 +43,11 @@ ClickHouse is shared across tenants. Every ClickHouse query must filter by `tenant_id`.
 - `ApplicationConfigController` — `/api/v1/environments/{envSlug}`. GET `/config` (list), GET/PUT `/apps/{appSlug}/config`, GET `/apps/{appSlug}/processor-routes`, POST `/apps/{appSlug}/config/test-expression`. PUT also pushes `CONFIG_UPDATE` to LIVE agents in this env.
 - `AppSettingsController` — `/api/v1/environments/{envSlug}`. GET `/app-settings` (list), GET/PUT/DELETE `/apps/{appSlug}/settings`. ADMIN/OPERATOR only.
 - `SearchController` — `/api/v1/environments/{envSlug}`. GET `/executions`, POST `/executions/search`, GET `/stats`, `/stats/timeseries`, `/stats/timeseries/by-app`, `/stats/timeseries/by-route`, `/stats/punchcard`, `/attributes/keys`, `/errors/top`.
-- `LogQueryController` — GET `/api/v1/environments/{envSlug}/logs` (filters: source, application, agentId, exchangeId, level, logger, q, time range).
+- `LogQueryController` — GET `/api/v1/environments/{envSlug}/logs` (filters: source (multi, comma-split, OR-joined), level (multi, comma-split, OR-joined), application, agentId, exchangeId, logger, q, time range). Cursor-paginated.
 - `RouteCatalogController` — GET `/api/v1/environments/{envSlug}/routes` (merged route catalog from registry + ClickHouse; env filter unconditional).
 - `RouteMetricsController` — GET `/api/v1/environments/{envSlug}/routes/metrics`, GET `/api/v1/environments/{envSlug}/routes/metrics/processors`.
 - `AgentListController` — GET `/api/v1/environments/{envSlug}/agents` (registered agents with runtime metrics, filtered to env).
-- `AgentEventsController` — GET `/api/v1/environments/{envSlug}/agents/events` (lifecycle events).
+- `AgentEventsController` — GET `/api/v1/environments/{envSlug}/agents/events` (lifecycle events; cursor-paginated, returns `{ data, nextCursor, hasMore }`; order `(timestamp DESC, insert_id DESC)`; cursor is base64url of `"{timestampIso}|{insertId}"`).
 - `AgentMetricsController` — GET `/api/v1/environments/{envSlug}/agents/{agentId}/metrics` (JVM/Camel metrics). Rejects cross-env agents (404) as defence-in-depth.
 - `DiagramRenderController` — GET `/api/v1/environments/{envSlug}/apps/{appSlug}/routes/{routeId}/diagram` (env-scoped lookup). Also GET `/api/v1/diagrams/{contentHash}/render` (flat — content hashes are globally unique).
@@ -29,7 +29,10 @@ The UI has 4 main tabs: **Exchanges**, **Dashboard**, **Runtime**, **Deployments**
 - `ui/src/components/ProcessDiagram/` — ELK-rendered route diagram
 - `ui/src/hooks/useScope.ts` — TabKey type, scope inference
 - `ui/src/components/StartupLogPanel.tsx` — deployment startup log viewer (container logs from ClickHouse, polls 3s while STARTING)
-- `ui/src/api/queries/logs.ts` — `useStartupLogs` hook for container startup log polling, `useLogs`/`useApplicationLogs` for general log search
+- `ui/src/api/queries/logs.ts` — `useStartupLogs` hook for container startup log polling, `useLogs`/`useApplicationLogs` for bounded log search (single page), `useInfiniteApplicationLogs` for streaming log views (cursor-paginated, server-side source/level filters)
+- `ui/src/api/queries/agents.ts` — `useAgents` for agent list, `useInfiniteAgentEvents` for cursor-paginated timeline stream
+- `ui/src/hooks/useInfiniteStream.ts` — tanstack `useInfiniteQuery` wrapper with top-gated auto-refetch, flattened `items[]`, and `refresh()` invalidator
+- `ui/src/components/InfiniteScrollArea.tsx` — scrollable container with IntersectionObserver top/bottom sentinels. Streaming log/event views use this + `useInfiniteStream`. Bounded views (LogTab, StartupLogPanel) keep `useLogs`/`useStartupLogs`

 ## UI Styling
@@ -1,7 +1,9 @@
 package com.cameleer.server.app.controller;

+import com.cameleer.server.app.dto.AgentEventPageResponse;
 import com.cameleer.server.app.dto.AgentEventResponse;
 import com.cameleer.server.app.web.EnvPath;
+import com.cameleer.server.core.agent.AgentEventPage;
 import com.cameleer.server.core.agent.AgentEventService;
 import com.cameleer.server.core.runtime.Environment;
 import io.swagger.v3.oas.annotations.Operation;
@@ -14,7 +16,6 @@ import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;

 import java.time.Instant;
-import java.util.List;

 @RestController
 @RequestMapping("/api/v1/environments/{envSlug}/agents/events")
@@ -29,24 +30,25 @@ public class AgentEventsController {

     @GetMapping
     @Operation(summary = "Query agent events in this environment",
-            description = "Returns agent lifecycle events, optionally filtered by app and/or agent ID")
-    @ApiResponse(responseCode = "200", description = "Events returned")
-    public ResponseEntity<List<AgentEventResponse>> getEvents(
+            description = "Cursor-paginated. Returns newest first. Pass nextCursor back as ?cursor= for the next page.")
+    @ApiResponse(responseCode = "200", description = "Event page returned")
+    public ResponseEntity<AgentEventPageResponse> getEvents(
             @EnvPath Environment env,
             @RequestParam(required = false) String appId,
             @RequestParam(required = false) String agentId,
             @RequestParam(required = false) String from,
             @RequestParam(required = false) String to,
+            @RequestParam(required = false) String cursor,
             @RequestParam(defaultValue = "50") int limit) {

         Instant fromInstant = from != null ? Instant.parse(from) : null;
         Instant toInstant = to != null ? Instant.parse(to) : null;

-        var events = agentEventService.queryEvents(appId, agentId, env.slug(), fromInstant, toInstant, limit)
-                .stream()
-                .map(AgentEventResponse::from)
-                .toList();
+        AgentEventPage page = agentEventService.queryEventPage(
+                appId, agentId, env.slug(), fromInstant, toInstant, cursor, limit);

-        return ResponseEntity.ok(events);
+        var data = page.data().stream().map(AgentEventResponse::from).toList();
+
+        return ResponseEntity.ok(new AgentEventPageResponse(data, page.nextCursor(), page.hasMore()));
     }
 }
@@ -6,6 +6,8 @@ import org.springframework.web.bind.annotation.ExceptionHandler;
 import org.springframework.web.bind.annotation.RestControllerAdvice;
 import org.springframework.web.server.ResponseStatusException;

+import java.time.format.DateTimeParseException;
+
 /**
  * Global exception handler that ensures error responses use the typed {@link ErrorResponse} schema.
  */
@@ -18,4 +20,11 @@ public class ApiExceptionHandler {
         return ResponseEntity.status(ex.getStatusCode())
                 .body(new ErrorResponse(reason != null ? reason : "Unknown error"));
     }
+
+    @ExceptionHandler({DateTimeParseException.class, IllegalArgumentException.class})
+    public ResponseEntity<ErrorResponse> handleBadRequest(Exception ex) {
+        String msg = ex.getMessage();
+        return ResponseEntity.badRequest()
+                .body(new ErrorResponse(msg != null ? msg : "Bad request"));
+    }
 }
@@ -61,12 +61,20 @@ public class LogQueryController {
                     .toList();
         }

+        List<String> sources = List.of();
+        if (source != null && !source.isEmpty()) {
+            sources = Arrays.stream(source.split(","))
+                    .map(String::trim)
+                    .filter(s -> !s.isEmpty())
+                    .toList();
+        }
+
         Instant fromInstant = from != null ? Instant.parse(from) : null;
         Instant toInstant = to != null ? Instant.parse(to) : null;

         LogSearchRequest request = new LogSearchRequest(
                 searchText, levels, application, instanceId, exchangeId,
-                logger, env.slug(), source, fromInstant, toInstant, cursor, limit, sort);
+                logger, env.slug(), sources, fromInstant, toInstant, cursor, limit, sort);

         LogSearchResponse result = logIndex.search(request);
@@ -0,0 +1,12 @@
+package com.cameleer.server.app.dto;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+
+import java.util.List;
+
+@Schema(description = "Cursor-paginated agent event list")
+public record AgentEventPageResponse(
+        List<AgentEventResponse> data,
+        String nextCursor,
+        boolean hasMore
+) {}
@@ -10,13 +10,11 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.jdbc.core.JdbcTemplate;

-import java.sql.Timestamp;
+import java.nio.charset.StandardCharsets;
 import java.time.Instant;
-import java.time.ZoneOffset;
-import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
+import java.util.Base64;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -29,7 +27,6 @@ import java.util.Map;
 public class ClickHouseLogStore implements LogIndex {

     private static final Logger log = LoggerFactory.getLogger(ClickHouseLogStore.class);
-    private static final DateTimeFormatter ISO_FMT = DateTimeFormatter.ISO_INSTANT;

     private final String tenantId;
     private final JdbcTemplate jdbc;
@@ -47,12 +44,12 @@ public class ClickHouseLogStore implements LogIndex {

         String sql = "INSERT INTO logs (tenant_id, timestamp, application, instance_id, level, " +
                 "logger_name, message, thread_name, stack_trace, exchange_id, mdc, source) " +
-                "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+                "VALUES (?, parseDateTime64BestEffort(?, 3), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";

         jdbc.batchUpdate(sql, entries, entries.size(), (ps, entry) -> {
             Instant ts = entry.getTimestamp() != null ? entry.getTimestamp() : Instant.now();
             ps.setString(1, tenantId);
-            ps.setTimestamp(2, Timestamp.from(ts));
+            ps.setString(2, ts.toString());
             ps.setString(3, applicationId);
             ps.setString(4, instanceId);
             ps.setString(5, entry.getLevel() != null ? entry.getLevel() : "");
@@ -76,14 +73,14 @@ public class ClickHouseLogStore implements LogIndex {

         String sql = "INSERT INTO logs (tenant_id, environment, timestamp, application, instance_id, level, " +
                 "logger_name, message, thread_name, stack_trace, exchange_id, mdc, source) " +
-                "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+                "VALUES (?, ?, parseDateTime64BestEffort(?, 3), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";

         jdbc.batchUpdate(sql, entries, entries.size(), (ps, ble) -> {
             LogEntry entry = ble.entry();
             Instant ts = entry.getTimestamp() != null ? entry.getTimestamp() : Instant.now();
             ps.setString(1, ble.tenantId() != null ? ble.tenantId() : tenantId);
             ps.setString(2, ble.environment() != null ? ble.environment() : "default");
-            ps.setTimestamp(3, Timestamp.from(ts));
+            ps.setString(3, ts.toString());
             ps.setString(4, ble.applicationId());
             ps.setString(5, ble.instanceId());
             ps.setString(6, entry.getLevel() != null ? entry.getLevel() : "");
@@ -146,19 +143,22 @@ public class ClickHouseLogStore implements LogIndex {
             baseParams.add("%" + escapeLike(request.logger()) + "%");
         }

-        if (request.source() != null && !request.source().isEmpty()) {
-            baseConditions.add("source = ?");
-            baseParams.add(request.source());
+        if (request.sources() != null && !request.sources().isEmpty()) {
+            String placeholders = String.join(", ", Collections.nCopies(request.sources().size(), "?"));
+            baseConditions.add("source IN (" + placeholders + ")");
+            for (String s : request.sources()) {
+                baseParams.add(s);
+            }
         }

         if (request.from() != null) {
-            baseConditions.add("timestamp >= ?");
-            baseParams.add(Timestamp.from(request.from()));
+            baseConditions.add("timestamp >= parseDateTime64BestEffort(?, 3)");
+            baseParams.add(request.from().toString());
         }

         if (request.to() != null) {
-            baseConditions.add("timestamp <= ?");
-            baseParams.add(Timestamp.from(request.to()));
+            baseConditions.add("timestamp <= parseDateTime64BestEffort(?, 3)");
+            baseParams.add(request.to().toString());
         }

         // Level counts query: uses base conditions WITHOUT level filter and cursor
@@ -178,30 +178,44 @@ public class ClickHouseLogStore implements LogIndex {
         }

         if (request.cursor() != null && !request.cursor().isEmpty()) {
-            Instant cursorTs = Instant.parse(request.cursor());
-            if ("asc".equalsIgnoreCase(request.sort())) {
-                dataConditions.add("timestamp > ?");
-            } else {
-                dataConditions.add("timestamp < ?");
+            String decoded = new String(Base64.getUrlDecoder().decode(request.cursor()),
+                    StandardCharsets.UTF_8);
+            int bar = decoded.indexOf('|');
+            if (bar <= 0 || bar == decoded.length() - 1) {
+                throw new IllegalArgumentException("Malformed cursor");
             }
-            dataParams.add(Timestamp.from(cursorTs));
+            Instant cursorTs;
+            try {
+                cursorTs = Instant.parse(decoded.substring(0, bar));
+            } catch (java.time.format.DateTimeParseException e) {
+                throw new IllegalArgumentException("Malformed cursor", e);
+            }
+            String cursorId = decoded.substring(bar + 1);
+            String cmp = "asc".equalsIgnoreCase(request.sort()) ? ">" : "<";
+            dataConditions.add(
+                    "(timestamp " + cmp + " parseDateTime64BestEffort(?, 3)" +
+                    " OR (timestamp = parseDateTime64BestEffort(?, 3) AND insert_id " + cmp + " toUUID(?)))");
+            dataParams.add(cursorTs.toString());
+            dataParams.add(cursorTs.toString());
+            dataParams.add(cursorId);
         }

         String dataWhere = String.join(" AND ", dataConditions);
         String orderDir = "asc".equalsIgnoreCase(request.sort()) ? "ASC" : "DESC";
         int fetchLimit = request.limit() + 1; // fetch N+1 to detect hasMore

-        String dataSql = "SELECT timestamp, level, logger_name, message, thread_name, stack_trace, " +
-                "exchange_id, instance_id, application, mdc, source " +
+        String dataSql = "SELECT formatDateTime(timestamp, '%Y-%m-%dT%H:%i:%S', 'UTC') AS ts_utc," +
+                " toUnixTimestamp64Milli(timestamp) AS ts_millis," +
+                " level, logger_name, message, thread_name, stack_trace, " +
+                "exchange_id, instance_id, application, mdc, source, toString(insert_id) AS insert_id_str " +
                 "FROM logs WHERE " + dataWhere +
-                " ORDER BY timestamp " + orderDir + " LIMIT ?";
+                " ORDER BY timestamp " + orderDir + ", insert_id " + orderDir + " LIMIT ?";
         dataParams.add(fetchLimit);

+        List<String> insertIds = new ArrayList<>();
         List<LogEntryResult> results = jdbc.query(dataSql, dataParams.toArray(), (rs, rowNum) -> {
-            Timestamp ts = rs.getTimestamp("timestamp");
-            String timestampStr = ts != null
-                    ? ts.toInstant().atOffset(ZoneOffset.UTC).format(ISO_FMT)
-                    : null;
+            long tsMillis = rs.getLong("ts_millis");
+            String timestampStr = Instant.ofEpochMilli(tsMillis).toString();

             @SuppressWarnings("unchecked")
             Map<String, String> mdc = (Map<String, String>) rs.getObject("mdc");
@@ -209,6 +223,8 @@ public class ClickHouseLogStore implements LogIndex {

             String source = rs.getString("source");

+            insertIds.add(rs.getString("insert_id_str"));
+
             return new LogEntryResult(
                     timestampStr,
                     rs.getString("level"),
@@ -231,7 +247,10 @@ public class ClickHouseLogStore implements LogIndex {

         String nextCursor = null;
         if (hasMore && !results.isEmpty()) {
-            nextCursor = results.get(results.size() - 1).timestamp();
+            int lastIdx = results.size() - 1;
+            String raw = results.get(lastIdx).timestamp() + "|" + insertIds.get(lastIdx);
+            nextCursor = Base64.getUrlEncoder().withoutPadding()
+                    .encodeToString(raw.getBytes(StandardCharsets.UTF_8));
         }

         return new LogSearchResponse(results, nextCursor, hasMore, levelCounts);
@@ -1,12 +1,15 @@
 package com.cameleer.server.app.storage;

+import com.cameleer.server.core.agent.AgentEventPage;
 import com.cameleer.server.core.agent.AgentEventRecord;
 import com.cameleer.server.core.agent.AgentEventRepository;
 import org.springframework.jdbc.core.JdbcTemplate;

+import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
 import java.time.Instant;
 import java.util.ArrayList;
+import java.util.Base64;
 import java.util.List;

 /**
@@ -21,7 +24,7 @@ public class ClickHouseAgentEventRepository implements AgentEventRepository {
         "INSERT INTO agent_events (tenant_id, instance_id, application_id, environment, event_type, detail) VALUES (?, ?, ?, ?, ?, ?)";

     private static final String SELECT_BASE =
-        "SELECT 0 AS id, instance_id, application_id, event_type, detail, timestamp FROM agent_events WHERE tenant_id = ?";
+        "SELECT 0 AS id, instance_id, application_id, event_type, detail, timestamp, toString(insert_id) AS insert_id_str FROM agent_events WHERE tenant_id = ?";

     private final String tenantId;
     private final JdbcTemplate jdbc;
@@ -38,41 +41,69 @@ public class ClickHouseAgentEventRepository implements AgentEventRepository {
     }

     @Override
-    public List<AgentEventRecord> query(String applicationId, String instanceId, String environment, Instant from, Instant to, int limit) {
+    public AgentEventPage queryPage(String applicationId, String instanceId, String environment,
+                                    Instant from, Instant to, String cursor, int limit) {
         var sql = new StringBuilder(SELECT_BASE);
         var params = new ArrayList<Object>();
         params.add(tenantId);

-        if (applicationId != null) {
-            sql.append(" AND application_id = ?");
-            params.add(applicationId);
-        }
-        if (instanceId != null) {
-            sql.append(" AND instance_id = ?");
-            params.add(instanceId);
-        }
-        if (environment != null) {
-            sql.append(" AND environment = ?");
-            params.add(environment);
-        }
-        if (from != null) {
-            sql.append(" AND timestamp >= ?");
-            params.add(Timestamp.from(from));
-        }
-        if (to != null) {
-            sql.append(" AND timestamp < ?");
-            params.add(Timestamp.from(to));
-        }
-        sql.append(" ORDER BY timestamp DESC LIMIT ?");
-        params.add(limit);
+        if (applicationId != null) { sql.append(" AND application_id = ?"); params.add(applicationId); }
+        if (instanceId != null) { sql.append(" AND instance_id = ?"); params.add(instanceId); }
+        if (environment != null) { sql.append(" AND environment = ?"); params.add(environment); }
+        if (from != null) { sql.append(" AND timestamp >= ?"); params.add(Timestamp.from(from)); }
+        if (to != null) { sql.append(" AND timestamp < ?"); params.add(Timestamp.from(to)); }

-        return jdbc.query(sql.toString(), (rs, rowNum) -> new AgentEventRecord(
-                rs.getLong("id"),
-                rs.getString("instance_id"),
-                rs.getString("application_id"),
-                rs.getString("event_type"),
-                rs.getString("detail"),
-                rs.getTimestamp("timestamp").toInstant()
-        ), params.toArray());
+        if (cursor != null && !cursor.isEmpty()) {
+            String decoded = new String(Base64.getUrlDecoder().decode(cursor), StandardCharsets.UTF_8);
+            int bar = decoded.indexOf('|');
+            if (bar <= 0 || bar == decoded.length() - 1) {
+                throw new IllegalArgumentException("Malformed cursor");
+            }
+            Instant cursorTs;
+            try {
+                cursorTs = Instant.parse(decoded.substring(0, bar));
+            } catch (java.time.format.DateTimeParseException e) {
+                throw new IllegalArgumentException("Malformed cursor", e);
+            }
+            String cursorInsertId = decoded.substring(bar + 1);
+            sql.append(" AND (timestamp < ? OR (timestamp = ? AND insert_id < toUUID(?)))");
+            params.add(Timestamp.from(cursorTs));
+            params.add(Timestamp.from(cursorTs));
+            params.add(cursorInsertId);
+        }
+
+        sql.append(" ORDER BY timestamp DESC, insert_id DESC LIMIT ?");
+        int fetchLimit = limit + 1;
+        params.add(fetchLimit);
+
+        List<String> insertIds = new ArrayList<>();
+        List<AgentEventRecord> results = new ArrayList<>(jdbc.query(sql.toString(),
+                (rs, rowNum) -> {
+                    insertIds.add(rs.getString("insert_id_str"));
+                    return new AgentEventRecord(
+                            rs.getLong("id"),
+                            rs.getString("instance_id"),
+                            rs.getString("application_id"),
+                            rs.getString("event_type"),
+                            rs.getString("detail"),
+                            rs.getTimestamp("timestamp").toInstant()
+                    );
+                }, params.toArray()));
+
+        boolean hasMore = results.size() > limit;
+        if (hasMore) {
+            results = new ArrayList<>(results.subList(0, limit));
+        }
+
+        String nextCursor = null;
+        if (hasMore && !results.isEmpty()) {
+            int lastIdx = results.size() - 1;
+            AgentEventRecord last = results.get(lastIdx);
+            String raw = last.timestamp().toString() + "|" + insertIds.get(lastIdx);
+            nextCursor = Base64.getUrlEncoder().withoutPadding()
+                    .encodeToString(raw.getBytes(StandardCharsets.UTF_8));
+        }
+
+        return new AgentEventPage(results, nextCursor, hasMore);
     }
 }
@@ -327,7 +327,8 @@ CREATE TABLE IF NOT EXISTS agent_events (
     instance_id LowCardinality(String),
     application_id LowCardinality(String),
     event_type LowCardinality(String),
-    detail String DEFAULT ''
+    detail String DEFAULT '',
+    insert_id UUID DEFAULT generateUUIDv4()
 )
 ENGINE = MergeTree()
 PARTITION BY (tenant_id, toYYYYMM(timestamp))
@@ -349,6 +350,7 @@ CREATE TABLE IF NOT EXISTS logs (
     stack_trace String DEFAULT '',
     exchange_id String DEFAULT '',
     mdc Map(String, String) DEFAULT map(),
+    insert_id UUID DEFAULT generateUUIDv4(),

     INDEX idx_msg message TYPE ngrambf_v1(3, 256, 2, 0) GRANULARITY 4,
     INDEX idx_stack stack_trace TYPE ngrambf_v1(3, 256, 2, 0) GRANULARITY 4,
@@ -398,3 +400,12 @@ CREATE TABLE IF NOT EXISTS route_catalog (
 )
 ENGINE = ReplacingMergeTree(last_seen)
 ORDER BY (tenant_id, environment, application_id, route_id);
+
+-- insert_id tiebreak for keyset pagination (fixes same-millisecond cursor collision).
+-- IF NOT EXISTS on ADD COLUMN is idempotent. MATERIALIZE COLUMN is a background mutation,
+-- effectively a no-op once all parts are already materialized.
+ALTER TABLE logs ADD COLUMN IF NOT EXISTS insert_id UUID DEFAULT generateUUIDv4();
+ALTER TABLE logs MATERIALIZE COLUMN insert_id;
+
+ALTER TABLE agent_events ADD COLUMN IF NOT EXISTS insert_id UUID DEFAULT generateUUIDv4();
+ALTER TABLE agent_events MATERIALIZE COLUMN insert_id;
@@ -323,4 +323,78 @@ class ClickHouseLogStoreIT {
                 String.class);
         assertThat(customVal).isEqualTo("custom-value");
     }
+
+    @Test
+    void search_bySources_singleValue_filtersCorrectly() {
+        Instant now = Instant.parse("2026-03-31T12:00:00Z");
+        // "source" column is populated by indexBatch via LogEntry.getSource(); default is "app" when null.
+        // Force one row to "container" via a direct insert to avoid coupling to the LogEntry constructor.
+        store.indexBatch("agent-1", "my-app", List.of(
+                entry(now, "INFO", "logger", "app msg", "t1", null, null)
+        ));
+        jdbc.update("INSERT INTO logs (tenant_id, environment, timestamp, application, instance_id, level, " +
+                "logger_name, message, thread_name, stack_trace, exchange_id, mdc, source) VALUES " +
+                "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+                "default", "default", java.sql.Timestamp.from(now.plusSeconds(1)), "my-app", "agent-1",
+                "INFO", "logger", "container msg", "t1", "", "", java.util.Map.of(), "container");
+
+        LogSearchResponse result = store.search(new LogSearchRequest(
+                null, null, "my-app", null, null, null, null,
+                List.of("container"), null, null, null, 100, "desc"));
+
+        assertThat(result.data()).hasSize(1);
+        assertThat(result.data().get(0).message()).isEqualTo("container msg");
+    }
+
+    @Test
+    void search_bySources_multiValue_joinsAsOr() {
+        Instant now = Instant.parse("2026-03-31T12:00:00Z");
+        store.indexBatch("agent-1", "my-app", List.of(
+                entry(now, "INFO", "logger", "app msg", "t1", null, null)
+        ));
+        jdbc.update("INSERT INTO logs (tenant_id, environment, timestamp, application, instance_id, level, " +
+                "logger_name, message, thread_name, stack_trace, exchange_id, mdc, source) VALUES " +
+                "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+                "default", "default", java.sql.Timestamp.from(now.plusSeconds(1)), "my-app", "agent-1",
+                "INFO", "logger", "container msg", "t1", "", "", java.util.Map.of(), "container");
+        jdbc.update("INSERT INTO logs (tenant_id, environment, timestamp, application, instance_id, level, " +
+                "logger_name, message, thread_name, stack_trace, exchange_id, mdc, source) VALUES " +
+                "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+                "default", "default", java.sql.Timestamp.from(now.plusSeconds(2)), "my-app", "agent-1",
+                "INFO", "logger", "agent msg", "t1", "", "", java.util.Map.of(), "agent");
+
+        LogSearchResponse result = store.search(new LogSearchRequest(
+                null, null, "my-app", null, null, null, null,
+                List.of("app", "container"), null, null, null, 100, "desc"));
+
+        assertThat(result.data()).hasSize(2);
+        assertThat(result.data()).extracting(LogEntryResult::message)
+                .containsExactlyInAnyOrder("app msg", "container msg");
+    }
+
+    @Test
+    void search_cursorPagination_sameMillisecond_doesNotSkip() {
+        Instant ts = Instant.parse("2026-04-17T10:00:00Z");
+        // Insert 5 rows at the exact same timestamp
+        java.util.List<LogEntry> batch = new java.util.ArrayList<>();
+        for (int i = 0; i < 5; i++) {
+            batch.add(entry(ts, "INFO", "logger", "msg-" + i, "t1", null, null));
+        }
+        store.indexBatch("agent-1", "my-app", batch);
+
+        // Page through with limit 2; across 3 pages we must see all 5 distinct messages, no duplicates
+        java.util.Set<String> seen = new java.util.HashSet<>();
+        String cursor = null;
+        for (int page = 0; page < 10; page++) {
+            LogSearchResponse resp = store.search(new LogSearchRequest(
+                    null, null, "my-app", null, null, null, null, null,
+                    null, null, cursor, 2, "desc"));
+            for (LogEntryResult r : resp.data()) {
+                assertThat(seen.add(r.message())).as("duplicate row returned: " + r.message()).isTrue();
+            }
+            cursor = resp.nextCursor();
+            if (!resp.hasMore()) break;
+        }
+        assertThat(seen).containsExactlyInAnyOrder("msg-0", "msg-1", "msg-2", "msg-3", "msg-4");
+    }
 }
@@ -1,6 +1,5 @@
 package com.cameleer.server.app.storage;

-import com.cameleer.server.core.agent.AgentEventRecord;
 import com.zaxxer.hikari.HikariDataSource;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -10,10 +9,8 @@ import org.testcontainers.clickhouse.ClickHouseContainer;
 import org.testcontainers.junit.jupiter.Container;
 import org.testcontainers.junit.jupiter.Testcontainers;

-import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
 import java.time.Instant;
-import java.util.List;

 import static org.assertj.core.api.Assertions.assertThat;
@@ -66,91 +63,97 @@ class ClickHouseAgentEventRepositoryIT {
     }

     @Test
-    void query_byAppId_filtersCorrectly() {
-        repo.insert("agent-1", "app-x", "default", "CONNECTED", "");
-        repo.insert("agent-2", "app-y", "default", "DISCONNECTED", "");
-
-        List<AgentEventRecord> results = repo.query("app-x", null, null, null, null, 100);
-
-        assertThat(results).hasSize(1);
-        assertThat(results.get(0).applicationId()).isEqualTo("app-x");
-        assertThat(results.get(0).instanceId()).isEqualTo("agent-1");
+    void queryPage_emptyTable_returnsEmptyPage() {
+        com.cameleer.server.core.agent.AgentEventPage page =
+                repo.queryPage(null, null, null, null, null, null, 10);
+        assertThat(page.data()).isEmpty();
+        assertThat(page.hasMore()).isFalse();
+        assertThat(page.nextCursor()).isNull();
     }

     @Test
-    void query_byAgentId_filtersCorrectly() {
-        repo.insert("agent-alpha", "app-shared", "default", "CONNECTED", "");
-        repo.insert("agent-beta", "app-shared", "default", "CONNECTED", "");
-
-        List<AgentEventRecord> results = repo.query(null, "agent-alpha", null, null, null, 100);
-
-        assertThat(results).hasSize(1);
-        assertThat(results.get(0).instanceId()).isEqualTo("agent-alpha");
+    void queryPage_boundary_noHasMoreWhenLimitEqualsRowCount() {
+        Instant base = Instant.parse("2026-04-01T10:00:00Z");
+        for (int i = 0; i < 3; i++) {
+            insertAt("agent-1", "app-a", "TICK", "t" + i, base.plusSeconds(i));
+        }
+        com.cameleer.server.core.agent.AgentEventPage page =
+                repo.queryPage(null, null, null, null, null, null, 3);
+        assertThat(page.data()).hasSize(3);
+        assertThat(page.hasMore()).isFalse();
+        assertThat(page.nextCursor()).isNull();
     }

     @Test
-    void query_byTimeRange_filtersCorrectly() {
-        Instant t1 = Instant.parse("2026-01-01T10:00:00Z");
-        Instant t2 = Instant.parse("2026-01-01T11:00:00Z");
-        Instant t3 = Instant.parse("2026-01-01T12:00:00Z");
-
-        insertAt("agent-1", "app-a", "CONNECTED", "early", t1);
-        insertAt("agent-1", "app-a", "HEARTBEAT", "mid", t2);
-        insertAt("agent-1", "app-a", "DISCONNECTED", "late", t3);
-
-        // Query [t2, t3) — should return only the middle event
-        List<AgentEventRecord> results = repo.query(null, null, null, t2, t3, 100);
-
-        assertThat(results).hasSize(1);
-        assertThat(results.get(0).eventType()).isEqualTo("HEARTBEAT");
-    }
-
-    @Test
-    void query_respectsLimit() {
-        Instant base = Instant.parse("2026-02-01T00:00:00Z");
-        for (int i = 0; i < 10; i++) {
-            insertAt("agent-1", "app-a", "HEARTBEAT", "beat-" + i, base.plusSeconds(i));
+    void queryPage_paginatesAcrossThreePages() {
+        Instant base = Instant.parse("2026-04-01T10:00:00Z");
+        for (int i = 0; i < 5; i++) {
+            insertAt("agent-1", "app-a", "TICK", "t" + i, base.plusSeconds(i));
         }

-        List<AgentEventRecord> results = repo.query(null, null, null, null, null, 3);
+        com.cameleer.server.core.agent.AgentEventPage p1 =
+                repo.queryPage(null, null, null, null, null, null, 2);
+        assertThat(p1.data()).hasSize(2);
+        assertThat(p1.hasMore()).isTrue();
+        assertThat(p1.nextCursor()).isNotBlank();
+        assertThat(p1.data().get(0).detail()).isEqualTo("t4");
+        assertThat(p1.data().get(1).detail()).isEqualTo("t3");

-        assertThat(results).hasSize(3);
+        com.cameleer.server.core.agent.AgentEventPage p2 =
+                repo.queryPage(null, null, null, null, null, p1.nextCursor(), 2);
+        assertThat(p2.data()).hasSize(2);
+        assertThat(p2.hasMore()).isTrue();
+        assertThat(p2.data().get(0).detail()).isEqualTo("t2");
+        assertThat(p2.data().get(1).detail()).isEqualTo("t1");
+
+        com.cameleer.server.core.agent.AgentEventPage p3 =
+                repo.queryPage(null, null, null, null, null, p2.nextCursor(), 2);
+        assertThat(p3.data()).hasSize(1);
+        assertThat(p3.hasMore()).isFalse();
+        assertThat(p3.nextCursor()).isNull();
+        assertThat(p3.data().get(0).detail()).isEqualTo("t0");
     }

     @Test
-    void query_returnsZeroId() {
-        repo.insert("agent-1", "app-a", "default", "CONNECTED", "");
+    void queryPage_tiebreak_sameMillisecond_returnsAllRowsNoDuplicates() {
+        Instant ts = Instant.parse("2026-04-01T10:00:00Z");
+        insertAt("agent-a", "app-a", "TICK", "a", ts);
+        insertAt("agent-b", "app-a", "TICK", "b", ts);
+        insertAt("agent-c", "app-a", "TICK", "c", ts);

-        List<AgentEventRecord> results = repo.query(null, null, null, null, null, 10);
-
-        assertThat(results).hasSize(1);
-        assertThat(results.get(0).id()).isEqualTo(0L);
+        java.util.Set<String> seen = new java.util.HashSet<>();
+        String cursor = null;
+        for (int page = 0; page < 10; page++) {
+            com.cameleer.server.core.agent.AgentEventPage p =
+                    repo.queryPage(null, null, null, null, null, cursor, 1);
+            for (com.cameleer.server.core.agent.AgentEventRecord r : p.data()) {
+                assertThat(seen.add(r.instanceId())).as("duplicate row returned: " + r.instanceId()).isTrue();
+            }
+            cursor = p.nextCursor();
+            if (!p.hasMore()) break;
+        }
+        assertThat(seen).containsExactlyInAnyOrder("agent-a", "agent-b", "agent-c");
     }

     @Test
-    void query_noFilters_returnsAllEvents() {
-        repo.insert("agent-1", "app-a", "default", "CONNECTED", "");
-        repo.insert("agent-2", "app-b", "default", "DISCONNECTED", "");
+    void queryPage_malformedCursor_invalidTimestamp_throws() {
+        String raw = "not-a-timestamp|agent-1";
+        String cursor = java.util.Base64.getUrlEncoder().withoutPadding()
+                .encodeToString(raw.getBytes(java.nio.charset.StandardCharsets.UTF_8));

-        List<AgentEventRecord> results = repo.query(null, null, null, null, null, 100);
-
-        assertThat(results).hasSize(2);
+        org.junit.jupiter.api.Assertions.assertThrows(
+                IllegalArgumentException.class,
+                () -> repo.queryPage(null, null, null, null, null, cursor, 10));
     }

     @Test
-    void query_resultsOrderedByTimestampDesc() {
-        Instant t1 = Instant.parse("2026-03-01T08:00:00Z");
-        Instant t2 = Instant.parse("2026-03-01T09:00:00Z");
-        Instant t3 = Instant.parse("2026-03-01T10:00:00Z");
-
-        insertAt("agent-1", "app-a", "FIRST", "", t1);
-        insertAt("agent-1", "app-a", "SECOND", "", t2);
-        insertAt("agent-1", "app-a", "THIRD", "", t3);
-
-        List<AgentEventRecord> results = repo.query(null, null, null, null, null, 100);
-
-        assertThat(results.get(0).eventType()).isEqualTo("THIRD");
-        assertThat(results.get(1).eventType()).isEqualTo("SECOND");
-        assertThat(results.get(2).eventType()).isEqualTo("FIRST");
+    void queryPage_malformedCursor_emptyInsertId_throws() {
+        String raw = "2026-04-01T10:00:00Z|";
+        String cursor = java.util.Base64.getUrlEncoder().withoutPadding()
+                .encodeToString(raw.getBytes(java.nio.charset.StandardCharsets.UTF_8));
+
+        org.junit.jupiter.api.Assertions.assertThrows(
+                IllegalArgumentException.class,
+                () -> repo.queryPage(null, null, null, null, null, cursor, 10));
     }
 }
@@ -0,0 +1,16 @@
+package com.cameleer.server.core.agent;
+
+import java.util.List;
+
+/**
+ * Cursor-paginated result page for agent event queries.
+ *
+ * @param data events on this page, ordered newest-first
+ * @param nextCursor opaque cursor to pass back for the next page (null when no more)
+ * @param hasMore whether more results exist beyond this page
+ */
+public record AgentEventPage(
+        List<AgentEventRecord> data,
+        String nextCursor,
+        boolean hasMore
+) {}
@@ -1,11 +1,16 @@
 package com.cameleer.server.core.agent;

 import java.time.Instant;
 import java.util.List;

 public interface AgentEventRepository {

     void insert(String instanceId, String applicationId, String environment, String eventType, String detail);

-    List<AgentEventRecord> query(String applicationId, String instanceId, String environment, Instant from, Instant to, int limit);
+    /**
+     * Cursor-paginated query ordered by (timestamp DESC, insert_id DESC). The cursor
+     * is an opaque base64 string produced by the implementation; pass {@code null} for
+     * the first page.
+     */
+    AgentEventPage queryPage(String applicationId, String instanceId, String environment,
+                             Instant from, Instant to, String cursor, int limit);
 }
@@ -4,7 +4,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.time.Instant;
-import java.util.List;

 public class AgentEventService {

@@ -21,7 +20,8 @@ public class AgentEventService {
         repository.insert(instanceId, applicationId, environment, eventType, detail);
     }

-    public List<AgentEventRecord> queryEvents(String applicationId, String instanceId, String environment, Instant from, Instant to, int limit) {
-        return repository.query(applicationId, instanceId, environment, from, to, limit);
+    public AgentEventPage queryEventPage(String applicationId, String instanceId, String environment,
+                                         Instant from, Instant to, String cursor, int limit) {
+        return repository.queryPage(applicationId, instanceId, environment, from, to, cursor, limit);
     }
 }
|
||||
@@ -7,15 +7,15 @@ import java.util.List;
|
||||
* Immutable search criteria for querying application logs.
|
||||
*
|
||||
* @param q free-text search across message and stack trace
|
||||
* @param levels log level filter (e.g. ["WARN","ERROR"])
|
||||
* @param levels log level filter (e.g. ["WARN","ERROR"]), OR-joined
|
||||
* @param application application ID filter (nullable = all apps)
|
||||
* @param instanceId agent instance ID filter
|
||||
* @param exchangeId Camel exchange ID filter
|
||||
* @param logger logger name substring filter
|
||||
* @param environment optional environment filter (e.g. "dev", "staging", "prod")
|
||||
* @param source optional source filter: "app" or "agent"
|
||||
* @param from inclusive start of time range (required)
|
||||
* @param to inclusive end of time range (required)
|
||||
* @param sources optional source filter (e.g. ["app","container","agent"]), OR-joined
|
||||
* @param from inclusive start of time range
|
||||
* @param to inclusive end of time range
|
||||
* @param cursor ISO timestamp cursor for keyset pagination
|
||||
* @param limit page size (1-500, default 100)
|
||||
* @param sort sort direction: "asc" or "desc" (default "desc")
|
||||
@@ -28,7 +28,7 @@ public record LogSearchRequest(
|
||||
String exchangeId,
|
||||
String logger,
|
||||
String environment,
|
||||
String source,
|
||||
List<String> sources,
|
||||
Instant from,
|
||||
Instant to,
|
||||
String cursor,
|
||||
@@ -44,5 +44,6 @@ public record LogSearchRequest(
|
||||
if (limit > MAX_LIMIT) limit = MAX_LIMIT;
|
||||
if (sort == null || !"asc".equalsIgnoreCase(sort)) sort = "desc";
|
||||
if (levels == null) levels = List.of();
|
||||
if (sources == null) sources = List.of();
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large.
@@ -0,0 +1,252 @@
# Streaming Views: Multi-Select Filters + Infinite Scroll

**Date:** 2026-04-17
**Status:** Draft

## Problem

In the Runtime page's **Application Log**, the `App / Agent / Container` source buttons are single-select, and the **Level** buttons are multi-select but filtered **client-side**. Both are flawed for the same reason: when one source or level floods the result set (e.g. hundreds of INFO-level app logs), less-frequent entries (container logs, ERROR lines) can be pushed past the fetched page and never appear.

The page is also a single `useQuery` that fetches at most `limit` rows. There is no way to browse older logs.

The **Timeline** (agent events) has the same single-page limitation.

## Goal

Establish a consistent pattern for streaming views (log-like lists + event feeds):

1. **Multi-select server-side filters** joined with `OR` — the server decides what's included, so no category is ever silently starved.
2. **Infinite scroll** via cursor pagination — the user scrolls down to load older entries.
3. **Top-gated auto-refetch** — polling only runs while the user is viewing the newest entries; when they scroll away to inspect history, polling pauses so their viewport cannot shift under them.
4. **Client-side text filter** only — text search filters pages already loaded without re-querying.

Apply this pattern to:
- `AgentHealth.tsx` — Application Log + Timeline
- `AgentInstance.tsx` — Application Log + Timeline
- Shared components/hooks so future streaming views reuse the same primitives.

Out of scope for infinite scroll (bounded data, don't need it):
- `LogTab.tsx` — per-exchange logs (bounded, one exchange, already capped at 500)
- `StartupLogPanel.tsx` — container logs during a single deployment startup

These two keep `useLogs` / `useStartupLogs` as-is. They will benefit automatically if `LogEntry.source` gets pushed through to `LogViewer` for badge rendering (already supported by the DS).

## Design

### 1. Backend: multi-value `source`

The log endpoint already supports comma-split `level`. Mirror that for `source`.

**`cameleer-server-core/.../search/LogSearchRequest.java`**
- Change `String source` → `List<String> sources` (default empty list).

**`cameleer-server-app/.../controller/LogQueryController.java`**
- `@RequestParam(required = false) String source` stays.
- Parse the same way as `level` (comma-split, trim, drop blanks) into a `List<String>`.

**`cameleer-server-app/.../search/ClickHouseLogStore.java`**
- Replace:

  ```
  if (request.source() != null && !request.source().isEmpty()) {
      baseConditions.add("source = ?");
      baseParams.add(request.source());
  }
  ```

  with a list-driven `source IN (?, ?, …)` built when `request.sources()` is non-empty.

No schema change. No migration.
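
A minimal, runnable sketch of the placeholder-per-value predicate this replacement builds. The class name `SourcePredicate` is illustrative; in the real change the logic lives inline in `ClickHouseLogStore.search`:

```java
import java.util.Collections;
import java.util.List;

final class SourcePredicate {
    /** Builds "source IN (?, ?, ...)" with one JDBC placeholder per selected source. */
    static String inClause(List<String> sources) {
        String placeholders = String.join(", ", Collections.nCopies(sources.size(), "?"));
        return "source IN (" + placeholders + ")";
    }

    public static void main(String[] args) {
        // Each source value is then bound positionally, one per placeholder.
        System.out.println(inClause(List.of("app", "container"))); // source IN (?, ?)
    }
}
```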

### 2. Backend: cursor pagination for agent events

Agent events currently return `List<AgentEventResponse>` with `limit` (default 50). No cursor. To support infinite scroll we need stable descending ordering + a cursor.

Events have no stable ID (the ClickHouse repository synthesizes `id = 0`). The stable ordering is `(timestamp DESC, instance_id ASC)`. The cursor encodes that tuple as an opaque base64 string, matching how `ClickHouseLogStore` builds log cursors.
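
The encode/decode round-trip looks like the following self-contained sketch (`EventCursor` is an illustrative name; note that the shipped repository ended up tiebreaking on the `insert_id` column rather than `instance_id` as drafted here, but the cursor mechanics are identical):

```java
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.format.DateTimeParseException;
import java.util.Base64;

final class EventCursor {
    /** Encodes the (timestamp, tiebreaker) tuple as an opaque base64url token. */
    static String encode(Instant ts, String tiebreaker) {
        String raw = ts.toString() + "|" + tiebreaker;
        return Base64.getUrlEncoder().withoutPadding()
                .encodeToString(raw.getBytes(StandardCharsets.UTF_8));
    }

    /** Decodes a token back into [timestamp, tiebreaker]; rejects malformed input. */
    static String[] decode(String cursor) {
        String decoded = new String(Base64.getUrlDecoder().decode(cursor), StandardCharsets.UTF_8);
        int bar = decoded.indexOf('|');
        if (bar <= 0 || bar == decoded.length() - 1) {
            throw new IllegalArgumentException("Malformed cursor");
        }
        try {
            Instant.parse(decoded.substring(0, bar)); // validate the timestamp half
        } catch (DateTimeParseException e) {
            throw new IllegalArgumentException("Malformed cursor", e);
        }
        return new String[] { decoded.substring(0, bar), decoded.substring(bar + 1) };
    }

    public static void main(String[] args) {
        String token = encode(Instant.parse("2026-04-17T10:00:00Z"), "agent-1");
        System.out.println(token);            // opaque to clients
        System.out.println(decode(token)[0]); // 2026-04-17T10:00:00Z
    }
}
```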
**`cameleer-server-core/.../agent/AgentEventService.java` / `AgentEventRepository.java`**
- Add an overload `queryEvents(appId, agentId, env, from, to, cursor, limit)` returning an `AgentEventPage { data, nextCursor, hasMore }`.
- Keep the existing un-cursored `queryEvents` if used elsewhere (audit usage; if not, delete it).

**`cameleer-server-app/.../storage/ClickHouseAgentEventRepository.java`**
- New method: on cursor, decode `(cursorTs, cursorInstance)` and add a `(timestamp, instance_id) < (?, ?)` predicate.
- Always `ORDER BY timestamp DESC, instance_id ASC`.
- Always fetch `limit + 1` rows; the last row determines `hasMore` + `nextCursor` (sketched below).
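
The fetch-N+1 trimming step as a self-contained sketch; the `Page` record here is illustrative (the repository inlines this logic into `queryPage`):

```java
import java.util.List;
import java.util.function.Function;

record Page<T>(List<T> data, String nextCursor, boolean hasMore) {
    /** Turns a fetch of limit+1 rows into a page: trim the sentinel row, derive the cursor. */
    static <T> Page<T> of(List<T> rows, int limit, Function<T, String> cursorOf) {
        boolean hasMore = rows.size() > limit;       // the extra row only signals "more exists"
        List<T> data = hasMore ? rows.subList(0, limit) : rows;
        String next = hasMore && !data.isEmpty()
                ? cursorOf.apply(data.get(data.size() - 1))  // cursor points at the last returned row
                : null;
        return new Page<>(data, next, hasMore);
    }

    public static void main(String[] args) {
        // limit 2, fetched 3 rows: keep 2, hasMore=true, cursor derived from "b"
        System.out.println(Page.of(List.of("a", "b", "c"), 2, s -> "cur-" + s));
    }
}
```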

**`cameleer-server-app/.../controller/AgentEventsController.java`**
- Accept an optional `?cursor=`; return `AgentEventPageResponse { data, nextCursor, hasMore }` instead of a bare list.

### 3. Shared UI primitives

Two thin primitives. No new design-system package; they live under `ui/src/components/` and `ui/src/hooks/` so they're reused from the app only.

**`ui/src/hooks/useInfiniteStream.ts`** — thin `useInfiniteQuery` wrapper:

```ts
interface StreamPage<T> { data: T[]; nextCursor: string | null; hasMore: boolean }

interface UseInfiniteStreamArgs<T> {
  queryKey: unknown[];
  fetchPage: (cursor: string | undefined) => Promise<StreamPage<T>>;
  enabled?: boolean;
  isAtTop: boolean;     // drives refetchInterval on/off
  refetchMs?: number;   // default 15000
}

interface UseInfiniteStreamResult<T> {
  items: T[];
  fetchNextPage: () => void;
  hasNextPage: boolean;
  isFetchingNextPage: boolean;
  isLoading: boolean;
  refresh: () => void;  // invalidates and re-fetches from page 1
}
```

Internals: wraps `useInfiniteQuery` with `getNextPageParam: (last) => last.hasMore ? last.nextCursor : undefined`, flattens pages, and exposes `refresh`, which calls `queryClient.invalidateQueries(queryKey)`.

**`ui/src/components/InfiniteScrollArea.tsx`** — scrollable container:

```tsx
interface InfiniteScrollAreaProps {
  onEndReached: () => void;  // call fetchNextPage
  onTopVisibilityChange: (b: boolean) => void;
  isFetchingNextPage: boolean;
  hasNextPage: boolean;
  maxHeight?: number | string;
  children: ReactNode;
}
```

Renders:
```
<div ref={scrollRef} style={{ overflowY: 'auto', maxHeight }}>
  <div ref={topSentinelRef} />     // IntersectionObserver -> onTopVisibilityChange
  {children}
  <div ref={bottomSentinelRef} />  // IntersectionObserver -> onEndReached
  {isFetchingNextPage && <Spinner />}
  {!hasNextPage && <EndMarker />}
</div>
```

A single `IntersectionObserver` per sentinel, with `rootMargin: '100px'` on the bottom one so the next page is prefetched before the user hits the literal bottom.

### 4. New hooks

**`ui/src/api/queries/logs.ts`**

Add `useInfiniteApplicationLogs`:

```ts
export function useInfiniteApplicationLogs(args: {
  application?: string;
  agentId?: string;
  sources?: string[];  // multi-select, server-side
  levels?: string[];   // multi-select, server-side
  exchangeId?: string;
  isAtTop: boolean;
}): UseInfiniteStreamResult<LogEntryResponse>
```

Under the hood: cursor-paginated calls to `/environments/{env}/logs`; `fetchPage(cursor)` sets `source=a,b`, `level=ERROR,WARN`, and the time range from the global filter store.

Keep `useLogs` and `useApplicationLogs` (the existing single-page wrapper) so `LogTab` and `StartupLogPanel` remain untouched. Mark `useApplicationLogs` internally as "bounded consumers only" via a short TSDoc line.

**`ui/src/api/queries/agents.ts`**

Add `useInfiniteAgentEvents`:

```ts
export function useInfiniteAgentEvents(args: {
  appId?: string;
  agentId?: string;
  isAtTop: boolean;
}): UseInfiniteStreamResult<AgentEventResponse>
```

The existing `useAgentEvents` is only used in `AgentHealth`/`AgentInstance` for the Timeline. It gets replaced by the new hook — no other consumers, no backwards-compat shim.

### 5. Page wiring — `AgentHealth.tsx` and `AgentInstance.tsx`

**Application Log block:**

- State
  - `logSources: Set<string>` (new, replaces `logSource: string`).
  - `logLevels: Set<string>` (unchanged shape, now passed through as `levels`).
  - `logSearch: string` (unchanged, client-side).
  - `logSortAsc: boolean` (unchanged).
  - `isLogAtTop: boolean` (new, from `InfiniteScrollArea`).

- Data
  - `const { items, fetchNextPage, hasNextPage, isFetchingNextPage, refresh } = useInfiniteApplicationLogs({ application: appId, agentId, sources: [...logSources], levels: [...logLevels], isAtTop: isLogAtTop });`

- Map → `LogEntry[]`
  - Same mapping as today plus `source: l.source ?? undefined` so `LogViewer`'s source badge lights up.

- Client-side
  - Text filter (`logSearch`) applied after flattening.
  - Sort reversal (`logSortAsc`) applied after that.

- ButtonGroup wiring
  - `<ButtonGroup items={LOG_SOURCE_ITEMS} value={logSources} onChange={setLogSources} />`
  - Level `<ButtonGroup>` unchanged (`value={logLevels}`), but `onChange` now implicitly re-queries.
  - A "Clear" button next to each group when the set is non-empty (source has 3 options; still low-cost and symmetric with level).

- Layout
  - `<LogViewer entries={filteredLogs} />` (no `maxHeight`) wrapped in `<InfiniteScrollArea maxHeight={360} ...>`.

**Timeline block:**

- `const { items, fetchNextPage, hasNextPage, isFetchingNextPage, refresh } = useInfiniteAgentEvents({ appId, isAtTop: isTimelineAtTop });`
- Map to `FeedEvent[]` (same transform as today).
- `<EventFeed events={feedEvents} />` inside `<InfiniteScrollArea>`.
- Timeline has no filters today; none are added in this spec.

**Refresh buttons:**
Both the log and timeline `<RefreshCw>` buttons call `refresh()` from the stream hook. The `logRefreshTo`/`eventRefreshTo` states are retired.

### 6. Auto-refetch gating semantics

- `InfiniteScrollArea` fires `onTopVisibilityChange(true)` when the top sentinel is fully in view, `false` when it scrolls out. Implemented via `IntersectionObserver` with threshold `1.0`.
- `useInfiniteStream` reads `isAtTop` and sets `refetchInterval: isAtTop ? refetchMs : false`. Tanstack re-evaluates on every render, so toggling the prop takes effect on the next cycle.
- Manual `refresh()` always works and scrolls back to the top (the scroll reset is owned by the page, calling `scrollRef.current?.scrollTo({ top: 0 })` after the refresh settles — wired in `AgentHealth` / `AgentInstance`).
- No viewport-preservation logic. The contract "refetch must not move the user's viewport" is satisfied by disabling refetch while the user is scrolled away.

### 7. Bounded views — minimal changes

- `LogTab.tsx`: map `e.source` into the rendered `LogEntry` so source badges appear. No other changes.
- `StartupLogPanel.tsx`: no change; startup-log rows are container-sourced by definition.

### 8. OpenAPI regeneration

After controller/DTO changes:
```
cd ui && npm run generate-api:live
```
Commit the resulting `openapi.json` + `schema.d.ts` in the same change. TypeScript will surface any SPA call sites that need adjustment; fix all of them before testing in the browser.

### 9. `.claude/rules/` updates

- `app-classes.md` — `LogQueryController` entry: add "multi-value `source` (comma-split)"; `AgentEventsController` entry: add "cursor-paginated, returns `{ data, nextCursor, hasMore }`".
- `ui.md` — add an entry for the `InfiniteScrollArea` component and note the "streaming views use `useInfiniteStream` + `InfiniteScrollArea`" convention.

## Testing

- **Backend unit**: `ClickHouseLogStoreTest` for the `source IN (...)` predicate (single, multi, empty). `ClickHouseAgentEventRepositoryTest` for cursor ordering and the `hasMore` boundary.
- **Controller**: `LogQueryControllerTest` (existing) — add a `source=app,container` case. `AgentEventsControllerTest` — cursor round-trip.
- **UI manual smoke**: scroll long log streams, mix source filters, verify auto-refetch toggles at the top, confirm the text filter stays local, confirm refresh resets scroll.

## Risks & mitigations

| Risk | Mitigation |
|---|---|
| Event cursor using `(timestamp, instance_id)` — collisions if two events share both | In practice events are recorded one-at-a-time per instance; the tuple is stable. The tie-breaker is fine as both fields are indexed. |
| `useInfiniteQuery` refetch reloads all pages → visible flicker | Use `placeholderData: (prev) => prev` so stale data stays during refetch. Tested pattern already used by `useApplicationLogs`. |
| IntersectionObserver on the top sentinel fires both on scroll and on list grow | Debounce via React state, not a ref — state updates coalesce per render. |
| DS `LogViewer` may virtualize internally, hiding sentinels | `LogViewer` today has no virtualization (DS 0.1.49 props are minimal). If that changes, move the sentinels into the outer scroll container (already the plan). |
| Level client→server change breaks existing users with saved filter state | No persisted filter state exists for log levels; it is in-memory only. |

## Decision log

- **Server-side for source and level, client-side for text.** Confirmed during design — the flooding argument applies symmetrically to source and level.
- **Gate auto-refetch on top visibility** instead of preserving the viewport. Rationale: viewport preservation requires stable row IDs (logs have timestamp collisions) and is fragile under virtualization. Gating is simple, predictable, and meets the "user never loses their line" constraint.
- **Bounded log views (LogTab, StartupLogPanel) keep single-page hooks.** Infinite scroll isn't useful for capped data; the shared primitives are available if they ever need it.
- **Agent events get cursor pagination**, matching logs. Single-source-of-truth pattern for streaming lists.
File diff suppressed because one or more lines are too long
@@ -3,6 +3,7 @@ import { config } from '../../config';
|
||||
import { useAuthStore } from '../../auth/auth-store';
|
||||
import { useEnvironmentStore } from '../environment-store';
|
||||
import { useRefreshInterval } from './use-refresh-interval';
|
||||
import { useInfiniteStream, type UseInfiniteStreamResult } from '../../hooks/useInfiniteStream';
|
||||
|
||||
export function useAgents(status?: string, application?: string) {
|
||||
const environment = useEnvironmentStore((s) => s.environment);
|
||||
@@ -31,18 +32,45 @@ export function useAgents(status?: string, application?: string) {
   });
 }
 
-export function useAgentEvents(appId?: string, agentId?: string, limit = 50, toOverride?: string) {
+export interface AgentEventResponse {
+  id: number;
+  instanceId: string;
+  applicationId: string;
+  eventType: string;
+  detail: string;
+  timestamp: string;
+}
+
+interface AgentEventPageResponse {
+  data: AgentEventResponse[];
+  nextCursor: string | null;
+  hasMore: boolean;
+}
+
+export interface UseInfiniteAgentEventsArgs {
+  appId?: string;
+  agentId?: string;
+  isAtTop: boolean;
+  pageSize?: number;
+}
+
+export function useInfiniteAgentEvents(
+  args: UseInfiniteAgentEventsArgs,
+): UseInfiniteStreamResult<AgentEventResponse> {
   const environment = useEnvironmentStore((s) => s.environment);
-  const refetchInterval = useRefreshInterval(15_000);
-  return useQuery({
-    queryKey: ['agents', 'events', environment, appId, agentId, limit, toOverride],
-    queryFn: async () => {
+  const pageSize = args.pageSize ?? 50;
+
+  return useInfiniteStream<AgentEventResponse>({
+    queryKey: ['agents', 'events', 'infinite', environment ?? '', args.appId ?? '', args.agentId ?? '', pageSize],
+    enabled: !!environment,
+    isAtTop: args.isAtTop,
+    fetchPage: async (cursor) => {
       const token = useAuthStore.getState().accessToken;
       const params = new URLSearchParams();
-      if (appId) params.set('appId', appId);
-      if (agentId) params.set('agentId', agentId);
-      if (toOverride) params.set('to', toOverride);
-      params.set('limit', String(limit));
+      if (args.appId) params.set('appId', args.appId);
+      if (args.agentId) params.set('agentId', args.agentId);
+      if (cursor) params.set('cursor', cursor);
+      params.set('limit', String(pageSize));
       const res = await fetch(
         `${config.apiBaseUrl}/environments/${encodeURIComponent(environment!)}/agents/events?${params}`,
         {
@@ -52,9 +80,8 @@ export function useAgentEvents(appId?: string, agentId?: string, limit = 50, toO
         },
       });
       if (!res.ok) throw new Error('Failed to load agent events');
-      return res.json();
+      const page: AgentEventPageResponse = await res.json();
+      return { data: page.data, nextCursor: page.nextCursor, hasMore: page.hasMore };
     },
-    enabled: !!environment,
-    refetchInterval,
   });
 }

@@ -4,6 +4,7 @@ import { useAuthStore } from '../../auth/auth-store';
 import { useRefreshInterval } from './use-refresh-interval';
 import { useGlobalFilters } from '@cameleer/design-system';
 import { useEnvironmentStore } from '../environment-store';
+import { useInfiniteStream, type UseInfiniteStreamResult } from '../../hooks/useInfiniteStream';
 
 export interface LogEntryResponse {
   timestamp: string;
@@ -157,3 +158,82 @@ export function useStartupLogs(
     refetchInterval: isStarting ? 3_000 : false,
   });
 }
+
+export interface UseInfiniteApplicationLogsArgs {
+  application?: string;
+  agentId?: string;
+  sources?: string[]; // multi-select, server-side OR
+  levels?: string[]; // multi-select, server-side OR
+  exchangeId?: string;
+  sort?: 'asc' | 'desc';
+  isAtTop: boolean;
+  pageSize?: number;
+}
+
+/**
+ * Cursor-paginated log stream. Filters `sources`, `levels`, and the global
+ * time range are applied server-side. Free-text search is applied by the
+ * caller on top of the flattened items.
+ */
+export function useInfiniteApplicationLogs(
+  args: UseInfiniteApplicationLogsArgs,
+): UseInfiniteStreamResult<LogEntryResponse> {
+  const { timeRange } = useGlobalFilters();
+  const selectedEnv = useEnvironmentStore((s) => s.environment);
+
+  const useTimeRange = !args.exchangeId;
+  const fromIso = useTimeRange ? timeRange.start.toISOString() : undefined;
+  const toIso = useTimeRange ? timeRange.end.toISOString() : undefined;
+
+  const sortedSources = (args.sources ?? []).slice().sort();
+  const sortedLevels = (args.levels ?? []).slice().sort();
+  const sourcesParam = sortedSources.join(',');
+  const levelsParam = sortedLevels.join(',');
+  const pageSize = args.pageSize ?? 100;
+  const sort = args.sort ?? 'desc';
+
+  return useInfiniteStream<LogEntryResponse>({
+    queryKey: [
+      'logs', 'infinite',
+      selectedEnv ?? '',
+      args.application ?? '',
+      args.agentId ?? '',
+      args.exchangeId ?? '',
+      sourcesParam,
+      levelsParam,
+      fromIso ?? '',
+      toIso ?? '',
+      pageSize,
+      sort,
+    ],
+    enabled: !!args.application && !!selectedEnv,
+    isAtTop: args.isAtTop,
+    fetchPage: async (cursor) => {
+      const token = useAuthStore.getState().accessToken;
+      const qp = new URLSearchParams();
+      if (args.application) qp.set('application', args.application);
+      if (args.agentId) qp.set('agentId', args.agentId);
+      if (args.exchangeId) qp.set('exchangeId', args.exchangeId);
+      if (sourcesParam) qp.set('source', sourcesParam);
+      if (levelsParam) qp.set('level', levelsParam);
+      if (fromIso) qp.set('from', fromIso);
+      if (toIso) qp.set('to', toIso);
+      if (cursor) qp.set('cursor', cursor);
+      qp.set('limit', String(pageSize));
+      qp.set('sort', sort);
+
+      const res = await fetch(
+        `${config.apiBaseUrl}/environments/${encodeURIComponent(selectedEnv ?? '')}/logs?${qp}`,
+        {
+          headers: {
+            Authorization: `Bearer ${token}`,
+            'X-Cameleer-Protocol-Version': '1',
+          },
+        },
+      );
+      if (!res.ok) throw new Error('Failed to load logs');
+      const page: LogSearchPageResponse = await res.json();
+      return { data: page.data, nextCursor: page.nextCursor, hasMore: page.hasMore };
+    },
+  });
+}
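A hypothetical call site for the hook above, showing the split its docblock describes: source, level, and time filters ride to the server, while free-text stays on the client over the flattened items. The slug `'billing-app'` and the helper name are placeholders:

```ts
import { useState } from 'react';
import { useInfiniteApplicationLogs } from './logs'; // assumed relative path

export function useVisibleLogs(q: string) {
  const [isAtTop, setIsAtTop] = useState(true); // wired to the top sentinel
  const stream = useInfiniteApplicationLogs({
    application: 'billing-app', // placeholder app slug
    sources: ['app'],           // server-side, comma-joined, OR semantics
    levels: ['ERROR', 'WARN'],  // server-side, comma-joined, OR semantics
    sort: 'desc',
    isAtTop,
  });
  const needle = q.toLowerCase();
  // Free-text filtering stays on the client, over the flattened pages.
  const visible = stream.items.filter(
    (l) => !needle || (l.message ?? '').toLowerCase().includes(needle),
  );
  return { stream, visible, setIsAtTop };
}
```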

14  ui/src/api/schema.d.ts  vendored

@@ -1395,7 +1395,7 @@ export interface paths {
     };
     /**
      * Query agent events in this environment
-     * @description Returns agent lifecycle events, optionally filtered by app and/or agent ID
+     * @description Cursor-paginated. Returns newest first. Pass nextCursor back as ?cursor= for the next page.
      */
     get: operations["getEvents"];
     put?: never;
@@ -2309,6 +2309,7 @@
       routeId: string;
       instanceId: string;
       applicationId: string;
+      environment: string;
       status: string;
       /** Format: date-time */
       startTime: string;
@@ -2641,6 +2642,12 @@
       /** Format: double */
       value: number;
     };
+    /** @description Cursor-paginated agent event list */
+    AgentEventPageResponse: {
+      data?: components["schemas"]["AgentEventResponse"][];
+      nextCursor?: string;
+      hasMore?: boolean;
+    };
     /** @description Agent lifecycle event */
     AgentEventResponse: {
       /** Format: int64 */
@@ -6058,6 +6065,7 @@
         agentId?: string;
         from?: string;
         to?: string;
+        cursor?: string;
         limit?: number;
       };
       header?: never;
@@ -6066,13 +6074,13 @@
       };
       requestBody?: never;
       responses: {
-        /** @description Events returned */
+        /** @description Event page returned */
        200: {
           headers: {
             [name: string]: unknown;
           };
           content: {
-            "*/*": components["schemas"]["AgentEventResponse"][];
+            "*/*": components["schemas"]["AgentEventPageResponse"];
           };
         };
       };
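Reading the regenerated types together: `getEvents` now returns a single `AgentEventPageResponse` rather than a bare array. A hedged sketch of a client draining the endpoint until `hasMore` goes false; the base URL, headers, and the local type aliases stand in for the generated ones:

```ts
// Local stand-ins mirroring the generated schemas above.
type AgentEvent = {
  id: number;
  instanceId: string;
  applicationId: string;
  eventType: string;
  detail: string;
  timestamp: string;
};

type AgentEventPage = { data?: AgentEvent[]; nextCursor?: string; hasMore?: boolean };

async function drainAgentEvents(baseUrl: string, headers: HeadersInit): Promise<AgentEvent[]> {
  const all: AgentEvent[] = [];
  let cursor: string | undefined;
  do {
    const qs = cursor ? `?cursor=${encodeURIComponent(cursor)}` : '';
    const res = await fetch(`${baseUrl}/agents/events${qs}`, { headers });
    if (!res.ok) throw new Error(`getEvents failed: ${res.status}`);
    const page: AgentEventPage = await res.json();
    all.push(...(page.data ?? []));
    // Only follow the cursor while the server says more pages exist.
    cursor = page.hasMore ? page.nextCursor : undefined;
  } while (cursor);
  return all;
}
```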
@@ -57,6 +57,7 @@ export function LogTab({ applicationId, exchangeId, processorId }: LogTabProps)
       timestamp: e.timestamp ?? '',
       level: mapLogLevel(e.level),
       message: e.message ?? '',
+      source: e.source ?? undefined,
     }));
   }, [logPage, processorId, filter]);
 


22  ui/src/components/InfiniteScrollArea.module.css  Normal file

@@ -0,0 +1,22 @@
+.scrollArea {
+  overflow-y: auto;
+  position: relative;
+}
+
+.sentinel {
+  height: 1px;
+  width: 100%;
+  pointer-events: none;
+}
+
+.loadingMore,
+.endOfStream {
+  padding: 8px 12px;
+  font-size: 11px;
+  color: var(--text-muted);
+  text-align: center;
+}
+
+.endOfStream {
+  opacity: 0.5;
+}

80  ui/src/components/InfiniteScrollArea.tsx  Normal file

@@ -0,0 +1,80 @@
+import { useEffect, useRef, type ReactNode, type RefObject } from 'react';
+import styles from './InfiniteScrollArea.module.css';
+
+export interface InfiniteScrollAreaProps {
+  onEndReached: () => void;
+  onTopVisibilityChange?: (atTop: boolean) => void;
+  isFetchingNextPage: boolean;
+  hasNextPage: boolean;
+  isLoading?: boolean;
+  hasItems?: boolean;
+  maxHeight?: number | string;
+  children: ReactNode;
+  /** Optional caller-owned scroll container ref (e.g. for scroll-to-top on refresh). */
+  scrollRef?: RefObject<HTMLDivElement | null>;
+  className?: string;
+}
+
+export function InfiniteScrollArea({
+  onEndReached,
+  onTopVisibilityChange,
+  isFetchingNextPage,
+  hasNextPage,
+  isLoading = false,
+  hasItems = true,
+  maxHeight = 360,
+  children,
+  scrollRef,
+  className,
+}: InfiniteScrollAreaProps) {
+  const internalRef = useRef<HTMLDivElement | null>(null);
+  const containerRef = scrollRef ?? internalRef;
+  const topSentinel = useRef<HTMLDivElement | null>(null);
+  const bottomSentinel = useRef<HTMLDivElement | null>(null);
+
+  useEffect(() => {
+    if (!onTopVisibilityChange) return;
+    const root = containerRef.current;
+    const target = topSentinel.current;
+    if (!root || !target) return;
+    const obs = new IntersectionObserver(
+      (entries) => {
+        for (const e of entries) onTopVisibilityChange(e.isIntersecting);
+      },
+      { root, threshold: 1.0 },
+    );
+    obs.observe(target);
+    return () => obs.disconnect();
+  }, [containerRef, onTopVisibilityChange]);
+
+  useEffect(() => {
+    if (!hasNextPage) return;
+    const root = containerRef.current;
+    const target = bottomSentinel.current;
+    if (!root || !target) return;
+    const obs = new IntersectionObserver(
+      (entries) => {
+        for (const e of entries) if (e.isIntersecting) onEndReached();
+      },
+      { root, rootMargin: '100px', threshold: 0 },
+    );
+    obs.observe(target);
+    return () => obs.disconnect();
+  }, [containerRef, onEndReached, hasNextPage]);
+
+  return (
+    <div
+      ref={containerRef}
+      className={`${styles.scrollArea}${className ? ` ${className}` : ''}`}
+      style={{ maxHeight }}
+    >
+      <div ref={topSentinel} className={styles.sentinel} aria-hidden="true" />
+      {children}
+      <div ref={bottomSentinel} className={styles.sentinel} aria-hidden="true" />
+      {isFetchingNextPage && <div className={styles.loadingMore}>Loading more…</div>}
+      {!hasNextPage && !isLoading && hasItems && (
+        <div className={styles.endOfStream}>End of stream</div>
+      )}
+    </div>
+  );
+}

71  ui/src/hooks/useInfiniteStream.ts  Normal file

@@ -0,0 +1,71 @@
+import { useInfiniteQuery, useQueryClient } from '@tanstack/react-query';
+import { useCallback, useMemo } from 'react';
+
+export interface StreamPage<T> {
+  data: T[];
+  nextCursor: string | null;
+  hasMore: boolean;
+}
+
+export interface UseInfiniteStreamArgs<T> {
+  queryKey: readonly unknown[];
+  fetchPage: (cursor: string | undefined) => Promise<StreamPage<T>>;
+  enabled?: boolean;
+  /** When true, the query auto-refetches every refetchMs ms. When false, polling pauses. */
+  isAtTop: boolean;
+  refetchMs?: number;
+  staleTime?: number;
+}
+
+export interface UseInfiniteStreamResult<T> {
+  items: T[];
+  fetchNextPage: () => void;
+  hasNextPage: boolean;
+  isFetchingNextPage: boolean;
+  isLoading: boolean;
+  refresh: () => void;
+}
+
+/**
+ * Thin wrapper over tanstack useInfiniteQuery that:
+ * - flattens pages into a single items[] array (newest first)
+ * - gates auto-refetch on isAtTop (so a user scrolled down does not lose their viewport)
+ * - exposes refresh() that invalidates the query (reset to page 1 on next render)
+ */
+export function useInfiniteStream<T>(args: UseInfiniteStreamArgs<T>): UseInfiniteStreamResult<T> {
+  const { queryKey, fetchPage, enabled = true, isAtTop, refetchMs = 15_000, staleTime = 300 } = args;
+  const queryClient = useQueryClient();
+
+  const query = useInfiniteQuery<StreamPage<T>, Error>({
+    queryKey: [...queryKey],
+    initialPageParam: undefined as string | undefined,
+    queryFn: ({ pageParam }) => fetchPage(pageParam as string | undefined),
+    getNextPageParam: (last) => (last.hasMore && last.nextCursor ? last.nextCursor : undefined),
+    enabled,
+    refetchInterval: isAtTop ? refetchMs : false,
+    staleTime,
+    placeholderData: (prev) => prev,
+  });
+
+  const items = useMemo<T[]>(
+    () => (query.data?.pages ?? []).flatMap((p) => p.data),
+    [query.data],
+  );
+
+  const fetchNextPage = useCallback(() => {
+    if (query.hasNextPage && !query.isFetchingNextPage) query.fetchNextPage();
+  }, [query.hasNextPage, query.isFetchingNextPage, query.fetchNextPage]);
+
+  const refresh = useCallback(() => {
+    queryClient.invalidateQueries({ queryKey: [...queryKey] });
+  }, [queryClient, queryKey]);
+
+  return {
+    items,
+    fetchNextPage,
+    hasNextPage: !!query.hasNextPage,
+    isFetchingNextPage: query.isFetchingNextPage,
+    isLoading: query.isLoading,
+    refresh,
+  };
+}
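To see what the flattening comment in `useInfiniteStream` means in practice: with a descending sort, page 1 holds the newest rows and each cursor-fetched page is strictly older, so a plain `flatMap` preserves newest-first order with no re-sort. A tiny worked example (the data is invented for illustration):

```ts
// Two desc-sorted pages shaped like StreamPage<string>.
const pages = [
  { data: ['10:05', '10:04'], nextCursor: 'c1', hasMore: true },
  { data: ['10:03', '10:02'], nextCursor: null, hasMore: false },
];

const items = pages.flatMap((p) => p.data);
// -> ['10:05', '10:04', '10:03', '10:02']  (newest first, already ordered)
```

The consumers that follow (AgentHealth, AgentInstance) wire this hook and `InfiniteScrollArea` together end to end.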
@@ -11,8 +11,10 @@ import type { Column, FeedEvent, LogEntry, ButtonGroupItem } from '@cameleer/des
 import styles from './AgentHealth.module.css';
 import sectionStyles from '../../styles/section-card.module.css';
 import logStyles from '../../styles/log-panel.module.css';
-import { useAgents, useAgentEvents } from '../../api/queries/agents';
-import { useApplicationLogs } from '../../api/queries/logs';
+import { useAgents } from '../../api/queries/agents';
+import { useInfiniteApplicationLogs } from '../../api/queries/logs';
+import { useInfiniteAgentEvents } from '../../api/queries/agents';
+import { InfiniteScrollArea } from '../../components/InfiniteScrollArea';
 import { useApplicationConfig, useUpdateApplicationConfig } from '../../api/queries/commands';
 import { useCatalog, useDismissApp } from '../../api/queries/catalog';
 import { useIsAdmin } from '../../auth/auth-store';
@@ -281,8 +283,9 @@ export default function AgentHealth() {
     });
   }, [appConfig, configDraft, updateConfig, toast, appId]);
   const [eventSortAsc, setEventSortAsc] = useState(false);
-  const [eventRefreshTo, setEventRefreshTo] = useState<string | undefined>();
-  const { data: events } = useAgentEvents(appId, undefined, 50, eventRefreshTo);
+  const [isTimelineAtTop, setIsTimelineAtTop] = useState(true);
+  const timelineScrollRef = useRef<HTMLDivElement | null>(null);
+  const eventStream = useInfiniteAgentEvents({ appId, isAtTop: isTimelineAtTop });
 
   const [appFilter, setAppFilter] = useState('');
   type AppSortKey = 'status' | 'name' | 'tps' | 'cpu' | 'heartbeat';
@@ -300,22 +303,30 @@ export default function AgentHealth() {
 
   const [logSearch, setLogSearch] = useState('');
   const [logLevels, setLogLevels] = useState<Set<string>>(new Set());
-  const [logSource, setLogSource] = useState<string>(''); // '' = all, 'app', 'agent'
+  const [logSources, setLogSources] = useState<Set<string>>(new Set());
   const [logSortAsc, setLogSortAsc] = useState(false);
-  const [logRefreshTo, setLogRefreshTo] = useState<string | undefined>();
-  const { data: rawLogs } = useApplicationLogs(appId, undefined, { toOverride: logRefreshTo, source: logSource || undefined });
+  const [isLogAtTop, setIsLogAtTop] = useState(true);
+  const logScrollRef = useRef<HTMLDivElement | null>(null);
+
+  const logStream = useInfiniteApplicationLogs({
+    application: appId,
+    sources: [...logSources],
+    levels: [...logLevels],
+    sort: logSortAsc ? 'asc' : 'desc',
+    isAtTop: isLogAtTop,
+  });
   const logEntries = useMemo<LogEntry[]>(() => {
-    const mapped = (rawLogs || []).map((l) => ({
+    return logStream.items.map((l) => ({
       timestamp: l.timestamp ?? '',
       level: mapLogLevel(l.level),
       message: l.message ?? '',
       source: l.source ?? undefined,
     }));
-    return logSortAsc ? mapped.toReversed() : mapped;
-  }, [rawLogs, logSortAsc]);
+  }, [logStream.items]);
   const logSearchLower = logSearch.toLowerCase();
-  const filteredLogs = logEntries
-    .filter((l) => logLevels.size === 0 || logLevels.has(l.level))
-    .filter((l) => !logSearchLower || l.message.toLowerCase().includes(logSearchLower));
+  const filteredLogs = logSearchLower
+    ? logEntries.filter((l) => l.message.toLowerCase().includes(logSearchLower))
+    : logEntries;
 
   const agentList = agents ?? [];
@@ -359,15 +370,15 @@ export default function AgentHealth() {
 
   // Map events to FeedEvent
   const feedEvents: FeedEvent[] = useMemo(() => {
-    const mapped = (events ?? []).map((e: { id: number; instanceId: string; eventType: string; detail: string; timestamp: string }) => ({
-      id: String(e.id),
+    const mapped = eventStream.items.map((e) => ({
+      id: `${e.timestamp}:${e.instanceId}:${e.eventType}`,
       severity: eventSeverity(e.eventType),
       icon: eventIcon(e.eventType),
       message: `${e.instanceId}: ${e.eventType}${e.detail ? ' \u2014 ' + e.detail : ''}`,
       timestamp: new Date(e.timestamp),
     }));
     return eventSortAsc ? mapped.toReversed() : mapped;
-  }, [events, eventSortAsc]);
+  }, [eventStream.items, eventSortAsc]);
 
   // Column definitions for the instance DataTable
   const instanceColumns: Column<AgentInstance>[] = useMemo(
@@ -890,11 +901,18 @@ export default function AgentHealth() {
         <div className={logStyles.logHeader}>
           <SectionHeader>Application Log</SectionHeader>
           <div className={logStyles.headerActions}>
-            <span className={styles.sectionMeta}>{logEntries.length} entries</span>
+            <span className={styles.sectionMeta}>
+              {filteredLogs.length === logStream.items.length
+                ? `${filteredLogs.length} entries`
+                : `${filteredLogs.length} of ${logStream.items.length} entries`}
+            </span>
             <Button variant="ghost" size="sm" onClick={() => setLogSortAsc((v) => !v)} title={logSortAsc ? 'Oldest first' : 'Newest first'}>
               {logSortAsc ? '\u2191' : '\u2193'}
             </Button>
-            <Button variant="ghost" size="sm" onClick={() => setLogRefreshTo(new Date().toISOString())} title="Refresh">
+            <Button variant="ghost" size="sm" onClick={() => {
+              logStream.refresh();
+              logScrollRef.current?.scrollTo({ top: 0 });
+            }} title="Refresh">
               <RefreshCw size={14} />
             </Button>
           </div>
@@ -922,9 +940,14 @@ export default function AgentHealth() {
           </div>
           <ButtonGroup
             items={LOG_SOURCE_ITEMS}
-            value={logSource ? new Set([logSource]) : new Set()}
-            onChange={(v) => setLogSource(v.size === 0 ? '' : [...v][0])}
+            value={logSources}
+            onChange={setLogSources}
           />
+          {logSources.size > 0 && (
+            <Button variant="ghost" size="sm" onClick={() => setLogSources(new Set())}>
+              Clear
+            </Button>
+          )}
           <ButtonGroup items={LOG_LEVEL_ITEMS} value={logLevels} onChange={setLogLevels} />
           {logLevels.size > 0 && (
             <Button variant="ghost" size="sm" onClick={() => setLogLevels(new Set())}>
@@ -932,33 +955,62 @@ export default function AgentHealth() {
             </Button>
           )}
         </div>
-        {filteredLogs.length > 0 ? (
-          <LogViewer entries={filteredLogs} maxHeight={360} />
-        ) : (
-          <div className={logStyles.logEmpty}>
-            {logSearch || logLevels.size > 0 ? 'No matching log entries' : 'No log entries available'}
-          </div>
-        )}
+        <InfiniteScrollArea
+          scrollRef={logScrollRef}
+          onEndReached={logStream.fetchNextPage}
+          onTopVisibilityChange={setIsLogAtTop}
+          isFetchingNextPage={logStream.isFetchingNextPage}
+          hasNextPage={logStream.hasNextPage}
+          isLoading={logStream.isLoading}
+          hasItems={logStream.items.length > 0}
+          maxHeight={360}
+        >
+          {filteredLogs.length > 0 ? (
+            <LogViewer entries={filteredLogs} />
+          ) : (
+            <div className={logStyles.logEmpty}>
+              {logSearch || logLevels.size > 0 || logSources.size > 0
+                ? 'No matching log entries'
+                : logStream.isLoading ? 'Loading logs\u2026' : 'No log entries available'}
+            </div>
+          )}
+        </InfiniteScrollArea>
       </div>
 
       <div className={`${sectionStyles.section} ${styles.eventCard}`}>
         <div className={styles.eventCardHeader}>
           <span className={styles.sectionTitle}>Timeline</span>
           <div className={logStyles.headerActions}>
-            <span className={styles.sectionMeta}>{feedEvents.length} events</span>
+            <span className={styles.sectionMeta}>{eventStream.items.length} events</span>
             <Button variant="ghost" size="sm" onClick={() => setEventSortAsc((v) => !v)} title={eventSortAsc ? 'Oldest first' : 'Newest first'}>
               {eventSortAsc ? '\u2191' : '\u2193'}
             </Button>
-            <Button variant="ghost" size="sm" onClick={() => setEventRefreshTo(new Date().toISOString())} title="Refresh">
+            <Button variant="ghost" size="sm" onClick={() => {
+              eventStream.refresh();
+              timelineScrollRef.current?.scrollTo({ top: 0 });
+            }} title="Refresh">
               <RefreshCw size={14} />
             </Button>
           </div>
         </div>
-        {feedEvents.length > 0 ? (
-          <EventFeed events={feedEvents} maxItems={100} />
-        ) : (
-          <div className={logStyles.logEmpty}>No events in the selected time range.</div>
-        )}
+        <InfiniteScrollArea
+          scrollRef={timelineScrollRef}
+          onEndReached={eventStream.fetchNextPage}
+          onTopVisibilityChange={setIsTimelineAtTop}
+          isFetchingNextPage={eventStream.isFetchingNextPage}
+          hasNextPage={eventStream.hasNextPage}
+          isLoading={eventStream.isLoading}
+          hasItems={eventStream.items.length > 0}
+          maxHeight={360}
+        >
+          {feedEvents.length > 0 ? (
+            <EventFeed events={feedEvents} />
+          ) : (
+            <div className={logStyles.logEmpty}>
+              {eventStream.isLoading ? 'Loading events\u2026' : 'No events in the selected time range.'}
+            </div>
+          )}
+        </InfiniteScrollArea>
       </div>
     </div>

@@ -1,6 +1,9 @@
-import { useMemo, useState } from 'react';
+import { useMemo, useRef, useState } from 'react';
 import { useParams } from 'react-router';
 import { RefreshCw } from 'lucide-react';
+import { useInfiniteApplicationLogs } from '../../api/queries/logs';
+import { useInfiniteAgentEvents } from '../../api/queries/agents';
+import { InfiniteScrollArea } from '../../components/InfiniteScrollArea';
 import {
   StatCard, StatusDot, Badge, ThemedChart, Line, Area, ReferenceLine, CHART_COLORS,
   EventFeed, Spinner, EmptyState, SectionHeader, MonoText,
@@ -11,8 +14,7 @@ import styles from './AgentInstance.module.css';
 import sectionStyles from '../../styles/section-card.module.css';
 import logStyles from '../../styles/log-panel.module.css';
 import chartCardStyles from '../../styles/chart-card.module.css';
-import { useAgents, useAgentEvents } from '../../api/queries/agents';
-import { useApplicationLogs } from '../../api/queries/logs';
+import { useAgents } from '../../api/queries/agents';
 import { useAgentMetrics } from '../../api/queries/agent-metrics';
 import { formatUptime, mapLogLevel, eventSeverity, eventIcon } from '../../utils/agent-utils';
 import { useEnvironmentStore } from '../../api/environment-store';
@@ -36,17 +38,19 @@ export default function AgentInstance() {
   const { timeRange } = useGlobalFilters();
   const [logSearch, setLogSearch] = useState('');
   const [logLevels, setLogLevels] = useState<Set<string>>(new Set());
-  const [logSource, setLogSource] = useState<string>(''); // '' = all, 'app', 'agent'
+  const [logSources, setLogSources] = useState<Set<string>>(new Set());
   const [logSortAsc, setLogSortAsc] = useState(false);
   const [eventSortAsc, setEventSortAsc] = useState(false);
-  const [logRefreshTo, setLogRefreshTo] = useState<string | undefined>();
-  const [eventRefreshTo, setEventRefreshTo] = useState<string | undefined>();
+  const [isLogAtTop, setIsLogAtTop] = useState(true);
+  const [isTimelineAtTop, setIsTimelineAtTop] = useState(true);
+  const logScrollRef = useRef<HTMLDivElement | null>(null);
+  const timelineScrollRef = useRef<HTMLDivElement | null>(null);
   const timeFrom = timeRange.start.toISOString();
   const timeTo = timeRange.end.toISOString();
 
   const selectedEnv = useEnvironmentStore((s) => s.environment);
   const { data: agents, isLoading } = useAgents(undefined, appId);
-  const { data: events } = useAgentEvents(appId, instanceId, 50, eventRefreshTo);
+  const eventStream = useInfiniteAgentEvents({ appId, agentId: instanceId, isAtTop: isTimelineAtTop });
 
   const agent = useMemo(
     () => (agents || []).find((a: any) => a.instanceId === instanceId) as any,
@@ -80,17 +84,17 @@ export default function AgentInstance() {
   );
 
   const feedEvents = useMemo<FeedEvent[]>(() => {
-    const mapped = (events || [])
-      .filter((e: any) => !instanceId || e.instanceId === instanceId)
-      .map((e: any) => ({
-        id: String(e.id),
+    const mapped = eventStream.items
+      .filter((e) => !instanceId || e.instanceId === instanceId)
+      .map((e) => ({
+        id: `${e.timestamp}:${e.instanceId}:${e.eventType}`,
         severity: eventSeverity(e.eventType),
         icon: eventIcon(e.eventType),
         message: `${e.eventType}${e.detail ? ' \u2014 ' + e.detail : ''}`,
         timestamp: new Date(e.timestamp),
       }));
     return eventSortAsc ? mapped.toReversed() : mapped;
-  }, [events, instanceId, eventSortAsc]);
+  }, [eventStream.items, instanceId, eventSortAsc]);
 
   const formatTime = (t: string) =>
     new Date(t).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' });
@@ -132,19 +136,26 @@ export default function AgentInstance() {
   }, [agentMetrics]);
 
   // Application logs
-  const { data: rawLogs } = useApplicationLogs(appId, instanceId, { toOverride: logRefreshTo, source: logSource || undefined });
+  const logStream = useInfiniteApplicationLogs({
+    application: appId,
+    agentId: instanceId,
+    sources: [...logSources],
+    levels: [...logLevels],
+    sort: logSortAsc ? 'asc' : 'desc',
+    isAtTop: isLogAtTop,
+  });
   const logEntries = useMemo<LogEntry[]>(() => {
-    const mapped = (rawLogs || []).map((l) => ({
+    return logStream.items.map((l) => ({
       timestamp: l.timestamp ?? '',
       level: mapLogLevel(l.level),
       message: l.message ?? '',
       source: l.source ?? undefined,
     }));
-    return logSortAsc ? mapped.toReversed() : mapped;
-  }, [rawLogs, logSortAsc]);
+  }, [logStream.items]);
   const searchLower = logSearch.toLowerCase();
-  const filteredLogs = logEntries
-    .filter((l) => logLevels.size === 0 || logLevels.has(l.level))
-    .filter((l) => !searchLower || l.message.toLowerCase().includes(searchLower));
+  const filteredLogs = searchLower
+    ? logEntries.filter((l) => l.message.toLowerCase().includes(searchLower))
+    : logEntries;
 
   if (isLoading) return <Spinner size="lg" />;
@@ -396,11 +407,18 @@ export default function AgentInstance() {
         <div className={logStyles.logHeader}>
           <SectionHeader>Application Log</SectionHeader>
           <div className={logStyles.headerActions}>
-            <span className={styles.chartMeta}>{logEntries.length} entries</span>
+            <span className={styles.chartMeta}>
+              {filteredLogs.length === logStream.items.length
+                ? `${filteredLogs.length} entries`
+                : `${filteredLogs.length} of ${logStream.items.length} entries`}
+            </span>
             <button className={logStyles.sortBtn} onClick={() => setLogSortAsc((v) => !v)} title={logSortAsc ? 'Oldest first' : 'Newest first'}>
               {logSortAsc ? '\u2191' : '\u2193'}
             </button>
-            <button className={logStyles.refreshBtn} onClick={() => setLogRefreshTo(new Date().toISOString())} title="Refresh">
+            <button className={logStyles.refreshBtn} onClick={() => {
+              logStream.refresh();
+              logScrollRef.current?.scrollTo({ top: 0 });
+            }} title="Refresh">
               <RefreshCw size={14} />
             </button>
           </div>
@@ -428,9 +446,14 @@ export default function AgentInstance() {
           </div>
           <ButtonGroup
             items={LOG_SOURCE_ITEMS}
-            value={logSource ? new Set([logSource]) : new Set()}
-            onChange={(v) => setLogSource(v.size === 0 ? '' : [...v][0])}
+            value={logSources}
+            onChange={setLogSources}
           />
+          {logSources.size > 0 && (
+            <button className={logStyles.logClearFilters} onClick={() => setLogSources(new Set())}>
+              Clear
+            </button>
+          )}
           <ButtonGroup items={LOG_LEVEL_ITEMS} value={logLevels} onChange={setLogLevels} />
           {logLevels.size > 0 && (
             <button className={logStyles.logClearFilters} onClick={() => setLogLevels(new Set())}>
@@ -438,33 +461,62 @@ export default function AgentInstance() {
             </button>
           )}
         </div>
-        {filteredLogs.length > 0 ? (
-          <LogViewer entries={filteredLogs} maxHeight={360} />
-        ) : (
-          <div className={logStyles.logEmpty}>
-            {logSearch || logLevels.size > 0 ? 'No matching log entries' : 'No log entries available'}
-          </div>
-        )}
+        <InfiniteScrollArea
+          scrollRef={logScrollRef}
+          onEndReached={logStream.fetchNextPage}
+          onTopVisibilityChange={setIsLogAtTop}
+          isFetchingNextPage={logStream.isFetchingNextPage}
+          hasNextPage={logStream.hasNextPage}
+          isLoading={logStream.isLoading}
+          hasItems={logStream.items.length > 0}
+          maxHeight={360}
+        >
+          {filteredLogs.length > 0 ? (
+            <LogViewer entries={filteredLogs} />
+          ) : (
+            <div className={logStyles.logEmpty}>
+              {logSearch || logLevels.size > 0 || logSources.size > 0
+                ? 'No matching log entries'
+                : logStream.isLoading ? 'Loading logs…' : 'No log entries available'}
+            </div>
+          )}
+        </InfiniteScrollArea>
       </div>
 
       <div className={`${sectionStyles.section} ${styles.timelineCard}`}>
         <div className={styles.timelineHeader}>
           <span className={styles.chartTitle}>Timeline</span>
           <div className={logStyles.headerActions}>
-            <span className={styles.chartMeta}>{feedEvents.length} events</span>
+            <span className={styles.chartMeta}>{eventStream.items.length} events</span>
             <button className={logStyles.sortBtn} onClick={() => setEventSortAsc((v) => !v)} title={eventSortAsc ? 'Oldest first' : 'Newest first'}>
               {eventSortAsc ? '\u2191' : '\u2193'}
             </button>
-            <button className={logStyles.refreshBtn} onClick={() => setEventRefreshTo(new Date().toISOString())} title="Refresh">
+            <button className={logStyles.refreshBtn} onClick={() => {
+              eventStream.refresh();
+              timelineScrollRef.current?.scrollTo({ top: 0 });
+            }} title="Refresh">
              <RefreshCw size={14} />
             </button>
           </div>
         </div>
-        {feedEvents.length > 0 ? (
-          <EventFeed events={feedEvents} maxItems={50} />
-        ) : (
-          <div className={logStyles.logEmpty}>No events in the selected time range.</div>
-        )}
+        <InfiniteScrollArea
+          scrollRef={timelineScrollRef}
+          onEndReached={eventStream.fetchNextPage}
+          onTopVisibilityChange={setIsTimelineAtTop}
+          isFetchingNextPage={eventStream.isFetchingNextPage}
+          hasNextPage={eventStream.hasNextPage}
+          isLoading={eventStream.isLoading}
+          hasItems={eventStream.items.length > 0}
+          maxHeight={360}
+        >
+          {feedEvents.length > 0 ? (
+            <EventFeed events={feedEvents} />
+          ) : (
+            <div className={logStyles.logEmpty}>
+              {eventStream.isLoading ? 'Loading events…' : 'No events in the selected time range.'}
+            </div>
+          )}
+        </InfiniteScrollArea>
       </div>
     </div>
   </div>
