From b73f5e6dd4f8feea09a77328da28b0d04992c1d5 Mon Sep 17 00:00:00 2001 From: hsiegeln <37154749+hsiegeln@users.noreply.github.com> Date: Thu, 2 Apr 2026 08:47:16 +0200 Subject: [PATCH] feat: add Logs tab with cursor-paginated search, level filters, and live tail MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Extend GET /api/v1/logs with cursor pagination, multi-level filtering, optional application scoping, and level count aggregation - Add exchangeId, instanceId, application, mdc fields to log responses - Refactor ClickHouseLogStore with keyset pagination (N+1 pattern) - Add LogSearchRequest/LogSearchResponse core domain records - Create LogSearchPageResponse wrapper DTO - Add Logs as 4th content tab (Exchanges | Dashboard | Runtime | Logs) - Implement LogSearch component with debounced search, level filter bar, expandable log entries, cursor pagination, and live tail mode - Add cross-navigation: exchange header → logs, log tab → logs tab - Update ClickHouseLogStoreIT with cursor, multi-level, cross-app tests Closes: #104 Co-Authored-By: Claude Opus 4.6 (1M context) --- .../app/controller/LogQueryController.java | 53 +++-- .../server/app/dto/LogEntryResponse.java | 10 +- .../server/app/dto/LogSearchPageResponse.java | 14 ++ .../server/app/search/ClickHouseLogStore.java | 148 +++++++++--- .../app/search/ClickHouseLogStoreIT.java | 188 +++++++++++++-- .../server/core/search/LogSearchRequest.java | 44 ++++ .../server/core/search/LogSearchResponse.java | 21 ++ .../server/core/storage/LogEntryResult.java | 6 +- .../server/core/storage/LogIndex.java | 7 +- .../2026-04-01-ux-audit-pmf-readiness.md | 95 ++++++++ ui/src/api/queries/logs.ts | 114 +++++++-- ui/src/components/ContentTabs.tsx | 1 + .../ExecutionDiagram/tabs/LogTab.tsx | 48 ++-- ui/src/hooks/useScope.ts | 4 +- ui/src/pages/Exchanges/ExchangeHeader.tsx | 9 +- ui/src/pages/LogsTab/LevelFilterBar.tsx | 50 ++++ ui/src/pages/LogsTab/LogEntry.module.css | 187 
+++++++++++++++ ui/src/pages/LogsTab/LogEntry.tsx | 134 +++++++++++ ui/src/pages/LogsTab/LogSearch.module.css | 156 ++++++++++++ ui/src/pages/LogsTab/LogSearch.tsx | 222 ++++++++++++++++++ ui/src/pages/LogsTab/LogsPage.tsx | 7 + ui/src/router.tsx | 6 + 22 files changed, 1405 insertions(+), 119 deletions(-) create mode 100644 cameleer3-server-app/src/main/java/com/cameleer3/server/app/dto/LogSearchPageResponse.java create mode 100644 cameleer3-server-core/src/main/java/com/cameleer3/server/core/search/LogSearchRequest.java create mode 100644 cameleer3-server-core/src/main/java/com/cameleer3/server/core/search/LogSearchResponse.java create mode 100644 docs/superpowers/specs/2026-04-01-ux-audit-pmf-readiness.md create mode 100644 ui/src/pages/LogsTab/LevelFilterBar.tsx create mode 100644 ui/src/pages/LogsTab/LogEntry.module.css create mode 100644 ui/src/pages/LogsTab/LogEntry.tsx create mode 100644 ui/src/pages/LogsTab/LogSearch.module.css create mode 100644 ui/src/pages/LogsTab/LogSearch.tsx create mode 100644 ui/src/pages/LogsTab/LogsPage.tsx diff --git a/cameleer3-server-app/src/main/java/com/cameleer3/server/app/controller/LogQueryController.java b/cameleer3-server-app/src/main/java/com/cameleer3/server/app/controller/LogQueryController.java index 32500c17..78e73031 100644 --- a/cameleer3-server-app/src/main/java/com/cameleer3/server/app/controller/LogQueryController.java +++ b/cameleer3-server-app/src/main/java/com/cameleer3/server/app/controller/LogQueryController.java @@ -1,7 +1,9 @@ package com.cameleer3.server.app.controller; import com.cameleer3.server.app.dto.LogEntryResponse; -import com.cameleer3.server.core.storage.LogEntryResult; +import com.cameleer3.server.app.dto.LogSearchPageResponse; +import com.cameleer3.server.core.search.LogSearchRequest; +import com.cameleer3.server.core.search.LogSearchResponse; import com.cameleer3.server.core.storage.LogIndex; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.tags.Tag; @@ 
-12,6 +14,7 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import java.time.Instant; +import java.util.Arrays; import java.util.List; @RestController @@ -27,30 +30,52 @@ public class LogQueryController { @GetMapping @Operation(summary = "Search application log entries", - description = "Returns log entries for a given application, optionally filtered by agent, level, time range, and text query") - public ResponseEntity> searchLogs( - @RequestParam String application, - @RequestParam(name = "agentId", required = false) String instanceId, - @RequestParam(required = false) String level, + description = "Returns log entries with cursor-based pagination and level count aggregation. " + + "Supports free-text search, multi-level filtering, and optional application scoping.") + public ResponseEntity searchLogs( + @RequestParam(required = false) String q, @RequestParam(required = false) String query, + @RequestParam(required = false) String level, + @RequestParam(required = false) String application, + @RequestParam(name = "agentId", required = false) String instanceId, @RequestParam(required = false) String exchangeId, + @RequestParam(required = false) String logger, @RequestParam(required = false) String from, @RequestParam(required = false) String to, - @RequestParam(defaultValue = "200") int limit) { + @RequestParam(required = false) String cursor, + @RequestParam(defaultValue = "100") int limit, + @RequestParam(defaultValue = "desc") String sort) { - limit = Math.min(limit, 1000); + // q takes precedence over deprecated query param + String searchText = q != null ? q : query; + + // Parse CSV levels + List levels = List.of(); + if (level != null && !level.isEmpty()) { + levels = Arrays.stream(level.split(",")) + .map(String::trim) + .filter(s -> !s.isEmpty()) + .toList(); + } Instant fromInstant = from != null ? Instant.parse(from) : null; Instant toInstant = to != null ? 
Instant.parse(to) : null; - List results = logIndex.search( - application, instanceId, level, query, exchangeId, fromInstant, toInstant, limit); + LogSearchRequest request = new LogSearchRequest( + searchText, levels, application, instanceId, exchangeId, + logger, fromInstant, toInstant, cursor, limit, sort); - List entries = results.stream() - .map(r -> new LogEntryResponse(r.timestamp(), r.level(), r.loggerName(), - r.message(), r.threadName(), r.stackTrace())) + LogSearchResponse result = logIndex.search(request); + + List entries = result.data().stream() + .map(r -> new LogEntryResponse( + r.timestamp(), r.level(), r.loggerName(), + r.message(), r.threadName(), r.stackTrace(), + r.exchangeId(), r.instanceId(), r.application(), + r.mdc())) .toList(); - return ResponseEntity.ok(entries); + return ResponseEntity.ok(new LogSearchPageResponse( + entries, result.nextCursor(), result.hasMore(), result.levelCounts())); } } diff --git a/cameleer3-server-app/src/main/java/com/cameleer3/server/app/dto/LogEntryResponse.java b/cameleer3-server-app/src/main/java/com/cameleer3/server/app/dto/LogEntryResponse.java index 7d5f3e9e..0f16764a 100644 --- a/cameleer3-server-app/src/main/java/com/cameleer3/server/app/dto/LogEntryResponse.java +++ b/cameleer3-server-app/src/main/java/com/cameleer3/server/app/dto/LogEntryResponse.java @@ -2,12 +2,18 @@ package com.cameleer3.server.app.dto; import io.swagger.v3.oas.annotations.media.Schema; +import java.util.Map; + @Schema(description = "Application log entry") public record LogEntryResponse( @Schema(description = "Log timestamp (ISO-8601)") String timestamp, - @Schema(description = "Log level (INFO, WARN, ERROR, DEBUG)") String level, + @Schema(description = "Log level (INFO, WARN, ERROR, DEBUG, TRACE)") String level, @Schema(description = "Logger name") String loggerName, @Schema(description = "Log message") String message, @Schema(description = "Thread name") String threadName, - @Schema(description = "Stack trace (if present)") 
String stackTrace + @Schema(description = "Stack trace (if present)") String stackTrace, + @Schema(description = "Camel exchange ID (if present)") String exchangeId, + @Schema(description = "Agent instance ID") String instanceId, + @Schema(description = "Application ID") String application, + @Schema(description = "MDC context map") Map mdc ) {} diff --git a/cameleer3-server-app/src/main/java/com/cameleer3/server/app/dto/LogSearchPageResponse.java b/cameleer3-server-app/src/main/java/com/cameleer3/server/app/dto/LogSearchPageResponse.java new file mode 100644 index 00000000..4b885630 --- /dev/null +++ b/cameleer3-server-app/src/main/java/com/cameleer3/server/app/dto/LogSearchPageResponse.java @@ -0,0 +1,14 @@ +package com.cameleer3.server.app.dto; + +import io.swagger.v3.oas.annotations.media.Schema; + +import java.util.List; +import java.util.Map; + +@Schema(description = "Log search response with cursor pagination and level counts") +public record LogSearchPageResponse( + @Schema(description = "Log entries for the current page") List data, + @Schema(description = "Cursor for next page (null if no more results)") String nextCursor, + @Schema(description = "Whether more results exist beyond this page") boolean hasMore, + @Schema(description = "Count of logs per level (unaffected by level filter)") Map levelCounts +) {} diff --git a/cameleer3-server-app/src/main/java/com/cameleer3/server/app/search/ClickHouseLogStore.java b/cameleer3-server-app/src/main/java/com/cameleer3/server/app/search/ClickHouseLogStore.java index d7bd66ea..c579c6f0 100644 --- a/cameleer3-server-app/src/main/java/com/cameleer3/server/app/search/ClickHouseLogStore.java +++ b/cameleer3-server-app/src/main/java/com/cameleer3/server/app/search/ClickHouseLogStore.java @@ -1,6 +1,8 @@ package com.cameleer3.server.app.search; import com.cameleer3.common.model.LogEntry; +import com.cameleer3.server.core.search.LogSearchRequest; +import com.cameleer3.server.core.search.LogSearchResponse; import 
com.cameleer3.server.core.storage.LogEntryResult; import com.cameleer3.server.core.storage.LogIndex; import org.slf4j.Logger; @@ -14,6 +16,7 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -55,12 +58,9 @@ public class ClickHouseLogStore implements LogIndex { ps.setString(7, entry.getThreadName() != null ? entry.getThreadName() : ""); ps.setString(8, entry.getStackTrace() != null ? entry.getStackTrace() : ""); - // Extract camel.exchangeId from MDC into top-level column Map mdc = entry.getMdc() != null ? entry.getMdc() : Collections.emptyMap(); String exchangeId = mdc.getOrDefault("camel.exchangeId", ""); ps.setString(9, exchangeId); - - // ClickHouse JDBC handles java.util.Map natively for Map columns ps.setObject(10, mdc); }); @@ -68,62 +68,140 @@ public class ClickHouseLogStore implements LogIndex { } @Override - public List search(String applicationId, String instanceId, String level, - String query, String exchangeId, - Instant from, Instant to, int limit) { - StringBuilder sql = new StringBuilder( - "SELECT timestamp, level, logger_name, message, thread_name, stack_trace " + - "FROM logs WHERE tenant_id = 'default' AND application = ?"); - List params = new ArrayList<>(); - params.add(applicationId); + public LogSearchResponse search(LogSearchRequest request) { + // Build shared WHERE conditions (used by both data and count queries) + List baseConditions = new ArrayList<>(); + List baseParams = new ArrayList<>(); + baseConditions.add("tenant_id = 'default'"); - if (instanceId != null && !instanceId.isEmpty()) { - sql.append(" AND instance_id = ?"); - params.add(instanceId); + if (request.application() != null && !request.application().isEmpty()) { + baseConditions.add("application = ?"); + baseParams.add(request.application()); } - if (level != null && !level.isEmpty()) { - sql.append(" AND level = 
?"); - params.add(level.toUpperCase()); + if (request.instanceId() != null && !request.instanceId().isEmpty()) { + baseConditions.add("instance_id = ?"); + baseParams.add(request.instanceId()); } - if (exchangeId != null && !exchangeId.isEmpty()) { - sql.append(" AND (exchange_id = ? OR (mapContains(mdc, 'camel.exchangeId') AND mdc['camel.exchangeId'] = ?))"); - params.add(exchangeId); - params.add(exchangeId); + if (request.exchangeId() != null && !request.exchangeId().isEmpty()) { + baseConditions.add("(exchange_id = ? OR (mapContains(mdc, 'camel.exchangeId') AND mdc['camel.exchangeId'] = ?))"); + baseParams.add(request.exchangeId()); + baseParams.add(request.exchangeId()); } - if (query != null && !query.isEmpty()) { - sql.append(" AND message LIKE ?"); - params.add("%" + query + "%"); + if (request.q() != null && !request.q().isEmpty()) { + String term = "%" + escapeLike(request.q()) + "%"; + baseConditions.add("(message LIKE ? OR stack_trace LIKE ?)"); + baseParams.add(term); + baseParams.add(term); } - if (from != null) { - sql.append(" AND timestamp >= ?"); - params.add(Timestamp.from(from)); + if (request.logger() != null && !request.logger().isEmpty()) { + baseConditions.add("logger_name LIKE ?"); + baseParams.add("%" + escapeLike(request.logger()) + "%"); } - if (to != null) { - sql.append(" AND timestamp <= ?"); - params.add(Timestamp.from(to)); + if (request.from() != null) { + baseConditions.add("timestamp >= ?"); + baseParams.add(Timestamp.from(request.from())); } - sql.append(" ORDER BY timestamp DESC LIMIT ?"); - params.add(limit); + if (request.to() != null) { + baseConditions.add("timestamp <= ?"); + baseParams.add(Timestamp.from(request.to())); + } - return jdbc.query(sql.toString(), params.toArray(), (rs, rowNum) -> { + // Level counts query: uses base conditions WITHOUT level filter and cursor + String baseWhere = String.join(" AND ", baseConditions); + Map levelCounts = queryLevelCounts(baseWhere, baseParams); + + // Data query conditions: add 
level filter and cursor on top of base + List dataConditions = new ArrayList<>(baseConditions); + List dataParams = new ArrayList<>(baseParams); + + if (request.levels() != null && !request.levels().isEmpty()) { + String placeholders = String.join(", ", Collections.nCopies(request.levels().size(), "?")); + dataConditions.add("level IN (" + placeholders + ")"); + for (String lvl : request.levels()) { + dataParams.add(lvl.toUpperCase()); + } + } + + if (request.cursor() != null && !request.cursor().isEmpty()) { + Instant cursorTs = Instant.parse(request.cursor()); + if ("asc".equalsIgnoreCase(request.sort())) { + dataConditions.add("timestamp > ?"); + } else { + dataConditions.add("timestamp < ?"); + } + dataParams.add(Timestamp.from(cursorTs)); + } + + String dataWhere = String.join(" AND ", dataConditions); + String orderDir = "asc".equalsIgnoreCase(request.sort()) ? "ASC" : "DESC"; + int fetchLimit = request.limit() + 1; // fetch N+1 to detect hasMore + + String dataSql = "SELECT timestamp, level, logger_name, message, thread_name, stack_trace, " + + "exchange_id, instance_id, application, mdc " + + "FROM logs WHERE " + dataWhere + + " ORDER BY timestamp " + orderDir + " LIMIT ?"; + dataParams.add(fetchLimit); + + List results = jdbc.query(dataSql, dataParams.toArray(), (rs, rowNum) -> { Timestamp ts = rs.getTimestamp("timestamp"); String timestampStr = ts != null - ? ts.toInstant().atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_INSTANT) + ? 
ts.toInstant().atOffset(ZoneOffset.UTC).format(ISO_FMT) : null; + + @SuppressWarnings("unchecked") + Map mdc = (Map) rs.getObject("mdc"); + if (mdc == null) mdc = Collections.emptyMap(); + return new LogEntryResult( timestampStr, rs.getString("level"), rs.getString("logger_name"), rs.getString("message"), rs.getString("thread_name"), - rs.getString("stack_trace") + rs.getString("stack_trace"), + rs.getString("exchange_id"), + rs.getString("instance_id"), + rs.getString("application"), + mdc ); }); + + boolean hasMore = results.size() > request.limit(); + if (hasMore) { + results = new ArrayList<>(results.subList(0, request.limit())); + } + + String nextCursor = null; + if (hasMore && !results.isEmpty()) { + nextCursor = results.get(results.size() - 1).timestamp(); + } + + return new LogSearchResponse(results, nextCursor, hasMore, levelCounts); + } + + private Map queryLevelCounts(String baseWhere, List baseParams) { + String sql = "SELECT level, count() AS cnt FROM logs WHERE " + baseWhere + " GROUP BY level"; + Map counts = new LinkedHashMap<>(); + try { + jdbc.query(sql, baseParams.toArray(), (rs, rowNum) -> { + counts.put(rs.getString("level"), rs.getLong("cnt")); + return null; + }); + } catch (Exception e) { + log.warn("Failed to query level counts", e); + } + return counts; + } + + private static String escapeLike(String term) { + return term.replace("\\", "\\\\") + .replace("%", "\\%") + .replace("_", "\\_"); } } diff --git a/cameleer3-server-app/src/test/java/com/cameleer3/server/app/search/ClickHouseLogStoreIT.java b/cameleer3-server-app/src/test/java/com/cameleer3/server/app/search/ClickHouseLogStoreIT.java index b53e55b0..41ddaaac 100644 --- a/cameleer3-server-app/src/test/java/com/cameleer3/server/app/search/ClickHouseLogStoreIT.java +++ b/cameleer3-server-app/src/test/java/com/cameleer3/server/app/search/ClickHouseLogStoreIT.java @@ -1,6 +1,8 @@ package com.cameleer3.server.app.search; import com.cameleer3.common.model.LogEntry; +import 
com.cameleer3.server.core.search.LogSearchRequest; +import com.cameleer3.server.core.search.LogSearchResponse; import com.cameleer3.server.core.storage.LogEntryResult; import com.zaxxer.hikari.HikariDataSource; import org.junit.jupiter.api.BeforeEach; @@ -52,6 +54,10 @@ class ClickHouseLogStoreIT { return new LogEntry(ts, level, logger, message, thread, stackTrace, mdc); } + private LogSearchRequest req(String application) { + return new LogSearchRequest(null, null, application, null, null, null, null, null, null, 100, "desc"); + } + // ── Tests ───────────────────────────────────────────────────────────── @Test @@ -78,10 +84,12 @@ class ClickHouseLogStoreIT { entry(now, "INFO", "logger", "msg-b", "t1", null, null) )); - List results = store.search("app-a", null, null, null, null, null, null, 100); + LogSearchResponse result = store.search(req("app-a")); - assertThat(results).hasSize(1); - assertThat(results.get(0).message()).isEqualTo("msg-a"); + assertThat(result.data()).hasSize(1); + assertThat(result.data().get(0).message()).isEqualTo("msg-a"); + assertThat(result.data().get(0).application()).isEqualTo("app-a"); + assertThat(result.data().get(0).instanceId()).isEqualTo("agent-1"); } @Test @@ -92,11 +100,27 @@ class ClickHouseLogStoreIT { entry(now.plusSeconds(1), "ERROR", "logger", "error message", "t1", null, null) )); - List results = store.search("my-app", null, "ERROR", null, null, null, null, 100); + LogSearchResponse result = store.search(new LogSearchRequest( + null, List.of("ERROR"), "my-app", null, null, null, null, null, null, 100, "desc")); - assertThat(results).hasSize(1); - assertThat(results.get(0).level()).isEqualTo("ERROR"); - assertThat(results.get(0).message()).isEqualTo("error message"); + assertThat(result.data()).hasSize(1); + assertThat(result.data().get(0).level()).isEqualTo("ERROR"); + assertThat(result.data().get(0).message()).isEqualTo("error message"); + } + + @Test + void search_multiLevel_filtersCorrectly() { + Instant now = 
Instant.parse("2026-03-31T12:00:00Z"); + store.indexBatch("agent-1", "my-app", List.of( + entry(now, "INFO", "logger", "info msg", "t1", null, null), + entry(now.plusSeconds(1), "WARN", "logger", "warn msg", "t1", null, null), + entry(now.plusSeconds(2), "ERROR", "logger", "error msg", "t1", null, null) + )); + + LogSearchResponse result = store.search(new LogSearchRequest( + null, List.of("WARN", "ERROR"), "my-app", null, null, null, null, null, null, 100, "desc")); + + assertThat(result.data()).hasSize(2); } @Test @@ -107,10 +131,11 @@ class ClickHouseLogStoreIT { entry(now.plusSeconds(1), "INFO", "logger", "Health check OK", "t1", null, null) )); - List results = store.search("my-app", null, null, "order #12345", null, null, null, 100); + LogSearchResponse result = store.search(new LogSearchRequest( + "order #12345", null, "my-app", null, null, null, null, null, null, 100, "desc")); - assertThat(results).hasSize(1); - assertThat(results.get(0).message()).contains("order #12345"); + assertThat(result.data()).hasSize(1); + assertThat(result.data().get(0).message()).contains("order #12345"); } @Test @@ -123,10 +148,12 @@ class ClickHouseLogStoreIT { entry(now.plusSeconds(1), "INFO", "logger", "msg without exchange", "t1", null, null) )); - List results = store.search("my-app", null, null, null, "exchange-abc", null, null, 100); + LogSearchResponse result = store.search(new LogSearchRequest( + null, null, "my-app", null, "exchange-abc", null, null, null, null, 100, "desc")); - assertThat(results).hasSize(1); - assertThat(results.get(0).message()).isEqualTo("msg with exchange"); + assertThat(result.data()).hasSize(1); + assertThat(result.data().get(0).message()).isEqualTo("msg with exchange"); + assertThat(result.data().get(0).exchangeId()).isEqualTo("exchange-abc"); } @Test @@ -141,14 +168,139 @@ class ClickHouseLogStoreIT { entry(t3, "INFO", "logger", "afternoon", "t1", null, null) )); - // Query only the noon window Instant from = 
Instant.parse("2026-03-31T11:00:00Z"); Instant to = Instant.parse("2026-03-31T13:00:00Z"); - List results = store.search("my-app", null, null, null, null, from, to, 100); + LogSearchResponse result = store.search(new LogSearchRequest( + null, null, "my-app", null, null, null, from, to, null, 100, "desc")); - assertThat(results).hasSize(1); - assertThat(results.get(0).message()).isEqualTo("noon"); + assertThat(result.data()).hasSize(1); + assertThat(result.data().get(0).message()).isEqualTo("noon"); + } + + @Test + void search_crossApp_returnsAllApps() { + Instant now = Instant.parse("2026-03-31T12:00:00Z"); + store.indexBatch("agent-1", "app-a", List.of( + entry(now, "INFO", "logger", "msg-a", "t1", null, null) + )); + store.indexBatch("agent-2", "app-b", List.of( + entry(now, "INFO", "logger", "msg-b", "t1", null, null) + )); + + // No application filter — should return both + LogSearchResponse result = store.search(new LogSearchRequest( + null, null, null, null, null, null, null, null, null, 100, "desc")); + + assertThat(result.data()).hasSize(2); + } + + @Test + void search_byLogger_filtersCorrectly() { + Instant now = Instant.parse("2026-03-31T12:00:00Z"); + store.indexBatch("agent-1", "my-app", List.of( + entry(now, "INFO", "com.example.OrderProcessor", "order msg", "t1", null, null), + entry(now.plusSeconds(1), "INFO", "com.example.PaymentService", "payment msg", "t1", null, null) + )); + + LogSearchResponse result = store.search(new LogSearchRequest( + null, null, "my-app", null, null, "OrderProcessor", null, null, null, 100, "desc")); + + assertThat(result.data()).hasSize(1); + assertThat(result.data().get(0).loggerName()).contains("OrderProcessor"); + } + + @Test + void search_cursorPagination_works() { + Instant base = Instant.parse("2026-03-31T12:00:00Z"); + store.indexBatch("agent-1", "my-app", List.of( + entry(base, "INFO", "logger", "msg-1", "t1", null, null), + entry(base.plusSeconds(1), "INFO", "logger", "msg-2", "t1", null, null), + 
entry(base.plusSeconds(2), "INFO", "logger", "msg-3", "t1", null, null), + entry(base.plusSeconds(3), "INFO", "logger", "msg-4", "t1", null, null), + entry(base.plusSeconds(4), "INFO", "logger", "msg-5", "t1", null, null) + )); + + // Page 1: limit 2 + LogSearchResponse page1 = store.search(new LogSearchRequest( + null, null, "my-app", null, null, null, null, null, null, 2, "desc")); + + assertThat(page1.data()).hasSize(2); + assertThat(page1.hasMore()).isTrue(); + assertThat(page1.nextCursor()).isNotNull(); + assertThat(page1.data().get(0).message()).isEqualTo("msg-5"); + + // Page 2: use cursor + LogSearchResponse page2 = store.search(new LogSearchRequest( + null, null, "my-app", null, null, null, null, null, page1.nextCursor(), 2, "desc")); + + assertThat(page2.data()).hasSize(2); + assertThat(page2.hasMore()).isTrue(); + assertThat(page2.data().get(0).message()).isEqualTo("msg-3"); + + // Page 3: last page + LogSearchResponse page3 = store.search(new LogSearchRequest( + null, null, "my-app", null, null, null, null, null, page2.nextCursor(), 2, "desc")); + + assertThat(page3.data()).hasSize(1); + assertThat(page3.hasMore()).isFalse(); + assertThat(page3.nextCursor()).isNull(); + } + + @Test + void search_levelCounts_correctAndUnaffectedByLevelFilter() { + Instant now = Instant.parse("2026-03-31T12:00:00Z"); + store.indexBatch("agent-1", "my-app", List.of( + entry(now, "INFO", "logger", "info1", "t1", null, null), + entry(now.plusSeconds(1), "INFO", "logger", "info2", "t1", null, null), + entry(now.plusSeconds(2), "WARN", "logger", "warn1", "t1", null, null), + entry(now.plusSeconds(3), "ERROR", "logger", "err1", "t1", null, null) + )); + + // Filter for ERROR only, but counts should include all levels + LogSearchResponse result = store.search(new LogSearchRequest( + null, List.of("ERROR"), "my-app", null, null, null, null, null, null, 100, "desc")); + + assertThat(result.data()).hasSize(1); + assertThat(result.levelCounts()).containsEntry("INFO", 2L); + 
assertThat(result.levelCounts()).containsEntry("WARN", 1L); + assertThat(result.levelCounts()).containsEntry("ERROR", 1L); + } + + @Test + void search_sortAsc_returnsOldestFirst() { + Instant base = Instant.parse("2026-03-31T12:00:00Z"); + store.indexBatch("agent-1", "my-app", List.of( + entry(base, "INFO", "logger", "msg-1", "t1", null, null), + entry(base.plusSeconds(1), "INFO", "logger", "msg-2", "t1", null, null), + entry(base.plusSeconds(2), "INFO", "logger", "msg-3", "t1", null, null) + )); + + LogSearchResponse result = store.search(new LogSearchRequest( + null, null, "my-app", null, null, null, null, null, null, 100, "asc")); + + assertThat(result.data()).hasSize(3); + assertThat(result.data().get(0).message()).isEqualTo("msg-1"); + assertThat(result.data().get(2).message()).isEqualTo("msg-3"); + } + + @Test + void search_returnsNewFields() { + Instant now = Instant.parse("2026-03-31T12:00:00Z"); + Map mdc = Map.of("camel.exchangeId", "ex-123", "custom.key", "custom-value"); + + store.indexBatch("agent-1", "my-app", List.of( + entry(now, "INFO", "logger", "msg", "t1", null, mdc) + )); + + LogSearchResponse result = store.search(req("my-app")); + + assertThat(result.data()).hasSize(1); + LogEntryResult entry = result.data().get(0); + assertThat(entry.exchangeId()).isEqualTo("ex-123"); + assertThat(entry.instanceId()).isEqualTo("agent-1"); + assertThat(entry.application()).isEqualTo("my-app"); + assertThat(entry.mdc()).containsEntry("custom.key", "custom-value"); } @Test @@ -163,13 +315,11 @@ class ClickHouseLogStoreIT { entry(now, "INFO", "logger", "msg", "t1", null, mdc) )); - // Verify MDC is stored by querying raw data String exchangeId = jdbc.queryForObject( "SELECT exchange_id FROM logs WHERE application = 'my-app' LIMIT 1", String.class); assertThat(exchangeId).isEqualTo("ex-123"); - // Verify MDC map contains custom key String customVal = jdbc.queryForObject( "SELECT mdc['custom.key'] FROM logs WHERE application = 'my-app' LIMIT 1", String.class); 
diff --git a/cameleer3-server-core/src/main/java/com/cameleer3/server/core/search/LogSearchRequest.java b/cameleer3-server-core/src/main/java/com/cameleer3/server/core/search/LogSearchRequest.java new file mode 100644 index 00000000..132453a3 --- /dev/null +++ b/cameleer3-server-core/src/main/java/com/cameleer3/server/core/search/LogSearchRequest.java @@ -0,0 +1,44 @@ +package com.cameleer3.server.core.search; + +import java.time.Instant; +import java.util.List; + +/** + * Immutable search criteria for querying application logs. + * + * @param q free-text search across message and stack trace + * @param levels log level filter (e.g. ["WARN","ERROR"]) + * @param application application ID filter (nullable = all apps) + * @param instanceId agent instance ID filter + * @param exchangeId Camel exchange ID filter + * @param logger logger name substring filter + * @param from inclusive start of time range (nullable = unbounded) + * @param to inclusive end of time range (nullable = unbounded) + * @param cursor ISO timestamp cursor for keyset pagination + * @param limit page size (1-500, default 100) + * @param sort sort direction: "asc" or "desc" (default "desc") + */ +public record LogSearchRequest( + String q, + List levels, + String application, + String instanceId, + String exchangeId, + String logger, + Instant from, + Instant to, + String cursor, + int limit, + String sort +) { + + private static final int DEFAULT_LIMIT = 100; + private static final int MAX_LIMIT = 500; + + public LogSearchRequest { + if (limit <= 0) limit = DEFAULT_LIMIT; + if (limit > MAX_LIMIT) limit = MAX_LIMIT; + if (sort == null || !"asc".equalsIgnoreCase(sort)) sort = "desc"; + if (levels == null) levels = List.of(); + } +} diff --git a/cameleer3-server-core/src/main/java/com/cameleer3/server/core/search/LogSearchResponse.java b/cameleer3-server-core/src/main/java/com/cameleer3/server/core/search/LogSearchResponse.java new file mode 100644 index 00000000..027018e3 --- /dev/null +++ 
b/cameleer3-server-core/src/main/java/com/cameleer3/server/core/search/LogSearchResponse.java @@ -0,0 +1,21 @@ +package com.cameleer3.server.core.search; + +import com.cameleer3.server.core.storage.LogEntryResult; + +import java.util.List; +import java.util.Map; + +/** + * Log search result with cursor-based pagination and level aggregation. + * + * @param data matching log entries for the current page + * @param nextCursor ISO timestamp cursor for the next page (null if no more) + * @param hasMore whether more results exist beyond this page + * @param levelCounts count of matching logs per level (unaffected by level filter) + */ +public record LogSearchResponse( + List data, + String nextCursor, + boolean hasMore, + Map levelCounts +) {} diff --git a/cameleer3-server-core/src/main/java/com/cameleer3/server/core/storage/LogEntryResult.java b/cameleer3-server-core/src/main/java/com/cameleer3/server/core/storage/LogEntryResult.java index b13912b9..2eba2415 100644 --- a/cameleer3-server-core/src/main/java/com/cameleer3/server/core/storage/LogEntryResult.java +++ b/cameleer3-server-core/src/main/java/com/cameleer3/server/core/storage/LogEntryResult.java @@ -1,6 +1,8 @@ package com.cameleer3.server.core.storage; -import java.time.Instant; +import java.util.Map; public record LogEntryResult(String timestamp, String level, String loggerName, - String message, String threadName, String stackTrace) {} + String message, String threadName, String stackTrace, + String exchangeId, String instanceId, String application, + Map mdc) {} diff --git a/cameleer3-server-core/src/main/java/com/cameleer3/server/core/storage/LogIndex.java b/cameleer3-server-core/src/main/java/com/cameleer3/server/core/storage/LogIndex.java index 3d48fc7d..b069bc86 100644 --- a/cameleer3-server-core/src/main/java/com/cameleer3/server/core/storage/LogIndex.java +++ b/cameleer3-server-core/src/main/java/com/cameleer3/server/core/storage/LogIndex.java @@ -1,15 +1,14 @@ package 
com.cameleer3.server.core.storage; import com.cameleer3.common.model.LogEntry; +import com.cameleer3.server.core.search.LogSearchRequest; +import com.cameleer3.server.core.search.LogSearchResponse; -import java.time.Instant; import java.util.List; public interface LogIndex { - List search(String applicationId, String instanceId, String level, - String query, String exchangeId, - Instant from, Instant to, int limit); + LogSearchResponse search(LogSearchRequest request); void indexBatch(String instanceId, String applicationId, List entries); } diff --git a/docs/superpowers/specs/2026-04-01-ux-audit-pmf-readiness.md b/docs/superpowers/specs/2026-04-01-ux-audit-pmf-readiness.md new file mode 100644 index 00000000..76ee3c2f --- /dev/null +++ b/docs/superpowers/specs/2026-04-01-ux-audit-pmf-readiness.md @@ -0,0 +1,95 @@ +# UX Audit: PMF Readiness for First Market Offer + +**Date:** 2026-04-01 +**Epic:** Gitea #100 +**Timeline:** 8 weeks to first market offer (~2026-05-27) + +## Context + +Comprehensive UX audit evaluating readiness for product-market fit. Full-stack Apache Camel observability platform competing with Datadog, Grafana+Tempo, Dynatrace. Self-hosted first, SaaS later. + +**Three target personas (all equally important):** +- Integration Developers -- debug message flows, trace exchanges, inspect payloads/errors +- DevOps/Platform Engineers -- manage deployments, agent health, route control, config push +- Engineering Managers / Tech Leads -- dashboards, SLA compliance, error trends + +**Competitive positioning:** "General APM tools don't understand Camel. We do." 
+ +## What's Working Well + +- Process diagram visualization -- killer differentiator, no APM tool shows Camel routes this way +- Three-tab navigation (Exchanges/Dashboard/Runtime) maps cleanly to personas +- Command palette (Ctrl+K) with categorized search is polished +- Dashboard 3-level drill-down with KPIs, treemaps, punchcard heatmaps -- competitive-grade +- Dark mode is clean and well-implemented +- Design system gives visual consistency; amber/brown brand is distinctive +- Route control bar (Start/Stop/Suspend/Resume/Replay) -- unique differentiator +- Live auto-refresh with LIVE indicator +- Correlation chain navigation + +## Issues Created + +### P0 -- Ship Blockers +- #101 Onboarding & empty state experience (spec posted) +- #102 Alerting & notification system (spec posted) +- #103 Shareable links with filter state (spec posted) + +### P1 -- Must Have +- #104 Log search experience (spec posted) +- #105 Exchange table readability (spec posted) +- #106 Latency outlier investigation path (spec posted) + +### P2 -- Should Have +- #107 Data export CSV/JSON (spec posted) +- #108 Sidebar consolidation & tab-awareness (spec posted) +- #109 Dashboard L3 diagram readability (spec posted) +- #110 Time/locale formatting consistency (spec posted) +- #111 Pagination & deep result access (spec posted) + +### P3 -- Polish +- #112 Admin page context separation (spec posted) +- #113 Runtime suspended routes context (spec posted) +- #114 App Config detail full page (spec posted) +- #115 Comparative & historical analysis (spec posted) + +## Recommended 8-Week Execution Order + +| Weeks | Issues | Focus | +|-------|--------|-------| +| 1-2 | #103, #105, #110 | Quick wins: shareable links, table readability, formatting | +| 3-4 | #101, #111 | First-run experience, pagination | +| 5-6 | #102 | Alerting (the big sticky feature) | +| 7-8 | #104, #106 | Depth: log search, latency investigation | + +## Design Specs + +Full design specifications are posted as comments on each Gitea 
issue. Key architectural decisions: + +### #101 Onboarding +- `useOnboardingState()` hook derives phase from existing `useAgents()` + `useRouteCatalog()` polling +- Phases: welcome -> connected -> receiving -> complete -> dismissed +- Framework-specific snippets (Spring Boot/Quarkus/Standalone x Maven/Gradle) +- Bootstrap token via new `GET /api/v1/admin/bootstrap-token` (ADMIN only) +- Per-page empty states using DS `EmptyState` component + +### #102 Alerting +- PostgreSQL tables: alert_channels, alert_rules, alert_history, alert_rule_state +- 7 built-in alert types querying ClickHouse MVs and agent registry +- Evaluation engine: Spring @Scheduled, 10s loop, hysteresis, cooldown +- 3 notification channels: webhook, email (SMTP), Slack +- Bell icon in TopBar with firing count badge +- New "Alerting" admin tab with Rules/Channels/History sub-tabs + +### #103 Shareable Links +- URL as canonical source of truth, React state is derived mirror +- `UrlFilterSyncProvider` wraps existing `GlobalFilterProvider` (no DS changes) +- Filter changes = replaceState, navigation = pushState +- Copy Link button with Ctrl+Shift+C shortcut +- 9-step incremental rollout + +### #104 Log Search +- New `GET /api/v1/logs/search` endpoint with cursor pagination and level counts +- 4th tab: Logs, with search bar, level filter toggles, virtual-scrolled results +- Search syntax: free text + field:value (level, app, logger, exchange, mdc.*) +- Live tail via adaptive polling (2-5s) +- Bidirectional exchange correlation diff --git a/ui/src/api/queries/logs.ts b/ui/src/api/queries/logs.ts index e64d188d..cb21e652 100644 --- a/ui/src/api/queries/logs.ts +++ b/ui/src/api/queries/logs.ts @@ -11,8 +11,81 @@ export interface LogEntryResponse { message: string; threadName: string | null; stackTrace: string | null; + exchangeId: string | null; + instanceId: string | null; + application: string | null; + mdc: Record | null; } +export interface LogSearchPageResponse { + data: LogEntryResponse[]; + 
nextCursor: string | null; + hasMore: boolean; + levelCounts: Record<string, number>; +} + +export interface LogSearchParams { + q?: string; + level?: string; + application?: string; + agentId?: string; + exchangeId?: string; + logger?: string; + from?: string; + to?: string; + cursor?: string; + limit?: number; + sort?: 'asc' | 'desc'; +} + +async function fetchLogs(params: LogSearchParams): Promise<LogSearchPageResponse> { + const token = useAuthStore.getState().accessToken; + const urlParams = new URLSearchParams(); + if (params.q) urlParams.set('q', params.q); + if (params.level) urlParams.set('level', params.level); + if (params.application) urlParams.set('application', params.application); + if (params.agentId) urlParams.set('agentId', params.agentId); + if (params.exchangeId) urlParams.set('exchangeId', params.exchangeId); + if (params.logger) urlParams.set('logger', params.logger); + if (params.from) urlParams.set('from', params.from); + if (params.to) urlParams.set('to', params.to); + if (params.cursor) urlParams.set('cursor', params.cursor); + if (params.limit) urlParams.set('limit', String(params.limit)); + if (params.sort) urlParams.set('sort', params.sort); + + const res = await fetch(`${config.apiBaseUrl}/logs?${urlParams}`, { + headers: { + Authorization: `Bearer ${token}`, + 'X-Cameleer-Protocol-Version': '1', + }, + }); + if (!res.ok) throw new Error('Failed to load logs'); + return res.json() as Promise<LogSearchPageResponse>; +} + +/** + * Primary log search hook with cursor pagination and level counts. + */ +export function useLogs( + params: LogSearchParams, + options?: { enabled?: boolean; refetchInterval?: number | false }, +) { + const defaultRefetch = useRefreshInterval(15_000); + + return useQuery({ + queryKey: ['logs', params], + queryFn: () => fetchLogs(params), + enabled: options?.enabled ?? true, + placeholderData: (prev) => prev, + refetchInterval: options?.refetchInterval ??
defaultRefetch, + staleTime: 300, + }); +} + +/** + * Backward-compatible wrapper for existing consumers (LogTab, AgentHealth, AgentInstance). + * Returns the same shape they expect: data is the LogEntryResponse[] (unwrapped from the page response). + */ export function useApplicationLogs( application?: string, agentId?: string, @@ -21,36 +94,31 @@ export function useApplicationLogs( const refetchInterval = useRefreshInterval(15_000); const { timeRange } = useGlobalFilters(); const to = options?.toOverride ?? timeRange.end.toISOString(); - // When filtering by exchangeId, skip the global time range — exchange logs are historical const useTimeRange = !options?.exchangeId; - return useQuery({ - queryKey: ['logs', application, agentId, + const params: LogSearchParams = { + application: application || undefined, + agentId: agentId || undefined, + exchangeId: options?.exchangeId || undefined, + from: useTimeRange ? timeRange.start.toISOString() : undefined, + to: useTimeRange ? to : undefined, + limit: options?.limit, + }; + + const query = useQuery({ + queryKey: ['logs', 'compat', application, agentId, useTimeRange ? timeRange.start.toISOString() : null, useTimeRange ? 
to : null, options?.limit, options?.exchangeId], - queryFn: async () => { - const token = useAuthStore.getState().accessToken; - const params = new URLSearchParams(); - params.set('application', application!); - if (agentId) params.set('agentId', agentId); - if (options?.exchangeId) params.set('exchangeId', options.exchangeId); - if (useTimeRange) { - params.set('from', timeRange.start.toISOString()); - params.set('to', to); - } - if (options?.limit) params.set('limit', String(options.limit)); - const res = await fetch(`${config.apiBaseUrl}/logs?${params}`, { - headers: { - Authorization: `Bearer ${token}`, - 'X-Cameleer-Protocol-Version': '1', - }, - }); - if (!res.ok) throw new Error('Failed to load application logs'); - return res.json() as Promise; - }, + queryFn: () => fetchLogs(params), enabled: !!application, placeholderData: (prev) => prev, refetchInterval, }); + + // Unwrap: existing consumers expect data to be LogEntryResponse[] directly + return { + ...query, + data: query.data?.data ?? 
(undefined as LogEntryResponse[] | undefined), + }; } diff --git a/ui/src/components/ContentTabs.tsx b/ui/src/components/ContentTabs.tsx index 5ac5df7b..f44aabd3 100644 --- a/ui/src/components/ContentTabs.tsx +++ b/ui/src/components/ContentTabs.tsx @@ -7,6 +7,7 @@ const TABS = [ { label: 'Exchanges', value: 'exchanges' }, { label: 'Dashboard', value: 'dashboard' }, { label: 'Runtime', value: 'runtime' }, + { label: 'Logs', value: 'logs' }, ]; interface ContentTabsProps { diff --git a/ui/src/components/ExecutionDiagram/tabs/LogTab.tsx b/ui/src/components/ExecutionDiagram/tabs/LogTab.tsx index 34f076ac..72f2ff9c 100644 --- a/ui/src/components/ExecutionDiagram/tabs/LogTab.tsx +++ b/ui/src/components/ExecutionDiagram/tabs/LogTab.tsx @@ -1,4 +1,5 @@ import { useState, useMemo } from 'react'; +import { useNavigate } from 'react-router'; import { useApplicationLogs } from '../../../api/queries/logs'; import type { LogEntryResponse } from '../../../api/queries/logs'; import styles from '../ExecutionDiagram.module.css'; @@ -30,6 +31,7 @@ function formatTime(iso: string): string { export function LogTab({ applicationId, exchangeId, processorId }: LogTabProps) { const [filter, setFilter] = useState(''); + const navigate = useNavigate(); const { data: logs, isLoading } = useApplicationLogs( applicationId, @@ -93,23 +95,35 @@ export function LogTab({ applicationId, exchangeId, processorId }: LogTabProps) {processorId ? 'No logs for this processor' : 'No logs available'} ) : ( - - - {entries.map((entry, i) => ( - - - - - - ))} - -
- {formatTime(entry.timestamp)} - - {entry.level} - - {entry.message} -
+ <> + + + {entries.map((entry, i) => ( + + + + + + ))} + +
+ {formatTime(entry.timestamp)} + + {entry.level} + + {entry.message} +
+ {exchangeId && ( +
+ +
+ )} + )} diff --git a/ui/src/hooks/useScope.ts b/ui/src/hooks/useScope.ts index a466fb3a..39db1eeb 100644 --- a/ui/src/hooks/useScope.ts +++ b/ui/src/hooks/useScope.ts @@ -2,9 +2,9 @@ import { useParams, useNavigate, useLocation } from 'react-router'; import { useCallback } from 'react'; -export type TabKey = 'exchanges' | 'dashboard' | 'runtime'; +export type TabKey = 'exchanges' | 'dashboard' | 'runtime' | 'logs'; -const VALID_TABS = new Set(['exchanges', 'dashboard', 'runtime']); +const VALID_TABS = new Set(['exchanges', 'dashboard', 'runtime', 'logs']); export interface Scope { tab: TabKey; diff --git a/ui/src/pages/Exchanges/ExchangeHeader.tsx b/ui/src/pages/Exchanges/ExchangeHeader.tsx index a9ddc7e9..236c08a5 100644 --- a/ui/src/pages/Exchanges/ExchangeHeader.tsx +++ b/ui/src/pages/Exchanges/ExchangeHeader.tsx @@ -1,6 +1,6 @@ import { useMemo } from 'react'; import { useNavigate } from 'react-router'; -import { GitBranch, Server, RotateCcw } from 'lucide-react'; +import { GitBranch, Server, RotateCcw, FileText } from 'lucide-react'; import { StatusDot, MonoText, Badge } from '@cameleer/design-system'; import { useCorrelationChain } from '../../api/queries/correlation'; import { useAgents } from '../../api/queries/agents'; @@ -100,6 +100,13 @@ export function ExchangeHeader({ detail, onCorrelatedSelect, onClearSelection }: )} {formatDuration(detail.durationMs)} + {/* Route control / replay — only if agent supports it AND user has operator+ role */} diff --git a/ui/src/pages/LogsTab/LevelFilterBar.tsx b/ui/src/pages/LogsTab/LevelFilterBar.tsx new file mode 100644 index 00000000..2ef8a3db --- /dev/null +++ b/ui/src/pages/LogsTab/LevelFilterBar.tsx @@ -0,0 +1,50 @@ +import { ButtonGroup } from '@cameleer/design-system'; +import type { ButtonGroupItem } from '@cameleer/design-system'; + +function formatCount(n: number): string { + if (n >= 1_000_000) return `${(n / 1_000_000).toFixed(1)}M`; + if (n >= 1_000) return `${(n / 1_000).toFixed(1)}K`; + return 
String(n); +} + +const LEVEL_ITEMS: ButtonGroupItem[] = [ + { value: 'TRACE', label: 'Trace', color: 'var(--text-muted)' }, + { value: 'DEBUG', label: 'Debug', color: 'var(--running)' }, + { value: 'INFO', label: 'Info', color: 'var(--success)' }, + { value: 'WARN', label: 'Warn', color: 'var(--warning)' }, + { value: 'ERROR', label: 'Error', color: 'var(--error)' }, +]; + +interface LevelFilterBarProps { + activeLevels: Set; + onChange: (levels: Set) => void; + levelCounts: Record; +} + +export function LevelFilterBar({ activeLevels, onChange, levelCounts }: LevelFilterBarProps) { + const items = LEVEL_ITEMS.map((item) => ({ + ...item, + label: `${item.label} ${formatCount(levelCounts[item.value] ?? 0)}`, + })); + + return ( +
+ + {activeLevels.size > 0 && ( + + )} +
+ ); +} diff --git a/ui/src/pages/LogsTab/LogEntry.module.css b/ui/src/pages/LogsTab/LogEntry.module.css new file mode 100644 index 00000000..0d1d4708 --- /dev/null +++ b/ui/src/pages/LogsTab/LogEntry.module.css @@ -0,0 +1,187 @@ +.entry { + border-bottom: 1px solid var(--border-subtle); + cursor: pointer; + transition: background 0.1s; +} + +.entry:hover { + background: var(--bg-hover); +} + +.expanded { + background: var(--bg-surface); +} + +.row { + display: flex; + align-items: baseline; + gap: 8px; + padding: 6px 12px; + font-size: 12px; + font-family: var(--font-mono); + min-height: 28px; +} + +.timestamp { + color: var(--text-muted); + white-space: nowrap; + flex-shrink: 0; +} + +.level { + font-weight: 600; + white-space: nowrap; + flex-shrink: 0; + min-width: 40px; +} + +.logger { + color: var(--text-muted); + white-space: nowrap; + flex-shrink: 0; + max-width: 180px; + overflow: hidden; + text-overflow: ellipsis; + font-size: 11px; +} + +.message { + color: var(--text-primary); + flex: 1; + min-width: 0; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.chips { + display: flex; + gap: 4px; + flex-shrink: 0; +} + +.chip { + font-size: 10px; + padding: 1px 6px; + border-radius: var(--radius-sm); + background: var(--bg-raised); + color: var(--text-secondary); + cursor: pointer; + font-family: var(--font-body); +} + +.chip:hover { + background: var(--border); +} + +.detail { + padding: 8px 12px 12px 60px; + font-size: 12px; +} + +.detailGrid { + display: grid; + grid-template-columns: 70px 1fr; + gap: 2px 8px; + margin-bottom: 8px; +} + +.detailLabel { + color: var(--text-muted); + font-size: 11px; +} + +.detailValue { + color: var(--text-primary); + font-family: var(--font-mono); + font-size: 11px; + word-break: break-all; +} + +.fullMessage { + color: var(--text-primary); + font-family: var(--font-mono); + font-size: 12px; + white-space: pre-wrap; + word-break: break-word; + margin-bottom: 8px; + padding: 8px; + background: 
var(--bg-deep); + border-radius: var(--radius-sm); +} + +.stackTrace { + font-family: var(--font-mono); + font-size: 11px; + color: var(--error); + background: var(--bg-deep); + border-radius: var(--radius-sm); + padding: 8px; + margin: 8px 0; + overflow-x: auto; + white-space: pre; + max-height: 300px; + overflow-y: auto; +} + +.mdcSection { + margin-top: 8px; +} + +.mdcGrid { + display: flex; + flex-wrap: wrap; + gap: 4px; + margin-top: 4px; +} + +.mdcEntry { + display: flex; + gap: 2px; + font-size: 11px; + font-family: var(--font-mono); + background: var(--bg-deep); + border-radius: var(--radius-sm); + padding: 2px 6px; +} + +.mdcKey { + color: var(--text-muted); +} + +.mdcValue { + color: var(--text-primary); +} + +.actions { + display: flex; + gap: 8px; + margin-top: 8px; +} + +.actionBtn { + background: none; + border: 1px solid var(--border-subtle); + border-radius: var(--radius-sm); + padding: 4px 10px; + font-size: 11px; + color: var(--text-secondary); + cursor: pointer; + font-family: var(--font-body); +} + +.actionBtn:hover { + background: var(--bg-hover); + color: var(--text-primary); +} + +.linkBtn { + background: none; + border: none; + padding: 0; + color: var(--amber); + cursor: pointer; + font-family: var(--font-mono); + font-size: 11px; + text-decoration: underline; +} diff --git a/ui/src/pages/LogsTab/LogEntry.tsx b/ui/src/pages/LogsTab/LogEntry.tsx new file mode 100644 index 00000000..2283cfe1 --- /dev/null +++ b/ui/src/pages/LogsTab/LogEntry.tsx @@ -0,0 +1,134 @@ +import { useState, useCallback } from 'react'; +import { useNavigate } from 'react-router'; +import { Badge } from '@cameleer/design-system'; +import type { LogEntryResponse } from '../../api/queries/logs'; +import styles from './LogEntry.module.css'; + +function levelColor(level: string): string { + switch (level?.toUpperCase()) { + case 'ERROR': return 'var(--error)'; + case 'WARN': return 'var(--warning)'; + case 'INFO': return 'var(--success)'; + case 'DEBUG': return 
'var(--running)'; + case 'TRACE': return 'var(--text-muted)'; + default: return 'var(--text-secondary)'; + } +} + +function formatTime(iso: string): string { + const d = new Date(iso); + const h = String(d.getHours()).padStart(2, '0'); + const m = String(d.getMinutes()).padStart(2, '0'); + const s = String(d.getSeconds()).padStart(2, '0'); + const ms = String(d.getMilliseconds()).padStart(3, '0'); + return `${h}:${m}:${s}.${ms}`; +} + +function abbreviateLogger(name: string | null): string { + if (!name) return ''; + const parts = name.split('.'); + if (parts.length <= 2) return name; + return parts.slice(0, -1).map((p) => p[0]).join('.') + '.' + parts[parts.length - 1]; +} + +function truncate(text: string, max: number): string { + return text.length > max ? text.slice(0, max) + '\u2026' : text; +} + +interface LogEntryProps { + entry: LogEntryResponse; +} + +export function LogEntry({ entry }: LogEntryProps) { + const [expanded, setExpanded] = useState(false); + const navigate = useNavigate(); + + const hasStack = !!entry.stackTrace; + const hasExchange = !!entry.exchangeId; + + const handleViewExchange = useCallback((e: React.MouseEvent) => { + e.stopPropagation(); + if (!entry.exchangeId || !entry.application) return; + const routeId = entry.mdc?.['camel.routeId'] || '_'; + navigate(`/exchanges/${entry.application}/${routeId}/${entry.exchangeId}`); + }, [entry, navigate]); + + const handleCopyMessage = useCallback(async (e: React.MouseEvent) => { + e.stopPropagation(); + await navigator.clipboard.writeText(entry.message); + }, [entry.message]); + + return ( +
setExpanded(!expanded)}> +
+ {formatTime(entry.timestamp)} + {entry.level} + {entry.application && } + + {abbreviateLogger(entry.loggerName)} + + {truncate(entry.message, 200)} + + {hasStack && Stack} + {hasExchange && ( + Exchange + )} + +
+ + {expanded && ( +
+
+ Logger + {entry.loggerName} + Thread + {entry.threadName} + Instance + {entry.instanceId} + {hasExchange && ( + <> + Exchange + + + + + )} +
+ +
{entry.message}
+ + {hasStack && ( +
{entry.stackTrace}
+ )} + + {entry.mdc && Object.keys(entry.mdc).length > 0 && ( +
+ MDC +
+ {Object.entries(entry.mdc).map(([k, v]) => ( +
+ {k} + {v} +
+ ))} +
+
+ )} + +
+ {hasExchange && ( + + )} + +
+
+ )} +
+ ); +} diff --git a/ui/src/pages/LogsTab/LogSearch.module.css b/ui/src/pages/LogsTab/LogSearch.module.css new file mode 100644 index 00000000..fffcd287 --- /dev/null +++ b/ui/src/pages/LogsTab/LogSearch.module.css @@ -0,0 +1,156 @@ +.container { + display: flex; + flex-direction: column; + height: 100%; + min-height: 0; + background: var(--bg-body); +} + +.toolbar { + padding: 12px 16px; + display: flex; + flex-direction: column; + gap: 8px; + border-bottom: 1px solid var(--border-subtle); + background: var(--bg-surface); +} + +.searchRow { + display: flex; + gap: 8px; + align-items: center; +} + +.searchInput { + flex: 1; + padding: 6px 10px; + font-size: 13px; + border: 1px solid var(--border-subtle); + border-radius: var(--radius-md); + background: var(--bg-deep); + color: var(--text-primary); + outline: none; + font-family: var(--font-mono); +} + +.searchInput:focus { + border-color: var(--amber); +} + +.searchInput::placeholder { + color: var(--text-muted); +} + +.liveTailBtn { + display: flex; + align-items: center; + gap: 6px; + padding: 6px 12px; + font-size: 12px; + font-weight: 500; + border: 1px solid var(--border-subtle); + border-radius: var(--radius-md); + background: var(--bg-deep); + color: var(--text-secondary); + cursor: pointer; + font-family: var(--font-body); + white-space: nowrap; +} + +.liveTailBtn:hover { + border-color: var(--border); +} + +.liveTailActive { + border-color: var(--success); + color: var(--success); + background: var(--bg-surface); +} + +.liveDot { + width: 6px; + height: 6px; + border-radius: 50%; + background: var(--success); + animation: pulse 1.5s ease-in-out infinite; +} + +@keyframes pulse { + 0%, 100% { opacity: 1; } + 50% { opacity: 0.3; } +} + +.results { + flex: 1; + overflow-y: auto; + position: relative; +} + +.loadingWrap { + display: flex; + justify-content: center; + padding: 3rem; +} + +.loadMore { + display: flex; + justify-content: center; + padding: 12px; +} + +.loadMoreBtn { + padding: 6px 20px; + 
font-size: 12px; + border: 1px solid var(--border-subtle); + border-radius: var(--radius-md); + background: var(--bg-surface); + color: var(--text-secondary); + cursor: pointer; + font-family: var(--font-body); +} + +.loadMoreBtn:hover { + background: var(--bg-hover); + color: var(--text-primary); +} + +.loadMoreBtn:disabled { + opacity: 0.5; + cursor: default; +} + +.newEntries { + position: sticky; + bottom: 0; + text-align: center; + padding: 8px; + background: var(--amber); + color: var(--bg-deep); + font-size: 12px; + cursor: pointer; + font-weight: 500; +} + +.statusBar { + display: flex; + align-items: center; + gap: 12px; + padding: 4px 16px; + font-size: 11px; + color: var(--text-muted); + border-top: 1px solid var(--border-subtle); + background: var(--bg-surface); + font-family: var(--font-mono); +} + +.fetchDot { + width: 6px; + height: 6px; + border-radius: 50%; + background: var(--amber); + animation: pulse 1s ease-in-out infinite; +} + +.scope { + margin-left: auto; +} diff --git a/ui/src/pages/LogsTab/LogSearch.tsx b/ui/src/pages/LogsTab/LogSearch.tsx new file mode 100644 index 00000000..2c932b21 --- /dev/null +++ b/ui/src/pages/LogsTab/LogSearch.tsx @@ -0,0 +1,222 @@ +import { useState, useMemo, useCallback, useRef, useEffect } from 'react'; +import { useSearchParams } from 'react-router'; +import { Spinner, EmptyState, useGlobalFilters } from '@cameleer/design-system'; +import { useLogs } from '../../api/queries/logs'; +import { useRefreshInterval } from '../../api/queries/use-refresh-interval'; +import { LevelFilterBar } from './LevelFilterBar'; +import { LogEntry } from './LogEntry'; +import styles from './LogSearch.module.css'; + +interface LogSearchProps { + defaultApplication?: string; + defaultRouteId?: string; +} + +export function LogSearch({ defaultApplication, defaultRouteId }: LogSearchProps) { + const [searchParams] = useSearchParams(); + const { timeRange } = useGlobalFilters(); + + // Initialize from URL params (for cross-navigation) 
+ const urlExchangeId = searchParams.get('exchangeId') ?? undefined; + const urlQ = searchParams.get('q') ?? undefined; + + const [query, setQuery] = useState(urlQ ?? ''); + const [debouncedQuery, setDebouncedQuery] = useState(urlQ ?? ''); + const [activeLevels, setActiveLevels] = useState>(new Set()); + const [liveTail, setLiveTail] = useState(false); + const [cursor, setCursor] = useState(undefined); + const [allEntries, setAllEntries] = useState([]); + + const liveTailRef = useRef(liveTail); + liveTailRef.current = liveTail; + + // Debounce search query + const debounceTimer = useRef>(undefined); + const handleQueryChange = useCallback((value: string) => { + setQuery(value); + if (debounceTimer.current) clearTimeout(debounceTimer.current); + debounceTimer.current = setTimeout(() => { + setDebouncedQuery(value); + setCursor(undefined); + setAllEntries([]); + }, 300); + }, []); + + // Reset pagination when filters change + const handleLevelChange = useCallback((levels: Set) => { + setActiveLevels(levels); + setCursor(undefined); + setAllEntries([]); + }, []); + + const levelCsv = useMemo(() => + activeLevels.size > 0 ? [...activeLevels].join(',') : undefined, + [activeLevels]); + + // Build search params + const latestTsRef = useRef(undefined); + const liveRefetch = useRefreshInterval(2_000); + + const searchParamsObj = useMemo(() => ({ + q: debouncedQuery || undefined, + level: levelCsv, + application: defaultApplication, + exchangeId: urlExchangeId, + from: liveTail + ? (latestTsRef.current ?? timeRange.start.toISOString()) + : timeRange.start.toISOString(), + to: liveTail ? new Date().toISOString() : timeRange.end.toISOString(), + cursor: liveTail ? undefined : cursor, + limit: liveTail ? 200 : 100, + sort: liveTail ? 'asc' as const : 'desc' as const, + }), [debouncedQuery, levelCsv, defaultApplication, urlExchangeId, + timeRange, cursor, liveTail]); + + const { data, isLoading, isFetching } = useLogs(searchParamsObj, { + refetchInterval: liveTail ? 
liveRefetch : undefined, + }); + + // Live tail: append new entries + useEffect(() => { + if (!data || !liveTail) return; + if (data.data.length > 0) { + setAllEntries((prev) => { + const combined = [...prev, ...data.data]; + // Buffer limit: keep last 5000 + return combined.length > 5000 ? combined.slice(-5000) : combined; + }); + latestTsRef.current = data.data[data.data.length - 1].timestamp; + } + }, [data, liveTail]); + + // Auto-scroll for live tail + const scrollRef = useRef(null); + const [autoScroll, setAutoScroll] = useState(true); + + useEffect(() => { + if (liveTail && autoScroll && scrollRef.current) { + scrollRef.current.scrollTop = scrollRef.current.scrollHeight; + } + }, [allEntries, liveTail, autoScroll]); + + const handleScroll = useCallback(() => { + if (!scrollRef.current || !liveTail) return; + const { scrollTop, scrollHeight, clientHeight } = scrollRef.current; + setAutoScroll(scrollHeight - scrollTop - clientHeight < 50); + }, [liveTail]); + + const handleToggleLiveTail = useCallback(() => { + setLiveTail((prev) => { + if (!prev) { + // Entering live tail + setAllEntries([]); + setCursor(undefined); + latestTsRef.current = undefined; + setAutoScroll(true); + } + return !prev; + }); + }, []); + + const handleLoadMore = useCallback(() => { + if (data?.nextCursor) { + setCursor(data.nextCursor); + } + }, [data?.nextCursor]); + + // Accumulate pages for non-live mode + useEffect(() => { + if (liveTail || !data) return; + if (cursor) { + // Appending a new page + setAllEntries((prev) => [...prev, ...data.data]); + } else { + // Fresh search + setAllEntries(data.data); + } + }, [data, cursor, liveTail]); + + const entries = liveTail ? allEntries : allEntries; + const levelCounts = data?.levelCounts ?? {}; + const hasMore = data?.hasMore ?? false; + const newEntriesCount = liveTail && !autoScroll && data?.data.length + ? data.data.length : 0; + + return ( +
+
+
+ handleQueryChange(e.target.value)} + className={styles.searchInput} + /> + +
+ +
+ +
+ {isLoading && entries.length === 0 ? ( +
+ +
+ ) : entries.length === 0 ? ( + 0 + ? 'Try adjusting your search or filters.' + : 'No log entries in the selected time range.'} + /> + ) : ( + <> + {entries.map((entry, i) => ( + + ))} + {!liveTail && hasMore && ( +
+ +
+ )} + + )} + + {liveTail && !autoScroll && newEntriesCount > 0 && ( +
setAutoScroll(true)}> + New entries arriving — click to scroll to bottom +
+ )} +
+ +
+ {entries.length} entries{liveTail ? ' (live)' : ''} + {isFetching && } + {defaultApplication && ( + App: {defaultApplication} + )} +
+
+ ); +} diff --git a/ui/src/pages/LogsTab/LogsPage.tsx b/ui/src/pages/LogsTab/LogsPage.tsx new file mode 100644 index 00000000..c4257a01 --- /dev/null +++ b/ui/src/pages/LogsTab/LogsPage.tsx @@ -0,0 +1,7 @@ +import { useParams } from 'react-router'; +import { LogSearch } from './LogSearch'; + +export default function LogsPage() { + const { appId, routeId } = useParams<{ appId?: string; routeId?: string }>(); + return ; +} diff --git a/ui/src/router.tsx b/ui/src/router.tsx index 6c9296f2..6bb8a444 100644 --- a/ui/src/router.tsx +++ b/ui/src/router.tsx @@ -16,6 +16,7 @@ const OidcConfigPage = lazy(() => import('./pages/Admin/OidcConfigPage')); const DatabaseAdminPage = lazy(() => import('./pages/Admin/DatabaseAdminPage')); const ClickHouseAdminPage = lazy(() => import('./pages/Admin/ClickHouseAdminPage')); const AppConfigPage = lazy(() => import('./pages/Admin/AppConfigPage')); +const LogsPage = lazy(() => import('./pages/LogsTab/LogsPage')); const SwaggerPage = lazy(() => import('./pages/Swagger/SwaggerPage')); function SuspenseWrapper({ children }: { children: React.ReactNode }) { @@ -68,6 +69,11 @@ export const router = createBrowserRouter([ { path: 'runtime/:appId', element: }, { path: 'runtime/:appId/:instanceId', element: }, + // Logs tab + { path: 'logs', element: }, + { path: 'logs/:appId', element: }, + { path: 'logs/:appId/:routeId', element: }, + // Legacy redirects — Sidebar uses hardcoded /apps/... and /agents/... paths { path: 'apps', element: }, { path: 'apps/:appId', element: },