feat: add Logs tab with cursor-paginated search, level filters, and live tail
All checks were successful
CI / cleanup-branch (push) Has been skipped
CI / build (push) Successful in 1m3s
CI / docker (push) Successful in 1m11s
CI / deploy-feature (push) Has been skipped
CI / deploy (push) Successful in 49s

- Extend GET /api/v1/logs with cursor pagination, multi-level filtering,
  optional application scoping, and level count aggregation
- Add exchangeId, instanceId, application, mdc fields to log responses
- Refactor ClickHouseLogStore with keyset pagination (N+1 pattern)
- Add LogSearchRequest/LogSearchResponse core domain records
- Create LogSearchPageResponse wrapper DTO
- Add Logs as 4th content tab (Exchanges | Dashboard | Runtime | Logs)
- Implement LogSearch component with debounced search, level filter bar,
  expandable log entries, cursor pagination, and live tail mode
- Add cross-navigation: exchange header → logs, log tab → logs tab
- Update ClickHouseLogStoreIT with cursor, multi-level, cross-app tests

Closes: #104

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
hsiegeln
2026-04-02 08:47:16 +02:00
parent a52751da1b
commit b73f5e6dd4
22 changed files with 1405 additions and 119 deletions

View File

@@ -0,0 +1,44 @@
package com.cameleer3.server.core.search;
import java.time.Instant;
import java.util.List;
/**
 * Immutable search criteria for querying application logs.
 *
 * <p>The compact constructor normalizes missing or out-of-range values instead
 * of throwing, so raw HTTP request parameters can be passed through directly.
 *
 * @param q free-text search across message and stack trace
 * @param levels log level filter (e.g. ["WARN","ERROR"]); empty = all levels
 * @param application application ID filter (nullable = all apps)
 * @param instanceId agent instance ID filter
 * @param exchangeId Camel exchange ID filter
 * @param logger logger name substring filter
 * @param from inclusive start of time range (required)
 * @param to inclusive end of time range (required)
 * @param cursor ISO timestamp cursor for keyset pagination
 * @param limit page size (1-500, default 100)
 * @param sort sort direction, normalized to lowercase "asc" or "desc" (default "desc")
 */
public record LogSearchRequest(
    String q,
    List<String> levels,
    String application,
    String instanceId,
    String exchangeId,
    String logger,
    Instant from,
    Instant to,
    String cursor,
    int limit,
    String sort
) {
    private static final int DEFAULT_LIMIT = 100;
    private static final int MAX_LIMIT = 500;

    public LogSearchRequest {
        // Clamp page size: non-positive falls back to the default, and the
        // upper bound protects the store from unbounded result pages.
        if (limit <= 0) limit = DEFAULT_LIMIT;
        if (limit > MAX_LIMIT) limit = MAX_LIMIT;
        // Canonical lowercase token so downstream query builders can compare
        // with equals() instead of equalsIgnoreCase(); anything else is "desc".
        sort = "asc".equalsIgnoreCase(sort) ? "asc" : "desc";
        // Defensive copy keeps the record deeply immutable even when the
        // caller hands in a mutable list (List.copyOf is a no-op if already
        // immutable).
        levels = levels == null ? List.of() : List.copyOf(levels);
    }
}

View File

@@ -0,0 +1,21 @@
package com.cameleer3.server.core.search;
import com.cameleer3.server.core.storage.LogEntryResult;
import java.util.List;
import java.util.Map;
/**
 * Log search result with cursor-based pagination and level aggregation.
 *
 * <p>Collection components are defensively copied and never null, so consumers
 * can iterate without null checks and cannot mutate the response.
 *
 * @param data matching log entries for the current page (never null)
 * @param nextCursor ISO timestamp cursor for the next page (null if no more)
 * @param hasMore whether more results exist beyond this page
 * @param levelCounts count of matching logs per level (unaffected by level filter; never null)
 */
public record LogSearchResponse(
    List<LogEntryResult> data,
    String nextCursor,
    boolean hasMore,
    Map<String, Long> levelCounts
) {
    public LogSearchResponse {
        // Defensive copies keep the record deeply immutable; null collapses
        // to an empty collection (List/Map.copyOf are no-ops when the input
        // is already immutable).
        data = data == null ? List.of() : List.copyOf(data);
        levelCounts = levelCounts == null ? Map.of() : Map.copyOf(levelCounts);
    }
}

View File

@@ -1,6 +1,8 @@
package com.cameleer3.server.core.storage;
import java.time.Instant;
import java.util.Map;
/**
 * A single log entry as returned from the log index.
 *
 * @param timestamp event timestamp (string form as stored/returned by the index)
 * @param level log level (e.g. "INFO", "ERROR")
 * @param loggerName logger name that emitted the event
 * @param message rendered log message
 * @param threadName thread the event was logged on
 * @param stackTrace attached throwable stack trace, or null if none
 * @param exchangeId correlated Camel exchange ID, or null
 * @param instanceId agent instance that produced the event
 * @param application application the instance belongs to
 * @param mdc MDC context map captured with the event (never null)
 */
public record LogEntryResult(String timestamp, String level, String loggerName,
                             String message, String threadName, String stackTrace,
                             String exchangeId, String instanceId, String application,
                             Map<String, String> mdc) {
    public LogEntryResult {
        // Defensive copy keeps the record immutable; null collapses to an
        // empty map so consumers never need a null check.
        mdc = mdc == null ? Map.of() : Map.copyOf(mdc);
    }
}

View File

@@ -1,15 +1,14 @@
package com.cameleer3.server.core.storage;
import com.cameleer3.common.model.LogEntry;
import com.cameleer3.server.core.search.LogSearchRequest;
import com.cameleer3.server.core.search.LogSearchResponse;
import java.time.Instant;
import java.util.List;
/**
 * Storage-agnostic index over application log entries.
 *
 * <p>Implementations (e.g. the ClickHouse-backed store mentioned in the
 * change log) accept batched writes from agent instances and answer
 * cursor-paginated search queries.
 */
public interface LogIndex {
    /**
     * Searches indexed log entries according to the given criteria.
     *
     * @param request filters, time range, cursor, and paging options
     * @return matching entries plus next-page cursor and per-level counts
     */
    LogSearchResponse search(LogSearchRequest request);

    /**
     * Indexes a batch of log entries produced by one agent instance.
     *
     * @param instanceId agent instance that emitted the entries
     * @param applicationId application the instance belongs to
     * @param entries log entries to index
     */
    void indexBatch(String instanceId, String applicationId, List<LogEntry> entries);
}