feat(clickhouse): add ClickHouseExecutionStore with batch insert for chunked format

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
hsiegeln
2026-03-31 19:07:33 +02:00
parent b30dfa39f4
commit 81f7f8afe1
3 changed files with 421 additions and 0 deletions

View File

@@ -0,0 +1,151 @@
package com.cameleer3.server.app.storage;
import com.cameleer3.server.core.ingestion.MergedExecution;
import com.cameleer3.server.core.storage.model.FlatProcessorRecord;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.sql.Timestamp;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.springframework.jdbc.core.JdbcTemplate;
/**
 * Batch-insert store that writes merged executions and their flattened processor
 * records into ClickHouse through a {@link JdbcTemplate}.
 *
 * <p>Nullable strings are coerced to {@code ""} rather than SQL NULL (see
 * {@link #nullToEmpty(String)}) — presumably because the target ClickHouse columns
 * are non-nullable {@code String}s; verify against the table DDL. Header and
 * attribute maps are serialized to JSON via the configured {@link ObjectMapper}.
 *
 * <p>The column list in each INSERT and the corresponding {@code Object[]} rows are
 * positionally aligned (29 columns for {@code executions}, 31 for
 * {@code processor_executions}); keep them in lockstep when editing.
 */
public class ClickHouseExecutionStore {

    private final JdbcTemplate jdbc;
    private final ObjectMapper objectMapper;

    /**
     * Creates a store with a default {@link ObjectMapper} for JSON serialization.
     *
     * @param jdbc JDBC template bound to the ClickHouse datasource; must not be null
     */
    public ClickHouseExecutionStore(JdbcTemplate jdbc) {
        this(jdbc, new ObjectMapper());
    }

    /**
     * Creates a store with an explicit {@link ObjectMapper}.
     *
     * @param jdbc         JDBC template bound to the ClickHouse datasource; must not be null
     * @param objectMapper mapper used to serialize header/attribute maps to JSON; must not be null
     * @throws NullPointerException if either argument is null
     */
    public ClickHouseExecutionStore(JdbcTemplate jdbc, ObjectMapper objectMapper) {
        // Fail fast on misconfiguration instead of deferring the NPE to the first insert.
        this.jdbc = Objects.requireNonNull(jdbc, "jdbc");
        this.objectMapper = Objects.requireNonNull(objectMapper, "objectMapper");
    }

    /**
     * Inserts a batch of merged executions into the {@code executions} table.
     * A null or empty batch is a no-op.
     *
     * @param executions rows to insert; may be null or empty
     */
    public void insertExecutionBatch(List<MergedExecution> executions) {
        if (executions == null || executions.isEmpty()) return;
        jdbc.batchUpdate("""
            INSERT INTO executions (
                tenant_id, _version, execution_id, route_id, agent_id, application_name,
                status, correlation_id, exchange_id, start_time, end_time, duration_ms,
                error_message, error_stacktrace, error_type, error_category,
                root_cause_type, root_cause_message, diagram_content_hash, engine_level,
                input_body, output_body, input_headers, output_headers, attributes,
                trace_id, span_id, has_trace_data, is_replay
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            executions.stream().map(e -> new Object[]{
                nullToEmpty(e.tenantId()),
                e.version(),
                nullToEmpty(e.executionId()),
                nullToEmpty(e.routeId()),
                nullToEmpty(e.agentId()),
                nullToEmpty(e.applicationName()),
                nullToEmpty(e.status()),
                nullToEmpty(e.correlationId()),
                nullToEmpty(e.exchangeId()),
                // NOTE(review): assumes startTime is always non-null — Timestamp.from NPEs otherwise; confirm upstream contract.
                Timestamp.from(e.startTime()),
                // endTime may legitimately be absent for in-flight executions.
                e.endTime() != null ? Timestamp.from(e.endTime()) : null,
                e.durationMs(),
                nullToEmpty(e.errorMessage()),
                nullToEmpty(e.errorStacktrace()),
                nullToEmpty(e.errorType()),
                nullToEmpty(e.errorCategory()),
                nullToEmpty(e.rootCauseType()),
                nullToEmpty(e.rootCauseMessage()),
                nullToEmpty(e.diagramContentHash()),
                nullToEmpty(e.engineLevel()),
                nullToEmpty(e.inputBody()),
                nullToEmpty(e.outputBody()),
                // Headers/attributes arrive pre-serialized as strings on MergedExecution
                // (contrast with insertProcessorBatch, which serializes maps here).
                nullToEmpty(e.inputHeaders()),
                nullToEmpty(e.outputHeaders()),
                nullToEmpty(e.attributes()),
                nullToEmpty(e.traceId()),
                nullToEmpty(e.spanId()),
                e.hasTraceData(),
                e.isReplay()
            }).toList());
    }

    /**
     * Inserts a batch of flattened processor records belonging to a single execution
     * into the {@code processor_executions} table. A null or empty batch is a no-op.
     *
     * @param tenantId        owning tenant; null is stored as ""
     * @param executionId     parent execution id; null is stored as ""
     * @param routeId         route the processors ran in; null is stored as ""
     * @param applicationName originating application; null is stored as ""
     * @param execStartTime   fallback start time for records lacking their own
     * @param processors      rows to insert; may be null or empty
     */
    public void insertProcessorBatch(String tenantId, String executionId, String routeId,
                                     String applicationName, Instant execStartTime,
                                     List<FlatProcessorRecord> processors) {
        if (processors == null || processors.isEmpty()) return;
        jdbc.batchUpdate("""
            INSERT INTO processor_executions (
                tenant_id, execution_id, seq, parent_seq, parent_processor_id,
                processor_id, processor_type, start_time, route_id, application_name,
                iteration, iteration_size, status, end_time, duration_ms,
                error_message, error_stacktrace, error_type, error_category,
                root_cause_type, root_cause_message,
                input_body, output_body, input_headers, output_headers, attributes,
                resolved_endpoint_uri, circuit_breaker_state,
                fallback_triggered, filter_matched, duplicate_message
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            processors.stream().map(p -> new Object[]{
                nullToEmpty(tenantId),
                nullToEmpty(executionId),
                p.seq(),
                p.parentSeq(),
                nullToEmpty(p.parentProcessorId()),
                nullToEmpty(p.processorId()),
                nullToEmpty(p.processorType()),
                // NOTE(review): NPEs if both p.startTime() and execStartTime are null — confirm execStartTime is always supplied.
                Timestamp.from(p.startTime() != null ? p.startTime() : execStartTime),
                nullToEmpty(routeId),
                nullToEmpty(applicationName),
                p.iteration(),
                p.iterationSize(),
                nullToEmpty(p.status()),
                computeEndTime(p.startTime(), p.durationMs()),
                p.durationMs(),
                nullToEmpty(p.errorMessage()),
                nullToEmpty(p.errorStackTrace()),
                nullToEmpty(p.errorType()),
                nullToEmpty(p.errorCategory()),
                nullToEmpty(p.rootCauseType()),
                nullToEmpty(p.rootCauseMessage()),
                nullToEmpty(p.inputBody()),
                nullToEmpty(p.outputBody()),
                mapToJson(p.inputHeaders()),
                mapToJson(p.outputHeaders()),
                mapToJson(p.attributes()),
                nullToEmpty(p.resolvedEndpointUri()),
                nullToEmpty(p.circuitBreakerState()),
                boolOrFalse(p.fallbackTriggered()),
                boolOrFalse(p.filterMatched()),
                boolOrFalse(p.duplicateMessage())
            }).toList());
    }

    /** Coerces null to "" so non-nullable ClickHouse String columns accept the value. */
    private static String nullToEmpty(String value) {
        return value != null ? value : "";
    }

    /** Unboxes a nullable Boolean, treating null as false. */
    private static boolean boolOrFalse(Boolean value) {
        return value != null && value;
    }

    /**
     * Derives an end timestamp from a start time plus duration.
     *
     * @return start + duration, or null when either the start time is missing or the
     *         duration is not positive (a zero/negative duration is treated as "end unknown")
     */
    private static Timestamp computeEndTime(Instant startTime, long durationMs) {
        if (startTime != null && durationMs > 0) {
            return Timestamp.from(startTime.plusMillis(durationMs));
        }
        return null;
    }

    /**
     * Serializes a map to a JSON string, returning "" for null/empty input.
     * Best-effort by design: a serialization failure also yields "" rather than
     * aborting the whole batch. NOTE(review): the dropped exception is invisible —
     * consider logging it.
     */
    private String mapToJson(Map<String, String> map) {
        if (map == null || map.isEmpty()) return "";
        try {
            return objectMapper.writeValueAsString(map);
        } catch (JsonProcessingException e) {
            return "";
        }
    }
}