refactor: extract MetricsQueryStore interface from AgentMetricsController

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
hsiegeln
2026-03-31 17:00:57 +02:00
parent 6e30b7ec65
commit bf0e9ea418
5 changed files with 101 additions and 31 deletions

View File

@@ -1,5 +1,6 @@
package com.cameleer3.server.app.config;
import com.cameleer3.server.app.storage.PostgresMetricsQueryStore;
import com.cameleer3.server.core.admin.AuditRepository;
import com.cameleer3.server.core.admin.AuditService;
import com.cameleer3.server.core.detail.DetailService;
@@ -11,6 +12,7 @@ import com.cameleer3.server.core.storage.model.MetricsSnapshot;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
@Configuration
public class StorageBeanConfig {
@@ -41,4 +43,9 @@ public class StorageBeanConfig {
return new IngestionService(executionStore, diagramStore, metricsBuffer,
searchIndexer::onExecutionUpdated, bodySizeLimit);
}
/**
 * Exposes the Postgres-backed {@link MetricsQueryStore} implementation as the
 * application-wide metrics query bean, wired with the shared {@code JdbcTemplate}.
 * Introduced by the interface extraction so controllers depend on the
 * abstraction rather than on JDBC directly.
 */
@Bean
public MetricsQueryStore metricsQueryStore(JdbcTemplate jdbc) {
    return new PostgresMetricsQueryStore(jdbc);
}
}

View File

@@ -2,22 +2,23 @@ package com.cameleer3.server.app.controller;
import com.cameleer3.server.app.dto.AgentMetricsResponse;
import com.cameleer3.server.app.dto.MetricBucket;
import org.springframework.jdbc.core.JdbcTemplate;
import com.cameleer3.server.core.storage.MetricsQueryStore;
import com.cameleer3.server.core.storage.model.MetricTimeSeries;
import org.springframework.web.bind.annotation.*;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Collectors;
@RestController
@RequestMapping("/api/v1/agents/{agentId}/metrics")
public class AgentMetricsController {
private final JdbcTemplate jdbc;
private final MetricsQueryStore metricsQueryStore;
public AgentMetricsController(JdbcTemplate jdbc) {
this.jdbc = jdbc;
public AgentMetricsController(MetricsQueryStore metricsQueryStore) {
this.metricsQueryStore = metricsQueryStore;
}
@GetMapping
@@ -32,34 +33,18 @@ public class AgentMetricsController {
if (to == null) to = Instant.now();
List<String> metricNames = Arrays.asList(names.split(","));
long intervalMs = (to.toEpochMilli() - from.toEpochMilli()) / Math.max(buckets, 1);
String intervalStr = intervalMs + " milliseconds";
Map<String, List<MetricBucket>> result = new LinkedHashMap<>();
for (String name : metricNames) {
result.put(name.trim(), new ArrayList<>());
}
Map<String, List<MetricTimeSeries.Bucket>> raw =
metricsQueryStore.queryTimeSeries(agentId, metricNames, from, to, buckets);
String sql = """
SELECT time_bucket(CAST(? AS interval), collected_at) AS bucket,
metric_name,
AVG(metric_value) AS avg_value
FROM agent_metrics
WHERE agent_id = ?
AND collected_at >= ? AND collected_at < ?
AND metric_name = ANY(?)
GROUP BY bucket, metric_name
ORDER BY bucket
""";
String[] namesArray = metricNames.stream().map(String::trim).toArray(String[]::new);
jdbc.query(sql, rs -> {
String metricName = rs.getString("metric_name");
Instant bucket = rs.getTimestamp("bucket").toInstant();
double value = rs.getDouble("avg_value");
result.computeIfAbsent(metricName, k -> new ArrayList<>())
.add(new MetricBucket(bucket, value));
}, intervalStr, agentId, Timestamp.from(from), Timestamp.from(to), namesArray);
Map<String, List<MetricBucket>> result = raw.entrySet().stream()
.collect(Collectors.toMap(
Map.Entry::getKey,
e -> e.getValue().stream()
.map(b -> new MetricBucket(b.time(), b.value()))
.toList(),
(a, b) -> a,
LinkedHashMap::new));
return new AgentMetricsResponse(result);
}

View File

@@ -0,0 +1,55 @@
package com.cameleer3.server.app.storage;
import com.cameleer3.server.core.storage.MetricsQueryStore;
import com.cameleer3.server.core.storage.model.MetricTimeSeries;
import org.springframework.jdbc.core.JdbcTemplate;
import java.sql.Timestamp;
import java.time.Instant;
import java.util.*;
/**
 * Postgres/TimescaleDB-backed implementation of {@link MetricsQueryStore}.
 * Aggregates rows of the {@code agent_metrics} table into a fixed number of
 * time buckets per metric using {@code time_bucket} + {@code AVG}.
 *
 * <p>NOTE(review): {@code time_bucket} is a TimescaleDB function — this store
 * assumes the extension is installed; verify against the deployment target.
 */
public class PostgresMetricsQueryStore implements MetricsQueryStore {

    private final JdbcTemplate jdbc;

    public PostgresMetricsQueryStore(JdbcTemplate jdbc) {
        this.jdbc = jdbc;
    }

    /**
     * Queries averaged metric values for one agent, bucketed over [from, to).
     *
     * @param agentId     agent whose metrics are queried (matched against {@code agent_id})
     * @param metricNames metric names to fetch; each entry is trimmed before use
     * @param from        inclusive lower bound of the time window
     * @param to          exclusive upper bound of the time window
     * @param buckets     desired number of buckets; values below 1 are treated as 1
     * @return insertion-ordered map from trimmed metric name to its buckets,
     *         in ascending bucket-time order; metrics with no rows map to an
     *         empty list (the map is pre-seeded with every requested name)
     */
    @Override
    public Map<String, List<MetricTimeSeries.Bucket>> queryTimeSeries(
            String agentId, List<String> metricNames,
            Instant from, Instant to, int buckets) {
        // Clamp to at least 1 ms: if from == to, or the window is shorter than
        // `buckets` milliseconds, the division yields 0 and "0 milliseconds"
        // is an invalid (zero-width) time_bucket interval.
        long intervalMs = Math.max(1,
                (to.toEpochMilli() - from.toEpochMilli()) / Math.max(buckets, 1));
        String intervalStr = intervalMs + " milliseconds";

        // Pre-seed with every requested (trimmed) name so callers always see a
        // key per metric, even when the query returns no rows for it.
        Map<String, List<MetricTimeSeries.Bucket>> result = new LinkedHashMap<>();
        for (String name : metricNames) {
            result.put(name.trim(), new ArrayList<>());
        }

        String sql = """
                SELECT time_bucket(CAST(? AS interval), collected_at) AS bucket,
                       metric_name,
                       AVG(metric_value) AS avg_value
                FROM agent_metrics
                WHERE agent_id = ?
                  AND collected_at >= ? AND collected_at < ?
                  AND metric_name = ANY(?)
                GROUP BY bucket, metric_name
                ORDER BY bucket
                """;
        String[] namesArray = metricNames.stream().map(String::trim).toArray(String[]::new);
        jdbc.query(sql, rs -> {
            String metricName = rs.getString("metric_name");
            Instant bucket = rs.getTimestamp("bucket").toInstant();
            double value = rs.getDouble("avg_value");
            // computeIfAbsent guards against a metric name the DB returns in a
            // form not present in the pre-seeded map.
            result.computeIfAbsent(metricName, k -> new ArrayList<>())
                    .add(new MetricTimeSeries.Bucket(bucket, value));
        }, intervalStr, agentId, Timestamp.from(from), Timestamp.from(to), namesArray);
        return result;
    }
}

View File

@@ -0,0 +1,14 @@
package com.cameleer3.server.core.storage;
import com.cameleer3.server.core.storage.model.MetricTimeSeries;
import java.time.Instant;
import java.util.List;
import java.util.Map;
/**
 * Read-side abstraction for querying bucketed agent-metric time series,
 * decoupling controllers from the concrete storage backend.
 */
public interface MetricsQueryStore {

    /**
     * Returns averaged metric values for one agent, grouped into time buckets.
     *
     * @param agentId     identifier of the agent whose metrics are queried
     * @param metricNames names of the metrics to fetch
     * @param from        lower bound of the time window
     *                    (presumably inclusive — verify against the implementation)
     * @param to          upper bound of the time window
     *                    (presumably exclusive — verify against the implementation)
     * @param buckets     desired number of time buckets across the window
     * @return map from metric name to its list of (time, value) buckets
     */
    Map<String, List<MetricTimeSeries.Bucket>> queryTimeSeries(
        String agentId, List<String> metricNames,
        Instant from, Instant to, int buckets);
}

View File

@@ -0,0 +1,9 @@
package com.cameleer3.server.core.storage.model;
import java.time.Instant;
import java.util.List;
public record MetricTimeSeries(String metricName, List<Bucket> buckets) {
public record Bucket(Instant time, double value) {}
}