refactor(search): drop dead SearchIndexer subsystem

After the ExecutionController removal (0f635576), SearchIndexer
subscribed to ExecutionUpdatedEvent but nothing publishes that event.
Every SearchIndexerStats metric always returned zero, and the admin
/api/v1/admin/clickhouse/pipeline endpoint that surfaced those stats
carried no signal.

Backend removed:
- core: SearchIndexer, SearchIndexerStats, ExecutionUpdatedEvent
- app: IndexerPipelineResponse DTO, /pipeline endpoint on
  ClickHouseAdminController (field + ctor param)
- StorageBeanConfig.searchIndexer bean

UI removed:
- IndexerPipeline type + useIndexerPipeline hook in
  api/queries/admin/clickhouse.ts
- Indexer Pipeline card in ClickHouseAdminPage.tsx (plus ProgressBar
  import and pipeline* CSS classes)

OpenAPI schema.d.ts + openapi.json regenerated (stale /pipeline path
and IndexerPipelineResponse schema removed).

SearchIndex interface + ClickHouseSearchIndex impl kept — those are
live and used by SearchService + ExchangeMatchEvaluator.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
hsiegeln
2026-04-21 23:32:49 +02:00
parent a694491140
commit 98cbf8f3fc
11 changed files with 3 additions and 318 deletions

View File

@@ -16,7 +16,6 @@ import com.cameleer.server.core.agent.AgentEventRepository;
import com.cameleer.server.core.agent.AgentInfo; import com.cameleer.server.core.agent.AgentInfo;
import com.cameleer.server.core.agent.AgentRegistryService; import com.cameleer.server.core.agent.AgentRegistryService;
import com.cameleer.server.core.detail.DetailService; import com.cameleer.server.core.detail.DetailService;
import com.cameleer.server.core.indexing.SearchIndexer;
import com.cameleer.server.app.ingestion.ExecutionFlushScheduler; import com.cameleer.server.app.ingestion.ExecutionFlushScheduler;
import com.cameleer.server.app.search.ClickHouseSearchIndex; import com.cameleer.server.app.search.ClickHouseSearchIndex;
import com.cameleer.server.app.storage.ClickHouseExecutionStore; import com.cameleer.server.app.storage.ClickHouseExecutionStore;
@@ -43,13 +42,6 @@ public class StorageBeanConfig {
return new DetailService(executionStore); return new DetailService(executionStore);
} }
// Debounced background indexer that pushes updated executions into the
// SearchIndex. Debounce delay and queue capacity are tunable via the
// cameleer.server.indexer.* properties; Spring invokes shutdown() on
// context close (destroyMethod) to stop the indexer's scheduler thread.
@Bean(destroyMethod = "shutdown")
public SearchIndexer searchIndexer(ExecutionStore executionStore, SearchIndex searchIndex,
@Value("${cameleer.server.indexer.debouncems:2000}") long debounceMs,
@Value("${cameleer.server.indexer.queuesize:10000}") int queueSize) {
return new SearchIndexer(executionStore, searchIndex, debounceMs, queueSize);
}
@Bean @Bean
public AuditService auditService(AuditRepository auditRepository) { public AuditService auditService(AuditRepository auditRepository) {
return new AuditService(auditRepository); return new AuditService(auditRepository);

View File

@@ -4,8 +4,6 @@ import com.cameleer.server.app.dto.ClickHousePerformanceResponse;
import com.cameleer.server.app.dto.ClickHouseQueryInfo; import com.cameleer.server.app.dto.ClickHouseQueryInfo;
import com.cameleer.server.app.dto.ClickHouseStatusResponse; import com.cameleer.server.app.dto.ClickHouseStatusResponse;
import com.cameleer.server.app.dto.ClickHouseTableInfo; import com.cameleer.server.app.dto.ClickHouseTableInfo;
import com.cameleer.server.app.dto.IndexerPipelineResponse;
import com.cameleer.server.core.indexing.SearchIndexerStats;
import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag; import io.swagger.v3.oas.annotations.tags.Tag;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
@@ -31,15 +29,12 @@ import java.util.List;
public class ClickHouseAdminController { public class ClickHouseAdminController {
private final JdbcTemplate clickHouseJdbc; private final JdbcTemplate clickHouseJdbc;
private final SearchIndexerStats indexerStats;
private final String clickHouseUrl; private final String clickHouseUrl;
public ClickHouseAdminController( public ClickHouseAdminController(
@Qualifier("clickHouseJdbcTemplate") JdbcTemplate clickHouseJdbc, @Qualifier("clickHouseJdbcTemplate") JdbcTemplate clickHouseJdbc,
SearchIndexerStats indexerStats,
@Value("${cameleer.server.clickhouse.url:}") String clickHouseUrl) { @Value("${cameleer.server.clickhouse.url:}") String clickHouseUrl) {
this.clickHouseJdbc = clickHouseJdbc; this.clickHouseJdbc = clickHouseJdbc;
this.indexerStats = indexerStats;
this.clickHouseUrl = clickHouseUrl; this.clickHouseUrl = clickHouseUrl;
} }
@@ -157,16 +152,4 @@ public class ClickHouseAdminController {
} }
} }
// Read-only snapshot of the search-indexer pipeline counters for the admin UI.
// All values are copied field-by-field from the injected SearchIndexerStats.
@GetMapping("/pipeline")
@Operation(summary = "Search indexer pipeline statistics")
public IndexerPipelineResponse getPipeline() {
return new IndexerPipelineResponse(
indexerStats.getQueueDepth(),
indexerStats.getMaxQueueSize(),
indexerStats.getFailedCount(),
indexerStats.getIndexedCount(),
indexerStats.getDebounceMs(),
indexerStats.getIndexingRate(),
indexerStats.getLastIndexedAt());
}
} }

View File

@@ -1,16 +0,0 @@
package com.cameleer.server.app.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.Instant;
/**
 * API payload describing the current state of the search-indexer pipeline.
 *
 * @param queueDepth    executions currently waiting to be indexed
 * @param maxQueueSize  configured capacity of the pending queue
 * @param failedCount   total executions that failed to index
 * @param indexedCount  total executions indexed successfully
 * @param debounceMs    configured debounce delay in milliseconds
 * @param indexingRate  approximate indexing throughput in docs/sec
 * @param lastIndexedAt timestamp of the most recent successful index, may be null
 */
@Schema(description = "Search indexer pipeline statistics")
public record IndexerPipelineResponse(
        int queueDepth,
        int maxQueueSize,
        long failedCount,
        long indexedCount,
        long debounceMs,
        double indexingRate,
        Instant lastIndexedAt) {
}

View File

@@ -1,5 +0,0 @@
package com.cameleer.server.core.indexing;
import java.time.Instant;
public record ExecutionUpdatedEvent(String executionId, Instant startTime) {}

View File

@@ -1,143 +0,0 @@
package com.cameleer.server.core.indexing;
import com.cameleer.server.core.storage.ExecutionStore;
import com.cameleer.server.core.storage.ExecutionStore.ExecutionRecord;
import com.cameleer.server.core.storage.ExecutionStore.ProcessorRecord;
import com.cameleer.server.core.storage.SearchIndex;
import com.cameleer.server.core.storage.model.ExecutionDocument;
import com.cameleer.server.core.storage.model.ExecutionDocument.ProcessorDoc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Debounced, asynchronous bridge from execution-update events to the
 * {@link SearchIndex}. Each event (re)schedules an index pass for its
 * execution after {@code debounceMs}; all indexing runs on a single daemon
 * thread. Also implements {@link SearchIndexerStats} so the counters can be
 * surfaced for monitoring.
 */
public class SearchIndexer implements SearchIndexerStats {
private static final Logger log = LoggerFactory.getLogger(SearchIndexer.class);
private final ExecutionStore executionStore;
private final SearchIndex searchIndex;
// Delay between receiving an event and performing the index pass.
private final long debounceMs;
// Soft cap on concurrently pending (debounced) executions; see onExecutionUpdated.
private final int queueCapacity;
// executionId -> the currently scheduled (not yet run) index task for it.
private final Map<String, ScheduledFuture<?>> pending = new ConcurrentHashMap<>();
// Single daemon thread so indexing never blocks JVM shutdown.
private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(
r -> { Thread t = new Thread(r, "search-indexer"); t.setDaemon(true); return t; });
private final AtomicLong failedCount = new AtomicLong();
private final AtomicLong indexedCount = new AtomicLong();
private volatile Instant lastIndexedAt;
// State for the ~15s sliding throughput window computed in updateRate().
private final AtomicLong rateWindowStartMs = new AtomicLong(System.currentTimeMillis());
private final AtomicLong rateWindowCount = new AtomicLong();
private volatile double lastRate;
/**
 * @param executionStore source of execution/processor records to index
 * @param searchIndex    destination index
 * @param debounceMs     delay before an updated execution is indexed
 * @param queueCapacity  max pending executions before new events are dropped
 */
public SearchIndexer(ExecutionStore executionStore, SearchIndex searchIndex,
long debounceMs, int queueCapacity) {
this.executionStore = executionStore;
this.searchIndex = searchIndex;
this.debounceMs = debounceMs;
this.queueCapacity = queueCapacity;
}
/**
 * Schedules (or reschedules) indexing of the event's execution after the
 * debounce delay. When the pending map is at capacity the event is dropped
 * with a warning rather than blocking the caller.
 */
public void onExecutionUpdated(ExecutionUpdatedEvent event) {
if (pending.size() >= queueCapacity) {
log.warn("Search indexer queue full, dropping event for {}", event.executionId());
return;
}
// put() returns the previously scheduled task (if any), which is then
// cancelled so only the latest schedule fires — this is the debounce.
// NOTE(review): cancel(false) is a no-op if the old task already started,
// and a task that fires between schedule() and put() could leave a stale
// future in the map — both look benign (at worst one extra index pass),
// but confirm before relying on exact counts.
ScheduledFuture<?> existing = pending.put(event.executionId(),
scheduler.schedule(() -> indexExecution(event.executionId()),
debounceMs, TimeUnit.MILLISECONDS));
if (existing != null) {
existing.cancel(false);
}
}
/**
 * Loads the execution and its processors from the store, converts them into
 * an ExecutionDocument and writes it to the search index. Failures are
 * counted and logged but never propagate (runs on the scheduler thread).
 */
private void indexExecution(String executionId) {
pending.remove(executionId);
try {
// Execution may have been deleted since the event was queued.
ExecutionRecord exec = executionStore.findById(executionId).orElse(null);
if (exec == null) return;
List<ProcessorRecord> processors = executionStore.findProcessors(executionId);
List<ProcessorDoc> processorDocs = processors.stream()
.map(p -> new ProcessorDoc(
p.processorId(), p.processorType(), p.status(),
p.errorMessage(), p.errorStacktrace(),
p.inputBody(), p.outputBody(),
p.inputHeaders(), p.outputHeaders(),
p.attributes()))
.toList();
searchIndex.index(new ExecutionDocument(
exec.executionId(), exec.routeId(), exec.instanceId(), exec.applicationId(),
exec.status(), exec.correlationId(), exec.exchangeId(),
exec.startTime(), exec.endTime(), exec.durationMs(),
exec.errorMessage(), exec.errorStacktrace(), processorDocs,
exec.attributes(), exec.hasTraceData(), exec.isReplay()));
indexedCount.incrementAndGet();
lastIndexedAt = Instant.now();
updateRate();
} catch (Exception e) {
failedCount.incrementAndGet();
log.error("Failed to index execution {}", executionId, e);
}
}
/**
 * Recomputes the docs/sec rate once at least 15s have elapsed in the
 * current window, then resets the window.
 * NOTE(review): the read-then-set on rateWindowStartMs/rateWindowCount is
 * not atomic; concurrent callers could double-reset. Only ever called from
 * the single scheduler thread today, so this is fine as long as that holds.
 */
private void updateRate() {
long now = System.currentTimeMillis();
long windowStart = rateWindowStartMs.get();
long count = rateWindowCount.incrementAndGet();
long elapsed = now - windowStart;
if (elapsed >= 15_000) { // 15-second window
lastRate = count / (elapsed / 1000.0);
rateWindowStartMs.set(now);
rateWindowCount.set(0);
}
}
@Override
public int getQueueDepth() {
return pending.size();
}
@Override
public int getMaxQueueSize() {
return queueCapacity;
}
@Override
public long getFailedCount() {
return failedCount.get();
}
@Override
public long getIndexedCount() {
return indexedCount.get();
}
@Override
public Instant getLastIndexedAt() {
return lastIndexedAt;
}
@Override
public long getDebounceMs() {
return debounceMs;
}
@Override
public double getIndexingRate() {
return lastRate;
}
/** Stops the scheduler thread; invoked by Spring via @Bean(destroyMethod). */
public void shutdown() {
scheduler.shutdown();
}
}

View File

@@ -1,14 +0,0 @@
package com.cameleer.server.core.indexing;
import java.time.Instant;
public interface SearchIndexerStats {
int getQueueDepth();
int getMaxQueueSize();
long getFailedCount();
long getIndexedCount();
Instant getLastIndexedAt();
long getDebounceMs();
/** Approximate indexing rate in docs/sec over last measurement window */
double getIndexingRate();
}

File diff suppressed because one or more lines are too long

View File

@@ -38,16 +38,6 @@ export interface ClickHouseQuery {
query: string; query: string;
} }
/** Snapshot of the search-indexer pipeline returned by the admin API. */
export interface IndexerPipeline {
  /** Executions currently waiting to be indexed. */
  queueDepth: number;
  /** Configured capacity of the pending queue. */
  maxQueueSize: number;
  /** Total executions that failed to index. */
  failedCount: number;
  /** Total executions indexed successfully. */
  indexedCount: number;
  /** Configured debounce delay in milliseconds. */
  debounceMs: number;
  /** Approximate indexing throughput in docs/sec. */
  indexingRate: number;
  /** Timestamp of the most recent successful index; null when nothing indexed yet. */
  lastIndexedAt: string | null;
}
// ── Query Hooks ──────────────────────────────────────────────────────── // ── Query Hooks ────────────────────────────────────────────────────────
export function useClickHouseStatus() { export function useClickHouseStatus() {
@@ -86,11 +76,3 @@ export function useClickHouseQueries() {
}); });
} }
export function useIndexerPipeline() {
const refetchInterval = useRefreshInterval(10_000);
return useQuery({
queryKey: ['admin', 'clickhouse', 'pipeline'],
queryFn: () => adminFetch<IndexerPipeline>('/clickhouse/pipeline'),
refetchInterval,
});
}

View File

@@ -2044,23 +2044,6 @@ export interface paths {
patch?: never; patch?: never;
trace?: never; trace?: never;
}; };
"/admin/clickhouse/pipeline": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
/** Search indexer pipeline statistics */
get: operations["getPipeline"];
put?: never;
post?: never;
delete?: never;
options?: never;
head?: never;
patch?: never;
trace?: never;
};
"/admin/clickhouse/performance": { "/admin/clickhouse/performance": {
parameters: { parameters: {
query?: never; query?: never;
@@ -3633,23 +3616,6 @@ export interface components {
readRows?: number; readRows?: number;
query?: string; query?: string;
}; };
/** @description Search indexer pipeline statistics */
IndexerPipelineResponse: {
/** Format: int32 */
queueDepth?: number;
/** Format: int32 */
maxQueueSize?: number;
/** Format: int64 */
failedCount?: number;
/** Format: int64 */
indexedCount?: number;
/** Format: int64 */
debounceMs?: number;
/** Format: double */
indexingRate?: number;
/** Format: date-time */
lastIndexedAt?: string;
};
/** @description ClickHouse storage and performance metrics */ /** @description ClickHouse storage and performance metrics */
ClickHousePerformanceResponse: { ClickHousePerformanceResponse: {
diskSize?: string; diskSize?: string;
@@ -7942,26 +7908,6 @@ export interface operations {
}; };
}; };
}; };
getPipeline: {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
requestBody?: never;
responses: {
/** @description OK */
200: {
headers: {
[name: string]: unknown;
};
content: {
"*/*": components["schemas"]["IndexerPipelineResponse"];
};
};
};
};
getPerformance: { getPerformance: {
parameters: { parameters: {
query?: never; query?: never;

View File

@@ -5,30 +5,6 @@
flex-wrap: wrap; flex-wrap: wrap;
} }
/* pipelineCard — card styling via sectionStyles.section */
.pipelineCard {
margin-bottom: 16px;
}
/* Card heading ("Indexer Pipeline") */
.pipelineTitle {
font-size: 13px;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 8px;
}
/* Horizontal row of queue/indexed/failed/rate figures under the progress bar */
.pipelineMetrics {
display: flex;
gap: 24px;
margin-top: 8px;
font-size: 12px;
color: var(--text-muted);
}
/* Numeric values read better in the monospace font */
.pipelineMetrics span {
font-family: var(--font-mono);
}
.tableSection { .tableSection {
margin-bottom: 16px; margin-bottom: 16px;
} }

View File

@@ -1,8 +1,7 @@
import { StatCard, DataTable, ProgressBar } from '@cameleer/design-system'; import { StatCard, DataTable } from '@cameleer/design-system';
import type { Column } from '@cameleer/design-system'; import type { Column } from '@cameleer/design-system';
import { useClickHouseStatus, useClickHouseTables, useClickHousePerformance, useClickHouseQueries, useIndexerPipeline } from '../../api/queries/admin/clickhouse'; import { useClickHouseStatus, useClickHouseTables, useClickHousePerformance, useClickHouseQueries } from '../../api/queries/admin/clickhouse';
import styles from './ClickHouseAdminPage.module.css'; import styles from './ClickHouseAdminPage.module.css';
import sectionStyles from '../../styles/section-card.module.css';
import tableStyles from '../../styles/table-section.module.css'; import tableStyles from '../../styles/table-section.module.css';
export default function ClickHouseAdminPage() { export default function ClickHouseAdminPage() {
@@ -10,7 +9,6 @@ export default function ClickHouseAdminPage() {
const { data: tables } = useClickHouseTables(); const { data: tables } = useClickHouseTables();
const { data: perf } = useClickHousePerformance(); const { data: perf } = useClickHousePerformance();
const { data: queries } = useClickHouseQueries(); const { data: queries } = useClickHouseQueries();
const { data: pipeline } = useIndexerPipeline();
const unreachable = statusError || (status && !status.reachable); const unreachable = statusError || (status && !status.reachable);
const totalSize = (tables || []).reduce((sum, t) => sum + (t.dataSizeBytes || 0), 0); const totalSize = (tables || []).reduce((sum, t) => sum + (t.dataSizeBytes || 0), 0);
@@ -52,20 +50,6 @@ export default function ClickHouseAdminPage() {
</div> </div>
)} )}
{/* Pipeline */}
{pipeline && (
<div className={`${sectionStyles.section} ${styles.pipelineCard}`}>
<div className={styles.pipelineTitle}>Indexer Pipeline</div>
<ProgressBar value={pipeline.maxQueueSize > 0 ? (pipeline.queueDepth / pipeline.maxQueueSize) * 100 : 0} />
<div className={styles.pipelineMetrics}>
<span>Queue: {pipeline.queueDepth}/{pipeline.maxQueueSize}</span>
<span>Indexed: {pipeline.indexedCount.toLocaleString()}</span>
<span>Failed: {pipeline.failedCount}</span>
<span>Rate: {pipeline.indexingRate.toFixed(1)}/s</span>
</div>
</div>
)}
{/* Tables */} {/* Tables */}
<div className={`${tableStyles.tableSection} ${styles.tableSection}`}> <div className={`${tableStyles.tableSection} ${styles.tableSection}`}>
<div className={tableStyles.tableHeader}> <div className={tableStyles.tableHeader}>